diff --git a/akka-actor/src/main/scala/akka/config/Config.scala b/akka-actor/src/main/scala/akka/config/Config.scala
index 0571449b67..371e61b640 100644
--- a/akka-actor/src/main/scala/akka/config/Config.scala
+++ b/akka-actor/src/main/scala/akka/config/Config.scala
@@ -69,7 +69,7 @@ object Config {
"\n\tdue to: " + e.toString)
}
Configgy.config
- } else if (HOME.isDefined) {
+ } else if (HOME.isDefined) {
try {
val configFile = HOME.getOrElse(throwNoAkkaHomeException) + "/config/" + confName
Configgy.configure(configFile)
diff --git a/akka-camel/src/main/scala/akka/CamelService.scala b/akka-camel/src/main/scala/akka/CamelService.scala
index b546636610..da71701ae1 100644
--- a/akka-camel/src/main/scala/akka/CamelService.scala
+++ b/akka-camel/src/main/scala/akka/CamelService.scala
@@ -26,7 +26,7 @@ trait CamelService extends Bootable with Logging {
private[camel] val consumerPublisher = actorOf[ConsumerPublisher]
private[camel] val publishRequestor = actorOf[PublishRequestor]
- private val serviceEnabled = config.getBool("akka.camel.service", true)
+ private val serviceEnabled = config.getList("akka.enabled-modules").exists(_ == "camel")
/**
* Starts this CamelService unless akka.camel.service is set to false.
diff --git a/akka-http/src/main/scala/akka/AkkaBroadcaster.scala b/akka-http/src/main/scala/akka/AkkaBroadcaster.scala
index fd0f76631a..4da1a1fda5 100644
--- a/akka-http/src/main/scala/akka/AkkaBroadcaster.scala
+++ b/akka-http/src/main/scala/akka/AkkaBroadcaster.scala
@@ -12,7 +12,7 @@ import akka.dispatch.Dispatchers
import org.atmosphere.jersey.util.JerseyBroadcasterUtil
object AkkaBroadcaster {
- val broadcasterDispatcher = Dispatchers.fromConfig("akka.rest.comet-dispatcher")
+ val broadcasterDispatcher = Dispatchers.fromConfig("akka.http.comet-dispatcher")
type Event = AtmosphereResourceEvent[_,_]
type Resource = AtmosphereResource[_,_]
diff --git a/akka-http/src/main/scala/akka/AkkaCometServlet.scala b/akka-http/src/main/scala/akka/AkkaCometServlet.scala
index 5b15096c92..0c651bc776 100644
--- a/akka-http/src/main/scala/akka/AkkaCometServlet.scala
+++ b/akka-http/src/main/scala/akka/AkkaCometServlet.scala
@@ -51,11 +51,11 @@ class AkkaServlet extends AtmosphereServlet {
addInitParameter(AtmosphereServlet.DISABLE_ONSTATE_EVENT,"true")
addInitParameter(AtmosphereServlet.BROADCASTER_CLASS,classOf[AkkaBroadcaster].getName)
addInitParameter(AtmosphereServlet.PROPERTY_USE_STREAM,"true")
- addInitParameter("com.sun.jersey.config.property.packages",c.getList("akka.rest.resource_packages").mkString(";"))
- addInitParameter("com.sun.jersey.spi.container.ResourceFilters",c.getList("akka.rest.filters").mkString(","))
+ addInitParameter("com.sun.jersey.config.property.packages",c.getList("akka.http.resource_packages").mkString(";"))
+ addInitParameter("com.sun.jersey.spi.container.ResourceFilters",c.getList("akka.http.filters").mkString(","))
- c.getInt("akka.rest.maxInactiveActivity") foreach { value => addInitParameter(CometSupport.MAX_INACTIVE,value.toString) }
- c.getString("akka.rest.cometSupport") foreach { value => addInitParameter("cometSupport",value) }
+ c.getInt("akka.http.maxInactiveActivity") foreach { value => addInitParameter(CometSupport.MAX_INACTIVE,value.toString) }
+ c.getString("akka.http.cometSupport") foreach { value => addInitParameter("cometSupport",value) }
/*
* Provide a fallback for default values
diff --git a/akka-http/src/main/scala/akka/EmbeddedAppServer.scala b/akka-http/src/main/scala/akka/EmbeddedAppServer.scala
index ea68bf1106..db5761eb29 100644
--- a/akka-http/src/main/scala/akka/EmbeddedAppServer.scala
+++ b/akka-http/src/main/scala/akka/EmbeddedAppServer.scala
@@ -20,23 +20,25 @@ import org.eclipse.jetty.server.handler.{HandlerList, HandlerCollection, Context
* Handles the Akka Comet Support (load/unload)
*/
trait EmbeddedAppServer extends Bootable with Logging {
- self : BootableActorLoaderService =>
+ self: BootableActorLoaderService =>
import akka.config.Config._
- val REST_HOSTNAME = config.getString("akka.rest.hostname", "localhost")
- val REST_PORT = config.getInt("akka.rest.port", 9998)
+ val REST_HOSTNAME = config.getString("akka.http.hostname", "localhost")
+ val REST_PORT = config.getInt("akka.http.port", 9998)
+
+ val isRestEnabled = config.getList("akka.enabled-modules").exists(_ == "http")
protected var server: Option[Server] = None
abstract override def onLoad = {
super.onLoad
- if (config.getBool("akka.rest.service", true)) {
- log.info("Attempting to start Akka REST service (Jersey)")
+ if (isRestEnabled) {
+ log.info("Attempting to start Akka HTTP service")
- System.setProperty("jetty.port",REST_PORT.toString)
- System.setProperty("jetty.host",REST_HOSTNAME)
- System.setProperty("jetty.home",HOME.getOrElse(throwNoAkkaHomeException) + "/deploy/root")
+ System.setProperty("jetty.port", REST_PORT.toString)
+ System.setProperty("jetty.host", REST_HOSTNAME)
+ System.setProperty("jetty.home", HOME.getOrElse(throwNoAkkaHomeException) + "/deploy/root")
val configuration = new XmlConfiguration(
new File(HOME.getOrElse(throwNoAkkaHomeException) + "/config/microkernel-server.xml").toURI.toURL)
@@ -57,16 +59,15 @@ trait EmbeddedAppServer extends Bootable with Logging {
s.start()
s
}
- log.info("Akka REST service started (Jersey)")
+ log.info("Akka HTTP service started")
}
}
abstract override def onUnload = {
super.onUnload
- server foreach { t => {
+ server foreach { t =>
log.info("Shutting down REST service (Jersey)")
t.stop()
- }
}
}
}
diff --git a/akka-http/src/main/scala/akka/ListWriter.scala b/akka-http/src/main/scala/akka/ListWriter.scala
index 3a2c69d02a..b2addd6cd7 100644
--- a/akka-http/src/main/scala/akka/ListWriter.scala
+++ b/akka-http/src/main/scala/akka/ListWriter.scala
@@ -1,10 +1,11 @@
/**
* Copyright (C) 2009-2010 Scalable Solutions AB
*/
-package akka.rest
+package akka.http
+
+import akka.serialization.Serializer
import java.io.OutputStream
-import akka.serialization.Serializer
import javax.ws.rs.core.{MultivaluedMap, MediaType}
import javax.ws.rs.ext.{MessageBodyWriter, Provider}
import javax.ws.rs.Produces
diff --git a/akka-http/src/main/scala/akka/Security.scala b/akka-http/src/main/scala/akka/Security.scala
index b6aec9a1f0..e9152970fa 100644
--- a/akka-http/src/main/scala/akka/Security.scala
+++ b/akka-http/src/main/scala/akka/Security.scala
@@ -101,8 +101,8 @@ class AkkaSecurityFilterFactory extends ResourceFilterFactory with Logging {
}
lazy val authenticatorFQN = {
- val auth = Config.config.getString("akka.rest.authenticator", "N/A")
- if (auth == "N/A") throw new IllegalActorStateException("The config option 'akka.rest.authenticator' is not defined in 'akka.conf'")
+ val auth = Config.config.getString("akka.http.authenticator", "N/A")
+ if (auth == "N/A") throw new IllegalActorStateException("The config option 'akka.http.authenticator' is not defined in 'akka.conf'")
auth
}
@@ -399,8 +399,8 @@ trait SpnegoAuthenticationActor extends AuthenticationActor[SpnegoCredentials] w
* principal name for the HTTP kerberos service, i.e HTTP/ { server } @ { realm }
*/
lazy val servicePrincipal = {
- val p = Config.config.getString("akka.rest.kerberos.servicePrincipal", "N/A")
- if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.rest.kerberos.servicePrincipal' is not defined in 'akka.conf'")
+ val p = Config.config.getString("akka.http.kerberos.servicePrincipal", "N/A")
+ if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.http.kerberos.servicePrincipal' is not defined in 'akka.conf'")
p
}
@@ -408,21 +408,21 @@ trait SpnegoAuthenticationActor extends AuthenticationActor[SpnegoCredentials] w
* keytab location with credentials for the service principal
*/
lazy val keyTabLocation = {
- val p = Config.config.getString("akka.rest.kerberos.keyTabLocation", "N/A")
- if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.rest.kerberos.keyTabLocation' is not defined in 'akka.conf'")
+ val p = Config.config.getString("akka.http.kerberos.keyTabLocation", "N/A")
+ if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.http.kerberos.keyTabLocation' is not defined in 'akka.conf'")
p
}
lazy val kerberosDebug = {
- val p = Config.config.getString("akka.rest.kerberos.kerberosDebug", "N/A")
- if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.rest.kerberos.kerberosDebug' is not defined in 'akka.conf'")
+ val p = Config.config.getString("akka.http.kerberos.kerberosDebug", "N/A")
+ if (p == "N/A") throw new IllegalActorStateException("The config option 'akka.http.kerberos.kerberosDebug' is not defined in 'akka.conf'")
p
}
/**
* is not used by this authenticator, so accept an empty value
*/
- lazy val realm = Config.config.getString("akka.rest.kerberos.realm", "")
+ lazy val realm = Config.config.getString("akka.http.kerberos.realm", "")
/**
* verify the kerberos token from a client with the server
diff --git a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala b/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala
index a4eb481215..d077215a7f 100644
--- a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala
+++ b/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala
@@ -34,10 +34,10 @@ private[akka] object CassandraStorageBackend extends CommonStorageBackend {
val REF_KEY = "item".getBytes("UTF-8")
val EMPTY_BYTE_ARRAY = new Array[Byte](0)
- val CASSANDRA_SERVER_HOSTNAME = config.getString("akka.storage.cassandra.hostname", "127.0.0.1")
- val CASSANDRA_SERVER_PORT = config.getInt("akka.storage.cassandra.port", 9160)
+ val CASSANDRA_SERVER_HOSTNAME = config.getString("akka.persistence.cassandra.hostname", "127.0.0.1")
+ val CASSANDRA_SERVER_PORT = config.getInt("akka.persistence.cassandra.port", 9160)
val CONSISTENCY_LEVEL = {
- config.getString("akka.storage.cassandra.consistency-level", "QUORUM") match {
+ config.getString("akka.persistence.cassandra.consistency-level", "QUORUM") match {
case "ZERO" => ConsistencyLevel.ZERO
case "ONE" => ConsistencyLevel.ONE
case "QUORUM" => ConsistencyLevel.QUORUM
diff --git a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala b/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala
index 780222535d..e86ad9bfd1 100644
--- a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala
+++ b/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala
@@ -31,8 +31,8 @@ private [akka] object CouchDBStorageBackend extends
}
lazy val URL = config.
- getString("akka.storage.couchdb.url").
- getOrElse(throw new IllegalArgumentException("'akka.storage.couchdb.url' not found in config"))
+ getString("akka.persistence.couchdb.url").
+ getOrElse(throw new IllegalArgumentException("'akka.persistence.couchdb.url' not found in config"))
def drop() = {
val client = new HttpClient()
diff --git a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala b/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala
index 2b33876ecf..7e3d750803 100644
--- a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala
+++ b/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.util.Bytes
*/
private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging {
- val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper-quorum", "localhost")
+ val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.persistence.hbase.zookeeper-quorum", "localhost")
val CONFIGURATION = new HBaseConfiguration
val REF_TABLE_NAME = "__REF_TABLE"
val VECTOR_TABLE_NAME = "__VECTOR_TABLE"
diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala
index 427163d634..5e949c8a28 100644
--- a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala
+++ b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala
@@ -48,7 +48,7 @@ class SimpleHbaseSpecTestIntegration extends Spec with BeforeAndAfterAll with Sh
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.client.HTable
- val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper-quorum", "0")
+ val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.persistence.hbase.zookeeper-quorum", "0")
HBASE_ZOOKEEPER_QUORUM should not equal ("0")
HBASE_ZOOKEEPER_QUORUM should equal("localhost")
diff --git a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala b/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala
index 4b36fa4b87..0859e6d88e 100644
--- a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala
+++ b/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala
@@ -20,7 +20,7 @@ private[akka] object MemcachedStorageBackend extends CommonStorageBackend {
import KVStorageBackend._
import org.apache.commons.codec.binary.Base64
- val clientAddresses = config.getString("akka.storage.memcached.client.addresses", "localhost:11211")
+ val clientAddresses = config.getString("akka.persistence.memcached.client.addresses", "localhost:11211")
val factory = new KetamaConnectionFactory
val client = new MemcachedClient(factory, AddrUtil.getAddresses(clientAddresses))
val base64 = new Base64(76, Array.empty[Byte], true)
@@ -114,4 +114,4 @@ private[akka] object MemcachedStorageBackend extends CommonStorageBackend {
}
-}
\ No newline at end of file
+}
diff --git a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala b/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala
index cab7237eb0..e1a2405222 100644
--- a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala
+++ b/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala
@@ -31,9 +31,9 @@ private[akka] object MongoStorageBackend extends
val REF = "__ref"
val COLLECTION = "akka_coll"
- val HOSTNAME = config.getString("akka.storage.mongodb.hostname", "127.0.0.1")
- val DBNAME = config.getString("akka.storage.mongodb.dbname", "testdb")
- val PORT = config.getInt("akka.storage.mongodb.port", 27017)
+ val HOSTNAME = config.getString("akka.persistence.mongodb.hostname", "127.0.0.1")
+ val DBNAME = config.getString("akka.persistence.mongodb.dbname", "testdb")
+ val PORT = config.getInt("akka.persistence.mongodb.port", 27017)
val db: MongoDB = MongoConnection(HOSTNAME, PORT)(DBNAME)
val coll: MongoCollection = db(COLLECTION)
diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala b/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala
index df7a1feab4..d6a8cfb0a7 100644
--- a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala
+++ b/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala
@@ -52,14 +52,14 @@ private [akka] object RedisStorageBackend extends
Logging {
// need an explicit definition in akka-conf
- val nodes = config.getList("akka.storage.redis.cluster")
+ val nodes = config.getList("akka.persistence.redis.cluster")
def connect() =
nodes match {
case Seq() =>
// no cluster defined
- val REDIS_SERVER_HOSTNAME = config.getString("akka.storage.redis.hostname", "127.0.0.1")
- val REDIS_SERVER_PORT = config.getInt("akka.storage.redis.port", 6379)
+ val REDIS_SERVER_HOSTNAME = config.getString("akka.persistence.redis.hostname", "127.0.0.1")
+ val REDIS_SERVER_PORT = config.getInt("akka.persistence.redis.port", 6379)
new RedisClient(REDIS_SERVER_HOSTNAME, REDIS_SERVER_PORT)
case s =>
diff --git a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala b/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala
index 57c8776a1b..149576da05 100644
--- a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala
+++ b/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala
@@ -18,12 +18,12 @@ import com.trifork.riak.{RequestMeta, RiakObject, RiakClient}
private[akka] object RiakStorageBackend extends CommonStorageBackend {
- val refBucket = config.getString("akka.storage.riak.bucket.ref", "Refs")
- val mapBucket = config.getString("akka.storage.riak.bucket.map", "Maps")
- val vectorBucket = config.getString("akka.storage.riak.bucket.vector", "Vectors")
- val queueBucket = config.getString("akka.storage.riak.bucket.queue", "Queues")
- val clientHost = config.getString("akka.storage.riak.client.host", "localhost")
- val clientPort = config.getInt("akka.storage.riak.client.port", 8087)
+ val refBucket = config.getString("akka.persistence.riak.bucket.ref", "Refs")
+ val mapBucket = config.getString("akka.persistence.riak.bucket.map", "Maps")
+ val vectorBucket = config.getString("akka.persistence.riak.bucket.vector", "Vectors")
+ val queueBucket = config.getString("akka.persistence.riak.bucket.queue", "Queues")
+ val clientHost = config.getString("akka.persistence.riak.client.host", "localhost")
+ val clientPort = config.getInt("akka.persistence.riak.client.port", 8087)
val riakClient: RiakClient = new RiakClient(clientHost, clientPort);
import CommonStorageBackendAccess._
diff --git a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala b/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala
index f0441a60ef..dba6579e41 100644
--- a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala
+++ b/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala
@@ -28,42 +28,42 @@ private[akka] object SimpledbStorageBackend extends CommonStorageBackend {
val ownerAtt = "owner"
val base64 = new Base64(1024, seperatorBytes, true)
val base64key = new Base64(1024, Array.empty[Byte], true)
- val id = config.getString("akka.storage.simpledb.account.id").getOrElse{
+ val id = config.getString("akka.persistence.simpledb.account.id").getOrElse{
val e = new IllegalStateException("You must provide an AWS id")
log.error(e, "You Must Provide an AWS id to use the SimpledbStorageBackend")
throw e
}
- val secretKey = config.getString("akka.storage.simpledb.account.secretKey").getOrElse{
+ val secretKey = config.getString("akka.persistence.simpledb.account.secretKey").getOrElse{
val e = new IllegalStateException("You must provide an AWS secretKey")
log.error(e, "You Must Provide an AWS secretKey to use the SimpledbStorageBackend")
throw e
}
- val refDomain = config.getString("akka.storage.simpledb.domain.ref", "ref")
- val mapDomain = config.getString("akka.storage.simpledb.domain.map", "map")
- val queueDomain = config.getString("akka.storage.simpledb.domain.queue", "queue")
- val vectorDomain = config.getString("akka.storage.simpledb.domain.vector", "vector")
+ val refDomain = config.getString("akka.persistence.simpledb.domain.ref", "ref")
+ val mapDomain = config.getString("akka.persistence.simpledb.domain.map", "map")
+ val queueDomain = config.getString("akka.persistence.simpledb.domain.queue", "queue")
+ val vectorDomain = config.getString("akka.persistence.simpledb.domain.vector", "vector")
val credentials = new BasicAWSCredentials(id, secretKey);
val clientConfig = new ClientConfiguration()
- for (i <- config.getInt("akka.storage.simpledb.client.timeout")) {
+ for (i <- config.getInt("akka.persistence.simpledb.client.timeout")) {
clientConfig.setConnectionTimeout(i)
}
- for (i <- config.getInt("akka.storage.simpledb.client.maxconnections")) {
+ for (i <- config.getInt("akka.persistence.simpledb.client.maxconnections")) {
clientConfig.setMaxConnections(i)
}
- clientConfig.setMaxErrorRetry(config.getInt("akka.storage.simpledb.client.maxretries", 10))
+ clientConfig.setMaxErrorRetry(config.getInt("akka.persistence.simpledb.client.maxretries", 10))
- for (s <- config.getString("akka.storage.simpledb.client.protocol")) {
+ for (s <- config.getString("akka.persistence.simpledb.client.protocol")) {
clientConfig.setProtocol(Protocol.valueOf(s))
}
- for (i <- config.getInt("akka.storage.simpledb.client.sockettimeout")) {
+ for (i <- config.getInt("akka.persistence.simpledb.client.sockettimeout")) {
clientConfig.setSocketTimeout(i)
}
- for {s <- config.getInt("akka.storage.simpledb.client.sendbuffer")
- r <- config.getInt("akka.storage.simpledb.client.receivebuffer")} {
+ for {s <- config.getInt("akka.persistence.simpledb.client.sendbuffer")
+ r <- config.getInt("akka.persistence.simpledb.client.receivebuffer")} {
clientConfig.setSocketBufferSizeHints(s, r)
}
- for (s <- config.getString("akka.storage.simpledb.client.useragent")) {
+ for (s <- config.getString("akka.persistence.simpledb.client.useragent")) {
clientConfig.setUserAgent(s)
}
@@ -292,4 +292,4 @@ private[akka] object SimpledbStorageBackend extends CommonStorageBackend {
}
-}
\ No newline at end of file
+}
diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala
index 520b2030fe..8f2779eb06 100644
--- a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala
+++ b/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala
@@ -28,14 +28,14 @@ private[akka] object VoldemortStorageBackend extends CommonStorageBackend {
import VoldemortAccess._
val bootstrapUrlsProp = "bootstrap_urls"
- val clientConfig = config.getConfigMap("akka.storage.voldemort.client") match {
+ val clientConfig = config.getConfigMap("akka.persistence.voldemort.client") match {
case Some(configMap) => getClientConfig(configMap.asMap)
case None => getClientConfig(new HashMap[String, String] + (bootstrapUrlsProp -> "tcp://localhost:6666"))
}
- val refStore = config.getString("akka.storage.voldemort.store.ref", "Refs")
- val mapStore = config.getString("akka.storage.voldemort.store.map", "Maps")
- val vectorStore = config.getString("akka.storage.voldemort.store.vector", "Vectors")
- val queueStore = config.getString("akka.storage.voldemort.store.queue", "Queues")
+ val refStore = config.getString("akka.persistence.voldemort.store.ref", "Refs")
+ val mapStore = config.getString("akka.persistence.voldemort.store.map", "Maps")
+ val vectorStore = config.getString("akka.persistence.voldemort.store.vector", "Vectors")
+ val queueStore = config.getString("akka.persistence.voldemort.store.queue", "Queues")
var storeClientFactory: StoreClientFactory = null
diff --git a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala
index 506d95905b..d67c09cd93 100644
--- a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala
+++ b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala
@@ -22,11 +22,11 @@ trait BootableRemoteActorService extends Bootable with Logging {
else RemoteNode.start
}
}, "Akka Remote Service")
-
+
def startRemoteService = remoteServerThread.start
abstract override def onLoad = {
- if (config.getBool("akka.remote.server.service", true)) {
+ if (RemoteServer.isRemotingEnabled) {
log.info("Initializing Remote Actors Service...")
startRemoteService
log.info("Remote Actors Service initialized")
diff --git a/akka-remote/src/main/scala/akka/remote/RemoteServer.scala b/akka-remote/src/main/scala/akka/remote/RemoteServer.scala
index 3d4e81e336..33b71f1425 100644
--- a/akka-remote/src/main/scala/akka/remote/RemoteServer.scala
+++ b/akka-remote/src/main/scala/akka/remote/RemoteServer.scala
@@ -66,12 +66,14 @@ object RemoteNode extends RemoteServer
* @author Jonas Bonér
*/
object RemoteServer {
+ val isRemotingEnabled = config.getList("akka.enabled-modules").exists(_ == "remote")
+
val UUID_PREFIX = "uuid:"
val MESSAGE_FRAME_SIZE = config.getInt("akka.remote.server.message-frame-size", 1048576)
val SECURE_COOKIE = config.getString("akka.remote.secure-cookie")
val REQUIRE_COOKIE = {
val requireCookie = config.getBool("akka.remote.server.require-cookie", true)
- if (requireCookie && RemoteServer.SECURE_COOKIE.isEmpty) throw new ConfigurationException(
+ if (isRemotingEnabled && requireCookie && RemoteServer.SECURE_COOKIE.isEmpty) throw new ConfigurationException(
"Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.")
requireCookie
}
diff --git a/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala b/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala
index 1a462d1700..bee8dd5bd9 100644
--- a/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala
+++ b/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala
@@ -39,7 +39,7 @@ class Boot {
}
/*
- * In akka.conf you can set the FQN of any AuthenticationActor of your wish, under the property name: akka.rest.authenticator
+ * In akka.conf you can set the FQN of any AuthenticationActor of your wish, under the property name: akka.http.authenticator
*/
class DigestAuthenticationService extends DigestAuthenticationActor {
//If you want to have a distributed nonce-map, you can use something like below,
diff --git a/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml b/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml
index e9d5bbb4db..d532f0e3f2 100644
--- a/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml
+++ b/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml
@@ -8,7 +8,7 @@
AkkaServlet
- akka.rest.AkkaServlet
+ akka.http.AkkaServlet
diff --git a/config/akka-reference.conf b/config/akka-reference.conf
index 17a29b0ff9..04992fb83d 100644
--- a/config/akka-reference.conf
+++ b/config/akka-reference.conf
@@ -8,7 +8,9 @@
akka {
version = "1.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
- time-unit = "seconds" # Default timeout time unit for all timeout properties throughout the config
+  enabled-modules = [] # Comma-separated list of the enabled modules. Options: ["remote", "remote.ssl", "camel", "http"]
+
+ time-unit = "seconds" # Time unit for all timeout properties throughout the config
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
# Can be used to bootstrap your application(s)
@@ -84,8 +86,7 @@ akka {
timeout = 60
}
- rest {
- service = on
+ http {
hostname = "localhost"
port = 9998
#cometSupport = "org.atmosphere.container.Jetty7CometSupport" # Disregard autodetection, for valid values: http://doc.akkasource.org/comet
@@ -133,13 +134,12 @@ akka {
}
server {
- service = on
- hostname = "localhost" # The hostname or IP that clients should connect to
- port = 2552 # The port clients should connect to
- message-frame-size = 1048576
+ hostname = "localhost" # The hostname or IP that clients should connect to
+ port = 2552 # The port clients should connect to. Default is 2552 (AKKA)
+ message-frame-size = 1048576 # Increase this if you want to be able to send messages with large payloads
connection-timeout = 1
- require-cookie = on
- untrusted-mode = off
+      require-cookie = on          # Should the remote server require that its peers share the same secure-cookie (defined in the 'remote' section)?
+      untrusted-mode = off         # Enable untrusted mode for full security of server-managed actors; allows untrusted clients to connect.
}
client {
@@ -148,15 +148,9 @@ akka {
message-frame-size = 1048576
reconnection-time-window = 600 # Maximum time window that a client should try to reconnect for
}
-
- cluster {
- service = on
- name = "default" # The name of the cluster
- serializer = "akka.serialization.Serializer$Java$" # FQN of the serializer class
- }
}
- storage {
+ persistence {
cassandra {
hostname = "127.0.0.1" # IP address or hostname of one of the Cassandra cluster's seeds
port = 9160 # Port to Cassandra
@@ -203,24 +197,25 @@ akka {
queue = "Queues"
}
- client{
+ client {
host = "localhost"
port = 8087 #Default Riak Protobuf port
}
}
memcached {
- client{
- addresses = "localhost:11211" #Formatted according to spymemcached "localhost:11211 otherhost:11211" etc..
+ client {
+ addresses = "localhost:11211" #Formatted according to spymemcached "localhost:11211 otherhost:11211" etc..
}
}
simpledb {
- account{
+ account {
id = "YOU NEED TO PROVIDE AN AWS ID"
secretKey = "YOU NEED TO PROVIDE AN AWS SECRETKEY"
}
- client{
+
+ client {
#Defaults to default AWS ClientConfiguration
timeout =50000
#maxconnections =
@@ -231,7 +226,8 @@ akka {
#receivebuffer = 0
#useragent = "AWS Java SDK-1.0.14"
}
- domain{
+
+ domain {
ref = "ref"
map = "map"
vector = "vector"
@@ -239,8 +235,4 @@ akka {
}
}
}
-
- camel {
- service = on
- }
}