restructured distribution and Maven files, removed unused JARs, added a number of Maven plugins, added ActorRegistry, switched to using real AOP aspects for proxies

This commit is contained in:
Jonas Boner 2009-08-14 00:01:18 +02:00
parent e3de827b37
commit d3c62e4233
27 changed files with 721 additions and 178 deletions


@@ -11,30 +11,26 @@ set LIB_DIR=%AKKA_HOME%\lib
set CLASSPATH=%AKKA_HOME%\config
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\akka-kernel-0.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\akka-util-java-0.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\antlr-3.1.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aopalliance-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\asm-3.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aspectwerkz-jdk5-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aspectwerkz-nodeps-jdk5-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-core-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-portable-runtime-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\camel-core-2.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-compat-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\cassandra-0.4.0-dev.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\cassandra-0.4.0-trunk.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\cglib-2.2.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-cli-1.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-collections-3.2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-io-1.3.2.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-javaflow-1.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-lang-2.4.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-logging-1.0.4.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-math-1.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-pool-1.5.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\configgy-1.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\fscontext.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\google-collect-snapshot-20090211.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\grizzly-comet-webserver-1.8.6.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\guice-core-2.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\guice-jsr250-2.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\high-scale-lib.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jackson-core-asl-1.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jackson-mapper-asl-1.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-client-1.1.1-ea.jar
@@ -43,13 +39,11 @@ set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-json-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-server-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-scala-1.1.2-ea-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\JSAP-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jsr250-api-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jsr311-api-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\libfb303.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\libthrift.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\log4j-1.2.15.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\lucene-core-2.2.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\netty-3.1.0.CR1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\netty-3.1.0.GA.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\providerutil.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\protobuf-java-2.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\scala-library-2.7.5.jar


@@ -5,15 +5,6 @@ VERSION=0.5
BASE_DIR=$(dirname $0)/..
echo 'Starting Akka Kernel from directory' $BASE_DIR
echo 'Resetting persistent storage in' $BASE_DIR/storage
rm -rf $BASE_DIR/storage
mkdir $BASE_DIR/storage
mkdir $BASE_DIR/storage/bootstrap
mkdir $BASE_DIR/storage/callouts
mkdir $BASE_DIR/storage/commitlog
mkdir $BASE_DIR/storage/data
mkdir $BASE_DIR/storage/system
LIB_DIR=$BASE_DIR/lib
CLASSPATH=$BASE_DIR/config
@@ -22,12 +13,13 @@ CLASSPATH=$CLASSPATH:$LIB_DIR/akka-util-java-0.5.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/antlr-3.1.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aopalliance-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/asm-3.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aspectwerkz-jdk5-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aspectwerkz-nodeps-jdk5-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-core-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-portable-runtime-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/camel-core-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-compat-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cassandra-0.4.0-dev.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cassandra-0.4.0-trunk.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cglib-2.2.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-cli-1.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-collections-3.2.1.jar
@@ -36,13 +28,12 @@ CLASSPATH=$CLASSPATH:$LIB_DIR/commons-javaflow-1.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-lang-2.4.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-logging-1.0.4.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-math-1.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-pool-1.5.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/configgy-1.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/fscontext.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/google-collect-snapshot-20090211.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/grizzly-comet-webserver-1.8.6.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/guice-core-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/guice-jsr250-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/high-scale-lib.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jackson-core-asl-1.1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jackson-mapper-asl-1.1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/javautils-2.7.4-0.1.jar
@@ -52,12 +43,10 @@ CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-json-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-server-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-scala-1.1.2-ea-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/JSAP-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jsr250-api-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jsr311-api-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/libfb303.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/libthrift.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/log4j-1.2.15.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/lucene-core-2.2.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/netty-3.1.0.GA.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/providerutil.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/protobuf-java-2.1.0.jar
@@ -89,7 +78,5 @@ JVM_OPTS=" \
-Dcom.sun.grizzly.cometSupport=true \
-Dcom.sun.management.jmxremote.authenticate=false"
#$JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.Boot se.scalablesolutions.akka.kernel.Kernel ${1}
echo $JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.kernel.Kernel ${1}
$JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.kernel.Kernel ${1}

changes.xml (new executable file, +45 lines)

@@ -0,0 +1,45 @@
<!-- mini guide
<action
invoke with 'mvn changes:changes-report'
dev="Name of developer who committed the change"
type="add|fix|remove|update"
issue="(optional) Id of the issue related to this change"
due-to="(optional) Name of the person 'non-committer' to be credited for this change"
>
description
</action>
see http://maven.apache.org/plugins/maven-changes-plugin/usage.html for full guide
-->
<document>
<properties>
<title>Akka Release Notes</title>
<author></author>
</properties>
<body>
<release version="0.6" date="" description="">
<action dev="Debasish Ghosh" type="add">MongoDB as Akka storage backend </action>
<action dev="Tim Perrett" type="add">Support for using the Lift Web framework with Actors</action>
<action dev="Viktor Klang" type="add">Support for using Scala XML tags in RESTful Actors (scala-jersey)</action>
<action dev="Viktor Klang" type="add">Support for Comet Actors using Atmosphere</action>
<action dev="Jonas Bon&#233;r" type="add">Statistics recorder with JMX and REST APIs</action>
<action dev="Jonas Bon&#233;r" type="add">Management service with JMX and REST APIs</action>
<action dev="Jonas Bon&#233;r" type="add">JSON serialization for Java objects (using Jackson)</action>
<action dev="Jonas Bon&#233;r" type="add">JSON serialization for Scala objects (using scala-json)</action>
<action dev="Jonas Bon&#233;r" type="add">Protobuf serialization for Java and Scala objects</action>
<action dev="Jonas Bon&#233;r" type="add">SBinary serialization for Scala objects</action>
<action dev="Jonas Bon&#233;r" type="add">Protobuf as remote protocol</action>
<action dev="Jonas Bon&#233;r" type="add">Added CassandraSession API (with socket pooling) wrapping Cassandra's Thrift API in Scala and Java APIs</action>
<action dev="Jonas Bon&#233;r" type="add">CassandraStorage is now works with external Cassandra cluster</action>
<action dev="Jonas Bon&#233;r" type="remove">Removed embedded Cassandra mode</action>
<action dev="Jonas Bon&#233;r" type="add">ActorRegistry for retrieving Actor instances</action>
<action dev="Jonas Bon&#233;r" type="fix">Concurrent mode is now per actor basis</action>
<action dev="Jonas Bon&#233;r" type="fix">Made Akka Web App aware, does not require AKKA_HOME when using it as a library</action>
<action dev="Jonas Bon&#233;r" type="fix">Fixed dispatcher bug</action>
<action dev="Jonas Bon&#233;r" type="fix">Cleaned up Maven scripts and distribution in general</action>
<action dev="Jonas Bon&#233;r" type="add">Added mailing list: akka-user@googlegroups.com</action>
<action dev="Jonas Bon&#233;r" type="add">Improved and restructured documentation</action>
<action dev="Jonas Bon&#233;r" type="add">New URL: http://akkasource.org</action>
</release>
<release version="0.5" date="2009-07-12" description="First public release" />
</body>
</document>


@@ -1,51 +1,45 @@
####################
# Akka Config File #
####################
# This file has all the default settings, so all of these could be removed with no visible effect.
# Modify as needed.
<log>
filename = "./logs/akka.log"
roll = "daily" # Options: never, hourly, daily, sunday/monday/...
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace
filename = "./logs/akka.log"
roll = "daily" # Options: never, hourly, daily, sunday/monday/...
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace
console = on
# syslog_host = ""
# syslog_server_name = ""
</log>
<akka>
version = "v0.5"
#boot = ["sample.scala.Boot"] # FQN to the class doing initial active object/actor
boot = ["sample.java.Boot", "sample.scala.Boot"] # FQN to the class doing initial active object/actor
# supervisor bootstrap, should be defined in default constructor
<management>
service = on
record-stats = on
</management>
version = "0.6"
boot = ["sample.java.Boot", "sample.scala.Boot"] # FQN to the class doing initial active object/actor
# supervisor bootstrap, should be defined in default constructor
<actor>
timeout = 5000 # default timeout for future based invocations
serialize-messages = on # does a deep clone of (non-primitive) messages to ensure immutability
timeout = 5000 # default timeout for future based invocations
serialize-messages = off # does a deep clone of (non-primitive) messages to ensure immutability
</actor>
<stm>
service = on
restart-on-collision = off # (not implemented yet) if 'on' then it reschedules the transaction,
# if 'off' then throws an exception or rollback for user to handle
wait-for-completion = 100 # how long time in millis a transaction should be given time to complete when a collision is detected
wait-nr-of-times = 3 # the number of times it should check for completion of a pending transaction upon collision
distributed = off # not implemented yet
restart-on-collision = off # (not implemented yet) if 'on' then it reschedules the transaction,
# if 'off' then throws an exception or rollback for user to handle
wait-for-completion = 100 # how long time in millis a transaction should be given time to complete when a collision is detected
wait-nr-of-times = 3 # the number of times it should check for completion of a pending transaction upon collision
distributed = off # not implemented yet
</stm>
<remote>
service = on
service = on
hostname = "localhost"
port = 9999
connection-timeout = 1000 # in millis
connection-timeout = 1000 # in millis
</remote>
<rest>
service = on
hostname = "localhost"
@@ -53,15 +47,22 @@
</rest>
<storage>
system = "cassandra" # Options: cassandra (coming: terracotta, mongodb, redis, tokyo-cabinet, voldemort, memcached)
system = "cassandra" # Options: cassandra, mongodb
<cassandra>
service = on
hostname = "127.0.0.1" # ip address or hostname of one of the Cassandra cluster's seeds
hostname = "127.0.0.1" # IP address or hostname of one of the Cassandra cluster's seeds
port = 9160
storage-format = "java" # Options: java, scala-json, java-json, protobuf
consistency-level = 1 #
storage-format = "java" # Options: java, scala-json, java-json, protobuf
consistency-level = 1
</cassandra>
</rest>
</akka>
<mongodb>
service = on
hostname = "127.0.0.1" # IP address or hostname of the MongoDB DB instance
port = 27017
dbname = "mydb"
storage-format = "scala-json" # Options: java, scala-json, java-json, protobuf
</mongodb>
</storage>
</akka>
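
For orientation, these storage settings are read through Configgy with dotted keys, the same way the Kernel changes later in this commit read "akka.version" and "akka.remote.service". Below is a minimal sketch, not part of the commit: it assumes 'akka.conf' is on the classpath, and the StorageSettings case class and StorageSettingsLoader object are hypothetical names used only for illustration.

// Hypothetical sketch: reading the storage section of akka.conf via Configgy,
// mirroring how the kernel reads "akka.version" and "akka.remote.service".
import net.lag.configgy.Configgy

case class StorageSettings(system: String, hostname: String, port: Int, storageFormat: String)

object StorageSettingsLoader {
  def load: StorageSettings = {
    // Assumes 'akka.conf' (with content like akka-reference.conf above) is on the classpath.
    Configgy.configureFromResource("akka.conf", getClass.getClassLoader)
    val config = Configgy.config
    val system = config.getString("akka.storage.system", "cassandra") // "cassandra" or "mongodb"
    StorageSettings(
      system,
      config.getString("akka.storage." + system + ".hostname", "127.0.0.1"),
      config.getInt("akka.storage." + system + ".port", 9160),
      config.getString("akka.storage." + system + ".storage-format", "java"))
  }
}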


@@ -15,6 +15,6 @@ log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n
# Edit the next line to point to your logs directory
log4j.appender.R.File=./logs/cassandra.log
log4j.appender.R.File=./logs/akka.log
log4j.logger.org.atmosphere=DEBUG


@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.codehaus.aspectwerkz</groupId>
<artifactId>aspectwerkz-jdk5</artifactId>
<version>2.1</version>
<packaging>jar</packaging>
</project>


@@ -32,6 +32,11 @@
<artifactId>aspectwerkz-nodeps-jdk5</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>org.codehaus.aspectwerkz</groupId>
<artifactId>aspectwerkz-jdk5</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>scala-stats</artifactId>
@@ -235,6 +240,41 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.dstovall</groupId>
<artifactId>onejar-maven-plugin</artifactId>
<version>1.4.1</version>
<executions>
<execution>
<configuration>
<mainClass>se.scalablesolutions.akka.kernel.Kernel</mainClass>
<onejarVersion>0.96</onejarVersion>
<attachToBuild>true</attachToBuild>
<classifier>onejar</classifier>
</configuration>
<goals>
<goal>one-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<filtering>false</filtering>
<directory>../config</directory>
<includes>
<include>akka.conf</include>
<include>akka-reference.conf</include>
</includes>
</resource>
<resource>
<filtering>false</filtering>
<directory>src/main/resources</directory>
<includes>
<include>META-INF/*</include>
</includes>
</resource>
</resources>
</build>
</project>


@@ -0,0 +1,8 @@
<!DOCTYPE aspectwerkz PUBLIC "-//AspectWerkz//DTD//EN" "http://aspectwerkz.codehaus.org/dtd/aspectwerkz2.dtd">
<aspectwerkz>
<system id="akka">
<package name="se.scalablesolutions.akka.kernel.actor">
<aspect class="ActiveObjectAspect" />
</package>
</system>
</aspectwerkz>


@@ -1,9 +0,0 @@
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD 2.0//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz_2_0.dtd">
<aspectwerkz>
<system id="akka">
<aspect class="se.scalablesolutions.akka.kernel.config.ConfigurationAspect"/>
</system>
</aspectwerkz>


@@ -24,7 +24,7 @@ import kernel.management.Management
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object Kernel extends Logging {
val version = "0.6"
val VERSION = "0.6"
val HOME = {
val home = System.getenv("AKKA_HOME")
if (home == null) None
@@ -33,6 +33,9 @@ object Kernel extends Logging {
val config = setupConfig
val CONFIG_VERSION = config.getString("akka.version", "0")
if (VERSION != CONFIG_VERSION) throw new IllegalStateException("Akka JAR version [" + VERSION + "] is different than the provided config ('akka.conf') version [" + CONFIG_VERSION + "]")
val BOOT_CLASSES = config.getList("akka.boot")
val RUN_REMOTE_SERVICE = config.getBool("akka.remote.service", true)
val RUN_MANAGEMENT_SERVICE = config.getBool("akka.management.service", true)
@@ -73,8 +76,7 @@ object Kernel extends Logging {
if (RUN_REST_SERVICE) startJersey
runApplicationBootClasses
Thread.currentThread.setContextClassLoader(getClass.getClassLoader)
log.info("Akka started successfully")
hasBooted = true
}
@@ -83,21 +85,22 @@ object Kernel extends Logging {
def uptime = (System.currentTimeMillis - startTime) / 1000
def setupConfig: Config = {
try {
Configgy.configureFromResource("akka.conf", getClass.getClassLoader)
log.info("Config loaded from the application classpath.")
} catch {
case e: ParseException =>
if (HOME.isDefined) {
try {
if (HOME.isDefined) {
val configFile = HOME.get + "/config/akka.conf"
log.info("AKKA_HOME is defined to [%s], loading config from [%s].", HOME.get, configFile)
Configgy.configure(configFile)
} else throw new IllegalStateException("AKKA_HOME is not defined and no 'akka.conf' can be found on the classpath, aborting")
val configFile = HOME.get + "/config/akka.conf"
Configgy.configure(configFile)
log.info("AKKA_HOME is defined to [%s], config loaded from [%s].", HOME.get, configFile)
} catch {
case e: ParseException => throw new IllegalStateException("AKKA_HOME is not defined and no 'akka.conf' can be found on the classpath, aborting")
case e: ParseException => throw new IllegalStateException("'akka.conf' config file can not be found in [" + HOME + "/config/akka.conf] - aborting. Either add it in the 'config' directory or add it to the classpath.")
}
}
} else {
try {
Configgy.configureFromResource("akka.conf", getClass.getClassLoader)
log.info("Config loaded from the application classpath.")
} catch {
case e: ParseException => throw new IllegalStateException("'$AKKA_HOME/config/akka.conf' could not be found and no 'akka.conf' can be found on the classpath - aborting. . Either add it in the '$AKKA_HOME/config' directory or add it to the classpath.")
}
}
val config = Configgy.config
config.registerWithJmx("com.scalablesolutions.akka")
// FIXME fix Configgy JMX subscription to allow management
@@ -108,19 +111,20 @@ object Kernel extends Logging {
private[akka] def runApplicationBootClasses = {
new management.RestfulJMXBoot // add the REST/JMX service
val loader =
if (getClass.getClassLoader.getResourceAsStream("akka.conf") != null) getClass.getClassLoader
else if (HOME.isDefined) {
if (HOME.isDefined) {
val CONFIG = HOME.get + "/config"
val DEPLOY = HOME.get + "/deploy"
val DEPLOY_DIR = new File(DEPLOY)
if (!DEPLOY_DIR.exists) { log.error("Could not find a deploy directory at [" + DEPLOY + "]"); System.exit(-1) }
val toDeploy = for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL
//val toDeploy = DEPLOY_DIR.toURL :: (for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL)
log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy.toArray.toList)
new URLClassLoader(toDeploy.toArray, getClass.getClassLoader)
} else if (getClass.getClassLoader.getResourceAsStream("akka.conf") != null) {
getClass.getClassLoader
} else throw new IllegalStateException("AKKA_HOME is not defined and no 'akka.conf' can be found on the classpath, aborting")
for (clazz <- BOOT_CLASSES) {
log.info("Loading boot class [%s]", clazz)
log.info("--------------- LOADER [%s]", loader)
loader.loadClass(clazz).newInstance
}
applicationLoader = Some(loader)
@@ -182,7 +186,7 @@ object Kernel extends Logging {
(____ /__|_ \__|_ \(____ /
\/ \/ \/ \/
""")
log.info(" Running version " + config.getString("akka.version", "Awesome"))
log.info(" Running version " + VERSION)
log.info("==============================")
}


@@ -14,9 +14,11 @@ import kernel.config.ScalaConfig._
import kernel.util._
import serialization.Serializer
import org.codehaus.aspectwerkz.intercept.{Advisable, AroundAdvice}
import org.codehaus.aspectwerkz.intercept.{Advisable, AroundAdvice, Advice}
import org.codehaus.aspectwerkz.joinpoint.{MethodRtti, JoinPoint}
import org.codehaus.aspectwerkz.proxy.Proxy
import org.codehaus.aspectwerkz.annotation.{Aspect, Around}
import org.codehaus.aspectwerkz.aspect.management.Aspects
sealed class ActiveObjectException(msg: String) extends RuntimeException(msg)
class ActiveObjectInvocationTimeoutException(msg: String) extends ActiveObjectException(msg)
@@ -221,9 +223,9 @@ object ActiveObject {
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(target, false, true)
actor.initialize(target, proxy)
// FIXME switch to weaving in the aspect at compile time
proxy.asInstanceOf[Advisable].aw_addAdvice(
MATCH_ALL, new ActorAroundAdvice(target, proxy, actor, remoteAddress, timeout))
actor.timeout = timeout
actor.start
AspectInitRegistry.register(proxy, AspectInit(target, actor, remoteAddress, timeout))
proxy.asInstanceOf[T]
}
@@ -231,11 +233,13 @@ object ActiveObject {
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(Array(intf), Array(target), false, true)
actor.initialize(target.getClass, target)
proxy.asInstanceOf[Advisable].aw_addAdvice(
MATCH_ALL, new ActorAroundAdvice(intf, target, actor, remoteAddress, timeout))
actor.timeout = timeout
actor.start
AspectInitRegistry.register(proxy, AspectInit(intf, actor, remoteAddress, timeout))
proxy.asInstanceOf[T]
}
private[kernel] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = {
object factory extends SupervisorFactory {
override def getSupervisorConfig = SupervisorConfig(restartStrategy, components)
@@ -246,20 +250,46 @@ object ActiveObject {
}
}
object AspectInitRegistry {
private val inits = new java.util.concurrent.ConcurrentHashMap[AnyRef, AspectInit]
def initFor(target: AnyRef) = {
val init = inits.get(target)
inits.remove(target)
init
}
def register(target: AnyRef, init: AspectInit) = inits.put(target, init)
}
sealed case class AspectInit(
val target: Class[_],
val actor: Dispatcher,
val remoteAddress: Option[InetSocketAddress],
val timeout: Long)
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@serializable
sealed class ActorAroundAdvice(val target: Class[_],
val targetInstance: AnyRef,
val actor: Dispatcher,
val remoteAddress: Option[InetSocketAddress],
val timeout: Long) extends AroundAdvice {
val id = target.getName
actor.timeout = timeout
actor.start
def invoke(joinpoint: JoinPoint): AnyRef = dispatch(joinpoint)
@Aspect("perInstance")
sealed class ActiveObjectAspect {
@volatile var isInitialized = false
var target: Class[_] = _
var actor: Dispatcher = _
var remoteAddress: Option[InetSocketAddress] = _
var timeout: Long = _
@Around("execution(* *..*(..))")
def invoke(joinpoint: JoinPoint): AnyRef = {
if (!isInitialized) {
val init = AspectInitRegistry.initFor(joinpoint.getThis)
target = init.target
actor = init.actor
remoteAddress = init.remoteAddress
timeout = init.timeout
isInitialized = true
}
dispatch(joinpoint)
}
private def dispatch(joinpoint: JoinPoint) = {
if (remoteAddress.isDefined) remoteDispatch(joinpoint)
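
The proxy-creation code above replaces the old eager aw_addAdvice registration: the per-proxy data is parked in AspectInitRegistry, and the perInstance ActiveObjectAspect binds itself to that data lazily on its first intercepted call. The following is a simplified, hypothetical re-enactment of that handoff without AspectWerkz, meant only to make the life cycle easier to follow; Init, InitRegistry and PerInstanceInterceptor are invented names, not the actual implementation.

import java.util.concurrent.ConcurrentHashMap

// Init data that cannot be passed to the aspect's constructor is parked here,
// keyed by the proxied instance, until the first call binds it (the role that
// AspectInitRegistry and AspectInit play above).
case class Init(timeout: Long)

object InitRegistry {
  private val inits = new ConcurrentHashMap[AnyRef, Init]
  def register(proxy: AnyRef, init: Init) = inits.put(proxy, init)
  def initFor(proxy: AnyRef): Init = inits.remove(proxy)
}

// Stand-in for the per-instance aspect: created without arguments by the AOP
// runtime, so it initializes itself from the registry on its first invocation.
class PerInstanceInterceptor {
  @volatile private var init: Init = null
  def invoke(proxiedInstance: AnyRef, proceed: () => AnyRef): AnyRef = {
    if (init == null) init = InitRegistry.initFor(proxiedInstance)
    // A real dispatch would hand the call to the backing actor using init.timeout;
    // here we simply proceed with the original call.
    proceed()
  }
}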


@@ -54,7 +54,8 @@ object Actor {
*/
trait Actor extends Logging with TransactionManagement {
Stats.getCounter("NrOfActors").incr
ActorRegistry.register(this)
@volatile private[this] var isRunning: Boolean = false
private[this] val remoteFlagLock = new ReadWriteLock
private[this] val transactionalFlagLock = new ReadWriteLock


@@ -0,0 +1,33 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.actor
import kernel.util.Logging
import scala.collection.jcl.HashMap
/**
* Registry holding all actor instances, mapped by class.
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object ActorRegistry extends Logging {
private val actors = new HashMap[String, List[Actor]]
def actorsFor(clazz: Class[_]): List[Actor] = synchronized {
actors.get(clazz.getName) match {
case None => Nil
case Some(instances) => instances
}
}
def register(actor: Actor) = synchronized {
val name = actor.getClass.getName
actors.get(name) match {
case Some(instances) => actors + (name -> (actor :: instances))
case None => actors + (name -> (actor :: Nil))
}
}
}
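
Because the Actor trait now calls ActorRegistry.register(this) in its constructor (see the Actor trait hunk above), running actors can be looked up by class. A minimal usage sketch, not part of the commit: MyActor is hypothetical, and its receive member assumes the PartialFunction[Any, Unit] message-handler signature used by the kernel's Actor trait at this point.

// Hypothetical usage of the new registry: every Actor registers itself on
// construction, so instances of a concrete actor class can be retrieved later.
import se.scalablesolutions.akka.kernel.actor.{Actor, ActorRegistry}

object ActorRegistryUsage {
  // Assumed shape of a concrete actor in this version of the kernel.
  class MyActor extends Actor {
    def receive: PartialFunction[Any, Unit] = {
      case msg => log.info("received: %s", msg)
    }
  }

  def main(args: Array[String]) = {
    new MyActor
    new MyActor
    val instances: List[Actor] = ActorRegistry.actorsFor(classOf[MyActor])
    println("Registered MyActor instances: " + instances.size)
  }
}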


@@ -47,7 +47,7 @@ object Management extends Logging {
def getStats(reset: Boolean) = {
var statistics = new ArrayBuffer[Tuple2[String, String]]
statistics += (("current time", (System.currentTimeMillis / 1000).toString))
statistics += (("akka version", Kernel.version))
statistics += (("akka version", Kernel.VERSION))
statistics += (("uptime", Kernel.uptime.toString))
for ((key, value) <- Stats.getJvmStats) statistics += (key, value.toString)
for ((key, value) <- Stats.getCounterStats) statistics += (key, value.toString)


@@ -0,0 +1,171 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.management
import javax.management._
import java.lang.management._
/*
object ScalaJMX {
val mbeanServer = ManagementFactory.getPlatformMBeanServer
def register(t: AnyRef, i: Class, name: ObjectName) = mbeanServer.registerMBean(new StandardMBean(t, i), name)
def registerBean(bean: DynamicMBean, name: ObjectName): ObjectInstance = mbeanServer.registerMBean(bean, name)
def register(t: AnyRef, name: String): ObjectInstance = register(t, beanClass(t), name)
def info(name: ObjectName): SBean = mbeanServer.getMBeanInfo(name)
def bean(name: ObjectName): SBeanInfo = convBeanInfo(name, mbeanServer.getMBeanInfo(name))
def invoke(name: ObjectName, operationName: String, params: Array[Object], signature: Array[String]): Object =
mbeanServer.invoke(name, operationName, params, signature)
def call(name: ObjectName, operationName: String): Object = invoke(name, operationName, Array[Object](), Array[String]())
def get(name: ObjectName, attribute: String) = mbeanServer.getAttribute(name, attribute)
def set(name: ObjectName, attribute: String, value: Object) = mbeanServer.setAttribute(name, new Attribute(attribute, value))
implicit def instanceToName(oi: ObjectInstance) = oi.getObjectName()
implicit def stringToName(name: String) = ObjectName.getInstance(name)
implicit def convBean(bi: MBeanInfo):SBean = SBean(bi.getClassName(), bi.getDescription(), bi.getAttributes(), bi.getNotifications(), bi.getOperations(), bi.getConstructors())
implicit def seqToArr(seq: Seq[AnyRef]): Array[Object] = seq.toArray
def convBeanInfo(name: ObjectName, bi: MBeanInfo):SBeanInfo = new SBeanInfo(name, bi.getClassName(), bi.getDescription(), bi.getAttributes(), bi.getNotifications(), bi.getOperations(), bi.getConstructors())
implicit def convAttrs(attrs: Array[MBeanAttributeInfo]): Seq[SAttr] =
for (val a <- attrs) yield a
implicit def convParams(params: Array[MBeanParameterInfo]): Seq[SParameter] =
for (val p <- params) yield p
implicit def convNotes(notes: Array[MBeanNotificationInfo]): Seq[SNotification] =
for (val p <- notes) yield p
implicit def convCons(cons: Array[MBeanConstructorInfo]): Seq[SConstructor] =
for (val p <- cons) yield p
implicit def convOps(cons: Array[MBeanOperationInfo]): Seq[SOperation] =
for (val p <- cons) yield p
implicit def convAttr(attr: MBeanAttributeInfo) = SAttr(attr.getName(), attr.getDescription(), attr.getType(), attr.isIs(), attr.isReadable(), attr.isWritable())
implicit def convNote(note: MBeanNotificationInfo) = SNotification(note.getName(), note.getDescription(), note.getNotifTypes())
implicit def convOp(op: MBeanOperationInfo):SOperation = SOperation(op.getName(), op.getDescription(), op.getImpact(), op.getReturnType(), op.getSignature())
implicit def convCon(con: MBeanConstructorInfo):SConstructor = SConstructor(con getName, con getDescription, con getSignature)
implicit def convParam(p: MBeanParameterInfo) = SParameter(p getName, p getDescription, p getType)
private def beanClass(t: AnyRef) = Class.forName(t.getClass().getName() + "MBean")
}
class MBean(mbeanInterface: String) extends StandardMBean(Class.forName(mbeanInterface))
abstract class SFeature(val name: String, val description: String)
case class SBean(className: String, description: String,
attrs: Seq[SAttr], notes: Seq[SNotification],
ops: Seq[SOperation], cons: Seq[SConstructor]) {
def writable = attrs.toList.filter(sa => sa.writable)
}
class SBeanInfo(name: ObjectName, className: String, description: String,
attrs: Seq[SAttr], notes: Seq[SNotification],
ops: Seq[SOperation], cons: Seq[SConstructor])
extends SBean(className, description, attrs, notes, ops, cons) {
def get(attribute: String) = SJMX.get(name, attribute)
def set(attribute: String, value: Object) = SJMX.set(name, attribute, value)
def call(opName: String) = SJMX.call(name, opName)
}
case class SAttr(
override val name: String,
override val description: String,
jmxType: String, isIs: boolean, readable: boolean, writable: boolean
) extends SFeature(name, description)
case class SNotification(
override val name: String,
override val description: String,
notifTypes: Array[String]) extends SFeature(name, description)
case class SOperation(
override val name: String,
override val description: String,
impact: int,
returnType: String,
signature: Seq[SParameter]) extends SFeature(name, description)
case class SParameter(
override val name: String,
override val description: String,
jmxType: String) extends SFeature(name, description)
case class SConstructor(
override val name: String,
override val description: String,
signature: Seq[SParameter]) extends SFeature(name, description)
*/
/*
package com.soletta.spipe;
import javax.management.{StandardMBean,ObjectName,MBeanInfo};
class SPipe extends MBean("com.soletta.spipe.SPipeMBean") with SPipeMBean {
import Console.println;
import SJMX._;
private var desc: String = "Yipe!";
def go = {
val oname: ObjectName = "default:name=SPipe";
val instance = SJMX.registerBean(this, oname);
set(oname, "Factor", "Hello!");
println(get(oname, "Factor"));
val SBean(n, d, Seq(_, a2, a3, _*), _, ops, _) = info(oname);
println("Bean name is " + n + ", description is " + d);
println("Second attribute is " + a2);
println("Third attribute is " + a3);
println("Writable attributes are " + info(oname).writable);
println("Ops: " + ops);
val x =
<bean name={n} description={d}>
{ops.toList.map(o => <operation name={o.name} description={o.description}/>)}
</bean> ;
println(x);
val inf = bean(oname);
inf.call("start");
println(inf.get("Factor"));
}
def getName = "SPipe!";
def setDescription(d: String) = desc = d;
override def getDescription() = desc;
def getFactor = desc;
def setFactor(s: String) = desc = s;
def isHappy = true;
override def getDescription(info: MBeanInfo) = desc;
}
object PipeMain {
def main(args: Array[String]): unit = {
(new SPipe) go;
}
}
trait SPipeMBean {
def getName: String;
def getDescription: String = getName;
def setDescription(d: String): unit;
def getFactor: String;
def setFactor(s: String): unit;
def isHappy: boolean;
def start() = { Console.println("Starting"); }
def stop() = { }
*/

Binary file not shown.

lib/aspectwerkz-jdk5-2.1.jar (new executable binary file, not shown)

lib/aspectwerkz2.dtd (new executable file, +252 lines)

@@ -0,0 +1,252 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--*****************************************************************************************************************************
AspectWerkz DTD 2.0
This DTD is provided as is. Some elements might have been deprecated since 0.7.4 (see comments)
This DTD might be incomplete - drop a mail on the user / dev mailing list
This DTD might clash with some of existing XML files since it imposes for some elements an order
(see aspect: introductions then pointcuts then advices)
The "unspecified" order of the aspectwerkz childs might change to the one similar of aspects
(transformation scope, introduction-def, advice-def, advices-def, abstract-aspect, aspect, package)
To use this DTD, start your definition file with
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD 2.0//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz_2_0.dtd">
You can also use the "aliasing DTD" that always matches the latest release of AspectWerkz
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz2.dtd">
To not use this DTD, start your definition file with
<?xml version="1.0"?>
Change from 1.0 to 2.0
most of the changes implies non backward compatibility
aspect/introduce element only allows to introduce interface. Use new <mixin element.
introduce@deploymentModel is removed
new system/mixin top level element
deployment-scope
advisable
******************************************************************************************************************************-->
<!--*****************************************************************************************************************************
aspectwerkz
*********************************************************************************************************************************
Aspectwerkz defines the root element
One system defines a namespace. Usually systems are used with several aop.xml file to isolate system
following classloader hierarchies.
******************************************************************************************************************************-->
<!ELEMENT aspectwerkz (system*)>
<!--*****************************************************************************************************************************
system
*********************************************************************************************************************************
System allows isolation of AspectWerkz model
For now we only support a single system.
A system contains use-aspect syntax OR *-def *-ref syntax. The DTD cannot represent that.
Attributes:
id: unique name of the AspectWerkz system (replaces <aspectwerkz id=..> in 0.8.1 DTD)
base-package: provides the global package prefix
Notes:
Due to package nesting, we cannot define an order here
******************************************************************************************************************************-->
<!ELEMENT system (
(exclude | include | prepare | pointcut | deployment-scope | advisable)*,
(aspect | mixin | package)*
)>
<!ATTLIST system
id CDATA #REQUIRED
base-package CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
exclude
*********************************************************************************************************************************
Declares one global package prefix. Any class in the scope cannot have aspects on it within this system
Attributes:
package: package prefix
Note: package does not support pattern, excepted ending ".*" like "foo.*", which is equivalent to "foo."
During weaving, a class is considered to be in the transformation scope with a classFQN.startsWith( ) test.
Note: if a class is both in exclude and include, it is considered excluded.
******************************************************************************************************************************-->
<!ELEMENT exclude EMPTY>
<!ATTLIST exclude
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
include
*********************************************************************************************************************************
Declares one global package prefix. Any class not in the scope cannot have aspects on it within this system
Attributes:
package: package prefix
Note: package does not support pattern, excepted ending ".*" like "foo.*", which is equivalent to "foo."
During weaving, a class is considered to be in the transformation scope with a classFQN.startsWith( ) test.
******************************************************************************************************************************-->
<!ELEMENT include EMPTY>
<!ATTLIST include
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
prepare
*********************************************************************************************************************************
deprecated
******************************************************************************************************************************-->
<!ELEMENT prepare EMPTY>
<!ATTLIST prepare
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
package
*********************************************************************************************************************************
Declares a package prefix which applies to all FQN (aspect) and patterns (pointcuts, class)
contained in the package element.
TODO REMOVE or IMPL - Package elements can be nested to ease writing of package and subpackage
TODO REMOVE or IMPL - (package@name "bar" nested in package@name "foo" means "foo.bar").
Attributes:
name: package prefix
Notes:
Does not support pattern, excepted ending ".*" like "foo.*" which is equivalent to "foo." and to "foo"
******************************************************************************************************************************-->
<!ELEMENT package (
(aspect | mixin)*
)>
<!ATTLIST package
name CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
aspect
*********************************************************************************************************************************
Declares aspect.
Attributes:
name: unique name of the aspect in the system. Defaults to FQN (see class)
class: FQN of Aspect (can have metadata or not)
container: FQN of AspectContainer, defaults to regular org.codehaus.aspectwerkz.aspect.DefaultAspectContainerStrategy
deployment-model: aspect life cycle, defaults to perJVM
valid values are: perJVM | perClass | perInstance | perthis(...pc expr) | pertarget(...pc expr)
******************************************************************************************************************************-->
<!ELEMENT aspect (
param*,
(pointcut | deployment-scope | advice | introduce)*
)>
<!ATTLIST aspect
name CDATA #IMPLIED
class CDATA #REQUIRED
container CDATA #IMPLIED
deployment-model CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
param
*********************************************************************************************************************************
Declares param for a aspect element
******************************************************************************************************************************-->
<!ELEMENT param EMPTY>
<!ATTLIST param
name CDATA #REQUIRED
value CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
pointcut
*********************************************************************************************************************************
Attributes:
name
expression (required unless element data is used)
Element data: equivalent to expression, but ignored if pointcut@expression is already specified
******************************************************************************************************************************-->
<!ELEMENT pointcut (#PCDATA)>
<!ATTLIST pointcut
name CDATA #REQUIRED
expression CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
deployment-scope
*********************************************************************************************************************************
Attributes:
name
expression (required unless element data is used)
Element data: equivalent to expression, but ignored if deployment-scope@expression is already specified
******************************************************************************************************************************-->
<!ELEMENT deployment-scope (#PCDATA)>
<!ATTLIST deployment-scope
name CDATA #REQUIRED
expression CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
advisable
*********************************************************************************************************************************
Attributes:
pointcut-type: the pointcut type: call, get, set, execution, handler, all, or any piped list like get|set
expression: the pointcut expression, can be within/withincode/or more fine grained, as long as consistent with pointcut-type
******************************************************************************************************************************-->
<!ELEMENT advisable EMPTY>
<!ATTLIST advisable
pointcut-type CDATA #REQUIRED
expression CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
advice
*********************************************************************************************************************************
Allow for advice declaration for use in several aspects
Use nested bind-to element for multiple binding
Attributes:
name: method name implementing the advice
type: advice type (around | before | after | after finally | after returning | after throwing), "after xxx(TYPE varName)"
bind-to: pointcut expression
******************************************************************************************************************************-->
<!ELEMENT advice (
bind-to*
)>
<!ATTLIST advice
name CDATA #REQUIRED
type CDATA #REQUIRED
bind-to CDATA #IMPLIED
attribute CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
bind-to
*********************************************************************************************************************************
Allow to bind an advice several times instead of using an OR expression
******************************************************************************************************************************-->
<!ELEMENT bind-to EMPTY>
<!ATTLIST bind-to
pointcut CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
introduce
*********************************************************************************************************************************
Attributes:
class: interface introduction only
bind-to: pointcut expression (only within/hasField/hasMethod makes sense)
******************************************************************************************************************************-->
<!ELEMENT introduce (
bind-to*
)>
<!ATTLIST introduce
class CDATA #REQUIRED
bind-to CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
mixin
*********************************************************************************************************************************
Declares mixin.
Attributes:
class: FQN of mixin
bind-to: pointcut where to bind the mixin, optional (can be specified in annotations)
deployment-model: mixin life cycle, defaults to perInstance
transient: true | false, defaults to false, to control mixin persistence alongside target instances
factory: FQN of MixinFactory, implements org.codehaus.aspectwerkz.aspect.MixinFactory, defaults to regular DefaultMixinFactory
TODO nested pc + package support ??
Elements:
param: name value pairs
******************************************************************************************************************************-->
<!ELEMENT mixin (param*)>
<!ATTLIST mixin
class CDATA #REQUIRED
bind-to CDATA #IMPLIED
deployment-model (perClass | perInstance | perJVM) #IMPLIED
transient (false | true) #IMPLIED
factory CDATA #IMPLIED
>

Binary file not shown.

Binary file not shown.

Binary file not shown.

lib/commons-pool-1.5.1.jar (new executable binary file, not shown)

Binary file not shown.

Binary file not shown.

pom.xml (118 changed lines)

@@ -160,13 +160,6 @@
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<extensions>
<extension>
<groupId>org.apache.maven.wagon</groupId>
<artifactId>wagon-webdav</artifactId>
<version>1.0-beta-2</version>
</extension>
</extensions>
<plugins>
<plugin>
<groupId>org.mortbay.jetty</groupId>
@@ -176,40 +169,6 @@
<scanIntervalSeconds>5</scanIntervalSeconds>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<configuration>
<tasks>
<!-- To start an akka server, use: mvn antrun:run -->
<!--echo message="akka kernel starting..."/>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="runtime_classpath" refid="maven.runtime.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<java classname="com.scalablesolutions.akka.kernel.Kernel" fork="true">
<classpath>
<pathelement path="${runtime_classpath}"/>
<pathelement path="${compile_classpath}"/>
</classpath>
<jvmarg value="-server"/>
<jvmarg value="-Xms256M"/>
<jvmarg value="-Xmx256M"/>
<jvmarg value="-verbosegc"/>
<for profiling/performance...
<jvmarg value="-javaagent:tmp/shiftone-jrat.jar"/>
<jvmarg value="-agentlib:hprof=heap=sites"/>
<jvmarg value="-agentlib:hprof=cpu=samples"/>
<jvmarg value="-agentlib:hprof=cpu=times,thread=y"/>
<arg value="com.scalablesolutions.akka.kernel.Kernel"/>
<arg value="."/>
</java>
<echo message="akka server is shutdown"/-->
</tasks>
</configuration>
</plugin>
<!-- Helps with importing Maven projects into Eclipse correctly -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
@@ -272,35 +231,40 @@
</archive>
</configuration>
</plugin>
<!--plugin>
<groupId>org.dstovall</groupId>
<artifactId>onejar-maven-plugin</artifactId>
<version>1.4.1</version>
<executions>
<execution>
<configuration>
<mainClass>se.scalablesolutions.akka.kernel.Kernel</mainClass>
<onejarVersion>0.96</onejarVersion>
<attachToBuild>true</attachToBuild>
<classifier>onejar</classifier>
</configuration>
<goals>
<goal>one-jar</goal>
</goals>
</execution>
</executions>
</plugin-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<configuration>
<tasks>
<!-- To start an akka server, use: mvn antrun:run -->
<!--echo message="akka kernel starting..."/>
<property name="compile_classpath" refid="maven.compile.classpath"/>
<property name="runtime_classpath" refid="maven.runtime.classpath"/>
<property name="test_classpath" refid="maven.test.classpath"/>
<java classname="com.scalablesolutions.akka.kernel.Kernel" fork="true">
<classpath>
<pathelement path="${runtime_classpath}"/>
<pathelement path="${compile_classpath}"/>
</classpath>
<jvmarg value="-server"/>
<jvmarg value="-Xms256M"/>
<jvmarg value="-Xmx256M"/>
<jvmarg value="-verbosegc"/>
<for profiling/performance...
<jvmarg value="-javaagent:tmp/shiftone-jrat.jar"/>
<jvmarg value="-agentlib:hprof=heap=sites"/>
<jvmarg value="-agentlib:hprof=cpu=samples"/>
<jvmarg value="-agentlib:hprof=cpu=times,thread=y"/>
<arg value="com.scalablesolutions.akka.kernel.Kernel"/>
<arg value="."/>
</java>
<echo message="akka server is shutdown"/-->
</tasks>
</configuration>
</plugin>
</plugins>
<resources>
<resource>
<filtering>false</filtering>
<directory>config</directory>
<includes>
<include>akka.conf</include>
<include>akka-reference.conf</include>
</includes>
</resource>
</resources>
<pluginManagement>
<plugins>
<plugin>
@@ -336,6 +300,20 @@
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
<version>2.3</version>
<configuration>
<tags>
<tag>FIXME</tag>
<tag>TODO</tag>
<tag>XXX</tag>
<tag>@todo</tag>
<tag>@deprecated</tag>
</tags>
</configuration>
</plugin>
<plugin>
<artifactId>maven-project-info-reports-plugin</artifactId>
</plugin>
@@ -366,7 +344,7 @@
</reportSet>
</reportSets>
<configuration>
<xmlPath>${basedir}/src/changes/changes.xml</xmlPath>
<xmlPath>${basedir}/changes.xml</xmlPath>
</configuration>
</plugin>
<plugin>