merged with upstream

This commit is contained in:
Jonas Boner 2009-08-14 08:28:27 +02:00
commit ce02e9f8be
108 changed files with 4137 additions and 1061 deletions

232
akka.ipr
View file

@ -559,6 +559,7 @@
<module fileurl="file://$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" filepath="$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" />
<module fileurl="file://$PROJECT_DIR$/kernel/akka-kernel.iml" filepath="$PROJECT_DIR$/kernel/akka-kernel.iml" />
<module fileurl="file://$PROJECT_DIR$/samples-java/akka-samples-java.iml" filepath="$PROJECT_DIR$/samples-java/akka-samples-java.iml" />
<module fileurl="file://$PROJECT_DIR$/samples-lift/akka-samples-lift.iml" filepath="$PROJECT_DIR$/samples-lift/akka-samples-lift.iml" />
<module fileurl="file://$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" filepath="$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" />
<module fileurl="file://$PROJECT_DIR$/util-java/akka-util-java.iml" filepath="$PROJECT_DIR$/util-java/akka-util-java.iml" />
</modules>
@ -916,17 +917,6 @@
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.10/jaxb-impl-2.1.10-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.jboss.netty:netty:3.1.0.CR1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.CR1/netty-3.1.0.CR1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.CR1/netty-3.1.0.CR1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.CR1/netty-3.1.0.CR1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.9.9">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/grizzly/grizzly-servlet-webserver/1.9.9/grizzly-servlet-webserver-1.9.9.jar!/" />
@ -1315,28 +1305,6 @@
<root url="jar://$MAVEN_REPOSITORY$/org/apache/camel/camel-core/2.0-SNAPSHOT/camel-core-2.0-SNAPSHOT-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-logging:commons-logging-api:1.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.sun.xml.bind:jaxb-impl:2.1.6">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.codehaus.:jackson:3.1.0">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/3.1.0/jackson-3.1.0.jar!/" />
@ -1654,6 +1622,204 @@
<root url="jar://$MAVEN_REPOSITORY$/org/atmosphere/atmosphere-compat/0.3/atmosphere-compat-0.3-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: se.foldleft:cassidy:0.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/se/foldleft/cassidy/0.1/cassidy-0.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/se/foldleft/cassidy/0.1/cassidy-0.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/se/foldleft/cassidy/0.1/cassidy-0.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.sun.xml.bind:jaxb-impl:2.1.12">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.12/jaxb-impl-2.1.12.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.12/jaxb-impl-2.1.12-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.12/jaxb-impl-2.1.12-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.scala-lang:scala-compiler:2.7.5">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-compiler/2.7.5/scala-compiler-2.7.5.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-compiler/2.7.5/scala-compiler-2.7.5-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-compiler/2.7.5/scala-compiler-2.7.5-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: net.liftweb:lift-util:1.1-SNAPSHOT">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-util/1.1-SNAPSHOT/lift-util-1.1-SNAPSHOT.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-util/1.1-SNAPSHOT/lift-util-1.1-SNAPSHOT-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-util/1.1-SNAPSHOT/lift-util-1.1-SNAPSHOT-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: javax.mail:mail:1.4">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/javax/mail/mail/1.4/mail-1.4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/javax/mail/mail/1.4/mail-1.4-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/javax/mail/mail/1.4/mail-1.4-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-httpclient:commons-httpclient:3.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-codec:commons-codec:1.3">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.3/commons-codec-1.3.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.3/commons-codec-1.3-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.3/commons-codec-1.3-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-fileupload:commons-fileupload:1.2.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-fileupload/commons-fileupload/1.2.1/commons-fileupload-1.2.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-fileupload/commons-fileupload/1.2.1/commons-fileupload-1.2.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-fileupload/commons-fileupload/1.2.1/commons-fileupload-1.2.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: net.liftweb:lift-webkit:1.1-SNAPSHOT">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-webkit/1.1-SNAPSHOT/lift-webkit-1.1-SNAPSHOT.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-webkit/1.1-SNAPSHOT/lift-webkit-1.1-SNAPSHOT-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-webkit/1.1-SNAPSHOT/lift-webkit-1.1-SNAPSHOT-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: net.liftweb:lift-actor:1.1-SNAPSHOT">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-actor/1.1-SNAPSHOT/lift-actor-1.1-SNAPSHOT.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-actor/1.1-SNAPSHOT/lift-actor-1.1-SNAPSHOT-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/net/liftweb/lift-actor/1.1-SNAPSHOT/lift-actor-1.1-SNAPSHOT-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.mortbay.jetty:jetty:7.0.0.pre5">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/7.0.0.pre5/jetty-7.0.0.pre5.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/7.0.0.pre5/jetty-7.0.0.pre5-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/7.0.0.pre5/jetty-7.0.0.pre5-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.mortbay.jetty:servlet-api:3.0.pre4">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/3.0.pre4/servlet-api-3.0.pre4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/3.0.pre4/servlet-api-3.0.pre4-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/servlet-api/3.0.pre4/servlet-api-3.0.pre4-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.mortbay.jetty:jetty-util:7.0.0.pre5">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/7.0.0.pre5/jetty-util-7.0.0.pre5.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/7.0.0.pre5/jetty-util-7.0.0.pre5-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/7.0.0.pre5/jetty-util-7.0.0.pre5-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-pool:commons-pool:1.5.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: commons-logging:commons-logging-api:1.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging-api/1.1/commons-logging-api-1.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: com.sun.xml.bind:jaxb-impl:2.1.6">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/com/sun/xml/bind/jaxb-impl/2.1.6/jaxb-impl-2.1.6-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.scala-tools:javautils:2.7.4-0.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-tools/javautils/2.7.4-0.1/javautils-2.7.4-0.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-tools/javautils/2.7.4-0.1/javautils-2.7.4-0.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/scala-tools/javautils/2.7.4-0.1/javautils-2.7.4-0.1-sources.jar!/" />
</SOURCES>
</library>
<library name="Maven: org.jboss.netty:netty:3.1.0.GA">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.GA/netty-3.1.0.GA.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.GA/netty-3.1.0.GA-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/jboss/netty/netty/3.1.0.GA/netty-3.1.0.GA-sources.jar!/" />
</SOURCES>
</library>
</component>
<UsedPathMacros>
<macro name="MAVEN_REPOSITORY" description="Maven Local Repostiry" />

590
akka.iws
View file

@ -5,17 +5,7 @@
<commander_bookmark type="package" url="file://$PROJECT_DIR$/kernel/src/main/scala/actor" />
</component>
<component name="ChangeListManager">
<list default="true" id="212ccd86-01aa-4780-a2f0-0d130be5abd2" name="Test" comment="Test">
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/akka-kernel.iml" afterPath="$PROJECT_DIR$/kernel/akka-kernel.iml" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" afterPath="$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/jersey/AkkaServlet.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/jersey/AkkaServlet.scala" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" afterPath="$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-java/akka-samples-java.iml" afterPath="$PROJECT_DIR$/samples-java/akka-samples-java.iml" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.ipr" afterPath="$PROJECT_DIR$/akka.ipr" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala" afterPath="$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.iws" afterPath="$PROJECT_DIR$/akka.iws" />
</list>
<list default="true" id="212ccd86-01aa-4780-a2f0-0d130be5abd2" name="Test" comment="Test" />
<list readonly="true" id="6e842704-fac6-40e9-8a67-d02385f87db9" name="Default" comment="&#10;# Brief commit desciption here&#10;&#10;# Full commit description here (comment lines starting with '#' will not be included)&#10;&#10;" />
<ignored path=".idea/workspace.xml" />
<ignored path="akka.iws" />
@ -74,6 +64,19 @@
<option name="CONDITION" value="" />
<option name="LOG_MESSAGE" value="" />
</breakpoint>
<breakpoint url="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" line="126" class="Class at Kernel.scala:126" package="">
<option name="ENABLED" value="true" />
<option name="LOG_ENABLED" value="false" />
<option name="LOG_EXPRESSION_ENABLED" value="false" />
<option name="SUSPEND_POLICY" value="SuspendAll" />
<option name="COUNT_FILTER_ENABLED" value="false" />
<option name="COUNT_FILTER" value="0" />
<option name="CONDITION_ENABLED" value="false" />
<option name="CLASS_FILTERS_ENABLED" value="false" />
<option name="INSTANCE_FILTERS_ENABLED" value="false" />
<option name="CONDITION" value="" />
<option name="LOG_MESSAGE" value="" />
</breakpoint>
</line_breakpoints>
<breakpoint_any>
<breakpoint>
@ -137,82 +140,91 @@
<component name="FileColors" enabled="false" enabledForTabs="false" />
<component name="FileEditorManager">
<leaf>
<file leaf-file-name="MessageDispatcherBase.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/MessageDispatcherBase.scala">
<file leaf-file-name="CassandraStorage.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="13" column="0" selection-start="398" selection-end="398" vertical-scroll-proportion="0.0">
<state line="22" column="0" selection-start="0" selection-end="470" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="Actor.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/actor/Actor.scala">
<file leaf-file-name="CassandraSession.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraSession.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="33" column="131" selection-start="1265" selection-end="1265" vertical-scroll-proportion="0.0">
<state line="22" column="6" selection-start="476" selection-end="476" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="AkkaServlet.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/jersey/AkkaServlet.scala">
<file leaf-file-name="SimpleService.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/samples-java/src/main/java/sample/java/SimpleService.java">
<provider selected="true" editor-type-id="text-editor">
<state line="60" column="80" selection-start="2412" selection-end="2412" vertical-scroll-proportion="0.0">
<state line="31" column="33" selection-start="971" selection-end="971" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="SimpleService.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala">
<file leaf-file-name="PersistentStateful.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java">
<provider selected="true" editor-type-id="text-editor">
<state line="15" column="40" selection-start="609" selection-end="609" vertical-scroll-proportion="0.0">
<state line="19" column="42" selection-start="764" selection-end="764" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="Kernel.scala" pinned="false" current="true" current-in-tab="true">
<file leaf-file-name="PersistentActorSpec.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/test/scala/PersistentActorSpec.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="101" column="6" selection-start="3487" selection-end="3487" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="Kernel.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="20" column="24" selection-start="528" selection-end="528" vertical-scroll-proportion="0.39889196">
<state line="14" column="77" selection-start="408" selection-end="408" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="EventBasedThreadPoolDispatcherTest.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala">
<file leaf-file-name="akka-reference.conf" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/config/akka-reference.conf">
<provider selected="true" editor-type-id="text-editor">
<state line="2" column="189" selection-start="113" selection-end="113" vertical-scroll-proportion="0.0">
<state line="57" column="34" selection-start="2152" selection-end="2152" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="Future.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/Future.scala">
<file leaf-file-name="pom.xml" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/pom.xml">
<provider selected="true" editor-type-id="text-editor">
<state line="14" column="13" selection-start="411" selection-end="411" vertical-scroll-proportion="0.0">
<state line="55" column="20" selection-start="1626" selection-end="1626" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="Reactor.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/Reactor.scala">
<file leaf-file-name="State.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/State.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="43" column="0" selection-start="1056" selection-end="1056" vertical-scroll-proportion="0.0">
<state line="39" column="6" selection-start="1032" selection-end="1032" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
<file leaf-file-name="ActiveObjectGuiceConfiguratorTest.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java">
<file leaf-file-name="pom.xml" pinned="false" current="true" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/fun-test-java/pom.xml">
<provider selected="true" editor-type-id="text-editor">
<state line="23" column="77" selection-start="822" selection-end="822" vertical-scroll-proportion="0.0">
<state line="87" column="0" selection-start="2599" selection-end="2599" vertical-scroll-proportion="0.6960651">
<folding />
</state>
</provider>
@ -231,22 +243,22 @@
<component name="IdeDocumentHistory">
<option name="changedFiles">
<list>
<option value="$PROJECT_DIR$/kernel/src/main/scala/serialization/Serializer.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/nio/RemoteProtocolBuilder.scala" />
<option value="$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/Foo.java" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/serialization/Serializable.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/reactor/MessageDispatcherBase.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/actor/Actor.scala" />
<option value="$PROJECT_DIR$/kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala" />
<option value="$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/reactor/Dispatchers.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/reactor/Reactor.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala" />
<option value="$PROJECT_DIR$/samples-java/src/main/java/sample/java/SimpleService.java" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/jersey/AkkaServlet.scala" />
<option value="$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/Boot.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/actor/ActiveObject.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/serialization/Serializer.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/util/Helpers.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/state/State.scala" />
<option value="$PROJECT_DIR$/kernel/pom.xml" />
<option value="$PROJECT_DIR$/pom.xml" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/state/Pool.scala" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" />
<option value="$PROJECT_DIR$/config/akka-reference.conf" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala" />
<option value="$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java" />
<option value="$PROJECT_DIR$/kernel/src/main/scala/state/CassandraSession.scala" />
<option value="$PROJECT_DIR$/samples-java/src/main/java/sample/java/SimpleService.java" />
<option value="$PROJECT_DIR$/fun-test-java/pom.xml" />
</list>
</option>
</component>
@ -284,7 +296,7 @@
</expanded-state>
<selected-state>
<State>
<id>Inspections</id>
<id>AOP</id>
</State>
</selected-state>
</profile-state>
@ -362,6 +374,12 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
@ -464,6 +482,128 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-kernel" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Maven: org.apache.cassandra:cassandra:0.4.0-dev" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="cassandra-0.4.0-dev.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-kernel" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Maven: org.apache.cassandra:cassandra:0.4.0-dev" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="cassandra-0.4.0-dev.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="service" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-kernel" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Maven: org.apache.cassandra:cassandra:0.4.0-dev" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="cassandra-0.4.0-dev.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="service" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Cassandra" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-kernel" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Maven: commons-pool:commons-pool:1.5.1" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="commons-pool-1.5.1.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-kernel" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Maven: com.facebook:thrift:1.0" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="thrift-1.0.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
@ -495,6 +635,10 @@
<option name="myItemId" value="src" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="test" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
@ -548,7 +692,7 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="jersey" />
<option name="myItemId" value="util" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
@ -578,7 +722,7 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="actor" />
<option name="myItemId" value="state" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
@ -688,75 +832,7 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="ProtobufSerializationTest" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-fun-test-java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="fun-test-java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="src" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="test" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="api" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="InMemoryStateTest" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka-fun-test-java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="fun-test-java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="src" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="test" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="java" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="api" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="ActiveObjectGuiceConfiguratorTest" />
<option name="myItemId" value="PersistentStateful" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
</PATH_ELEMENT>
</PATH>
@ -769,6 +845,92 @@
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="&lt; 1.6 &gt;" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="&lt; 1.6 &gt;" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="classes.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="&lt; 1.6 &gt;" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="classes.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="lang" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="Libraries" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.LibraryGroupNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="&lt; 1.6 &gt;" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.NamedLibraryElementNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="classes.jar" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="lang" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="ClassLoader" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
@ -784,6 +946,24 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
<PATH_ELEMENT>
<option name="myItemId" value="config" />
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
</subPane>
</component>
<component name="ProjectReloadState">
@ -833,7 +1013,7 @@
<recent name="stm" />
</key>
</component>
<component name="RunManager" selected="JUnit.InMemoryStateTest">
<component name="RunManager" selected="JUnit.PersistentActorSpec">
<configuration default="false" name="InMemoryStateTest.testRefShouldRollbackStateForStatefulServerInCaseOfFailure" type="JUnit" factoryName="JUnit" temporary="true" enabled="false" merge="false" sample_coverage="true" runner="emma">
<pattern>
<option name="PATTERN" value="se.scalablesolutions.akka.api.*" />
@ -2002,16 +2182,17 @@
<frame x="4" y="22" width="1436" height="878" extended-state="6" />
<editor active="true" />
<layout>
<window_info id="Maven Projects" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3294881" sideWeight="0.5212177" order="5" side_tool="false" />
<window_info id="Maven Projects" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3294881" sideWeight="0.6619898" order="5" side_tool="false" />
<window_info id="Dependency Viewer" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.33" sideWeight="0.5" order="15" side_tool="false" />
<window_info id="Palette" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.33" sideWeight="0.5" order="6" side_tool="false" />
<window_info id="Ant Build" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.24906267" sideWeight="0.5212177" order="1" side_tool="false" />
<window_info id="Changes" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32749078" sideWeight="0.5" order="9" side_tool="false" />
<window_info id="Run" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.601476" sideWeight="0.0" order="2" side_tool="false" x="4" y="22" width="1436" height="878" />
<window_info id="Run" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.28316328" sideWeight="0.0" order="2" side_tool="false" x="4" y="22" width="1436" height="878" />
<window_info id="TODO" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32749078" sideWeight="0.5" order="6" side_tool="false" />
<window_info id="Project" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.1744773" sideWeight="0.6721939" order="1" side_tool="false" />
<window_info id="Project" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.1744773" sideWeight="0.6619898" order="1" side_tool="false" />
<window_info id="Debug" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.39668366" sideWeight="0.5" order="3" side_tool="false" />
<window_info id="Structure" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.16818425" sideWeight="0.24354243" order="0" side_tool="true" x="1609" y="144" width="252" height="862" />
<window_info id="Messages" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32908162" sideWeight="0.0" order="14" side_tool="false" />
<window_info id="Version Control" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3265306" sideWeight="0.5" order="13" side_tool="false" />
<window_info id="Web Preview" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32822478" sideWeight="0.5" order="16" side_tool="false" />
<window_info id="Dataflow to this" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.33" sideWeight="0.5" order="17" side_tool="false" />
@ -2022,8 +2203,7 @@
<window_info id="Clojure REPL" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32749078" sideWeight="0.5" order="8" side_tool="false" />
<window_info id="Data Sources" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3294881" sideWeight="0.6875" order="3" side_tool="false" />
<window_info id="Web" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.26479077" sideWeight="0.24617347" order="2" side_tool="true" />
<window_info id="Find" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.31887755" sideWeight="0.5" order="1" side_tool="false" />
<window_info id="Messages" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3265306" sideWeight="0.0" order="14" side_tool="false" />
<window_info id="Find" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.31887755" sideWeight="0.5" order="1" side_tool="false" />
<window_info id="Inspection" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.4" sideWeight="0.5" order="5" side_tool="false" />
<window_info id="FindBugs-IDEA" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32976654" sideWeight="0.5" order="12" side_tool="false" />
<window_info id="Cvs" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.25" sideWeight="0.5" order="4" side_tool="false" />
@ -2066,114 +2246,114 @@
<option name="FILTER_TARGETS" value="false" />
</component>
<component name="editorHistoryManager">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/config/ActiveObjectGuiceConfigurator.scala">
<entry file="jar://$MAVEN_REPOSITORY$/com/facebook/thrift/1.0/thrift-1.0.jar!/org/apache/thrift/transport/TIOStreamTransport.class">
<provider selected="true" editor-type-id="text-editor">
<state line="100" column="53" selection-start="4167" selection-end="4167" vertical-scroll-proportion="0.0">
<state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/serialization/Serializer.scala">
<entry file="jar://$MAVEN_REPOSITORY$/com/facebook/thrift/1.0/thrift-1.0.jar!/org/apache/thrift/transport/TSocket.class">
<provider selected="true" editor-type-id="text-editor">
<state line="60" column="6" selection-start="1648" selection-end="1648" vertical-scroll-proportion="0.0">
<state line="5" column="64" selection-start="216" selection-end="216" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala">
<entry file="jar://$MAVEN_REPOSITORY$/com/facebook/thrift/1.0/thrift-1.0.jar!/org/apache/thrift/protocol/TProtocol.class">
<provider selected="true" editor-type-id="text-editor">
<state line="59" column="25" selection-start="1920" selection-end="1920" vertical-scroll-proportion="0.0">
<state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/Dispatchers.scala">
<entry file="jar://$MAVEN_REPOSITORY$/com/facebook/thrift/1.0/thrift-1.0.jar!/org/apache/thrift/protocol/TBinaryProtocol.class">
<provider selected="true" editor-type-id="text-editor">
<state line="52" column="78" selection-start="1532" selection-end="1532" vertical-scroll-proportion="0.0">
<state line="5" column="60" selection-start="211" selection-end="211" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala">
<entry file="jar://$MAVEN_REPOSITORY$/org/apache/cassandra/cassandra/0.4.0-dev/cassandra-0.4.0-dev.jar!/org/apache/cassandra/service/Cassandra.class">
<provider selected="true" editor-type-id="text-editor">
<state line="182" column="195" selection-start="6148" selection-end="6148" vertical-scroll-proportion="0.0">
<state line="2686" column="57" selection-start="135796" selection-end="135796" vertical-scroll-proportion="0.4197031">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/State.scala">
<entry file="jar://$MAVEN_REPOSITORY$/net/lag/configgy/1.3/configgy-1.3.jar!/net/lag/configgy/ParseException.class">
<provider selected="true" editor-type-id="text-editor">
<state line="167" column="119" selection-start="5039" selection-end="5039" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/MessageDispatcherBase.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="13" column="0" selection-start="398" selection-end="398" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="2" column="189" selection-start="113" selection-end="113" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/Future.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="14" column="13" selection-start="411" selection-end="411" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/reactor/Reactor.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="43" column="0" selection-start="1056" selection-end="1056" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java">
<provider selected="true" editor-type-id="text-editor">
<state line="23" column="77" selection-start="822" selection-end="822" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/samples-java/src/main/java/sample/java/SimpleService.java">
<provider selected="true" editor-type-id="text-editor">
<state line="4" column="0" selection-start="51" selection-end="51" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/actor/Actor.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="33" column="131" selection-start="1265" selection-end="1265" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/jersey/AkkaServlet.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="60" column="80" selection-start="2412" selection-end="2412" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="15" column="40" selection-start="609" selection-end="609" vertical-scroll-proportion="0.0">
<state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="20" column="24" selection-start="528" selection-end="528" vertical-scroll-proportion="0.39889196">
<state line="14" column="77" selection-start="408" selection-end="408" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/test/scala/PersistentActorSpec.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="101" column="6" selection-start="3487" selection-end="3487" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/pom.xml">
<provider selected="true" editor-type-id="text-editor">
<state line="55" column="20" selection-start="1626" selection-end="1626" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/config/akka-reference.conf">
<provider selected="true" editor-type-id="text-editor">
<state line="57" column="34" selection-start="2152" selection-end="2152" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="22" column="0" selection-start="0" selection-end="470" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java">
<provider selected="true" editor-type-id="text-editor">
<state line="19" column="42" selection-start="764" selection-end="764" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraSession.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="22" column="6" selection-start="476" selection-end="476" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/samples-java/src/main/java/sample/java/SimpleService.java">
<provider selected="true" editor-type-id="text-editor">
<state line="31" column="33" selection-start="971" selection-end="971" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/State.scala">
<provider selected="true" editor-type-id="text-editor">
<state line="39" column="6" selection-start="1032" selection-end="1032" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/fun-test-java/pom.xml">
<provider selected="true" editor-type-id="text-editor">
<state line="87" column="0" selection-start="2599" selection-end="2599" vertical-scroll-proportion="0.6960651">
<folding />
</state>
</provider>

85
bin/start-akka-server.bat Executable file
View file

@ -0,0 +1,85 @@
@ECHO OFF
@REM ---------------------------------------------------------------------------
@REM Starts the Akka kernel on Windows.
@REM Requires AKKA_HOME and JAVA_HOME to be set. JAVA_COMMAND may be pre-set by
@REM the caller to use an alternative 'java' executable; otherwise it defaults
@REM to %JAVA_HOME%\bin\java.
@REM ---------------------------------------------------------------------------
@REM IF "%1"=="" goto error
IF "%AKKA_HOME%"=="" goto error_no_akka_home
IF "%JAVA_HOME%"=="" goto error_no_java_home
@REM Default JAVA_COMMAND only after JAVA_HOME is known to be set, otherwise it
@REM would silently become the bogus value "\bin\java".
IF "%JAVA_COMMAND%"=="" set JAVA_COMMAND=%JAVA_HOME%\bin\java
set VERSION=0.5
set LIB_DIR=%AKKA_HOME%\lib
@REM Build the kernel classpath: config directory first, then every bundled jar.
set CLASSPATH=%AKKA_HOME%\config
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\akka-kernel-0.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\akka-util-java-0.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aopalliance-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\asm-3.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aspectwerkz-jdk5-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\aspectwerkz-nodeps-jdk5-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-core-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-portable-runtime-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\camel-core-2.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\atmosphere-compat-0.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\cassandra-0.4.0-trunk.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\cglib-2.2.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-cli-1.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-io-1.3.2.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-logging-1.0.4.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-math-1.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\commons-pool-1.5.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\configgy-1.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\fscontext.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\google-collect-snapshot-20090211.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\grizzly-comet-webserver-1.8.6.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\guice-core-2.0-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jackson-core-asl-1.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jackson-mapper-asl-1.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-client-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-core-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-json-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-server-1.1.1-ea.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jersey-scala-1.1.2-ea-SNAPSHOT.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\JSAP-2.1.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\jsr311-api-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\libfb303.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\libthrift.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\log4j-1.2.15.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\netty-3.1.0.GA.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\providerutil.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\protobuf-java-2.1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\scala-library-2.7.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\scala-stats-1.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\servlet-api-2.5.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\slf4j-api-1.4.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\slf4j-log4j12-1.4.3.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\stringtemplate-3.0.jar
set CLASSPATH=%CLASSPATH%;%LIB_DIR%\zookeeper-3.1.0.jar
@REM Add for debugging: -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005
@REM To have Akka dump the generated classes, add the '-Daspectwerkz.transform.dump=*' option and it will dump classes to %AKKA_HOME%\_dump
set JVM_OPTS=-server -Xms128M -Xmx1G -XX:SurvivorRatio=8 -XX:TargetSurvivorRatio=90 -XX:+AggressiveOpts -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=1 -XX:+CMSParallelRemarkEnabled -XX:+HeapDumpOnOutOfMemoryError -Dcom.sun.management.jmxremote.port=8080 -Dcom.sun.management.jmxremote.ssl=false -Djava.naming.factory.initial=com.sun.jndi.fscontext.RefFSContextFactory -Dcom.sun.grizzly.cometSupport=true -Dcom.sun.management.jmxremote.authenticate=false
@ECHO ON
@REM Use JAVA_COMMAND (which defaults to %JAVA_HOME%\bin\java above) so a
@REM caller-supplied override is actually honored.
%JAVA_COMMAND% %JVM_OPTS% -cp %CLASSPATH% se.scalablesolutions.akka.kernel.Kernel %1 %2 %3
@exit /B %ERRORLEVEL%

@REM Batch labels must start with ':' (';' labels are never found by 'goto').
:error
IF EXIST "%AKKA_HOME%\bin\usage.txt" (
type "%AKKA_HOME%\bin\usage.txt"
) ELSE (
echo AKKA_HOME does not point to the Akka directory
)
@goto error_exit

:error_no_java_home
@echo Please specify the JAVA_HOME environment variable.
@goto error_exit

:error_no_akka_home
@echo Please specify the AKKA_HOME environment variable.
@goto error_exit

:error_exit
@exit /B -1

View file

@ -3,33 +3,23 @@
VERSION=0.5
BASE_DIR=$(dirname $0)/..
echo 'Starting Akka Kernel from directory' $BASE_DIR
echo 'Resetting persistent storage in' $BASE_DIR/storage
rm -rf $BASE_DIR/storage
mkdir $BASE_DIR/storage
mkdir $BASE_DIR/storage/bootstrap
mkdir $BASE_DIR/storage/callouts
mkdir $BASE_DIR/storage/commitlog
mkdir $BASE_DIR/storage/data
mkdir $BASE_DIR/storage/system
LIB_DIR=$BASE_DIR/lib
CLASSPATH=$BASE_DIR/config
CLASSPATH=$CLASSPATH:$LIB_DIR/akka-kernel-0.5.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/akka-util-java-0.5.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/antlr-3.1.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aopalliance-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/asm-3.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aspectwerkz-jdk5-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/aspectwerkz-nodeps-jdk5-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-core-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-portable-runtime-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/camel-core-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/atmosphere-compat-0.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cassandra-0.4.0-dev.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cassandra-0.4.0-trunk.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/cglib-2.2.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-cli-1.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-collections-3.2.1.jar
@ -38,31 +28,30 @@ CLASSPATH=$CLASSPATH:$LIB_DIR/commons-javaflow-1.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-lang-2.4.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-logging-1.0.4.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-math-1.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/commons-pool-1.5.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/configgy-1.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/fscontext.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/google-collect-snapshot-20090211.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/grizzly-comet-webserver-1.8.6.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/guice-core-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/guice-jsr250-2.0-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/high-scale-lib.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jackson-core-asl-1.1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jackson-mapper-asl-1.1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/javautils-2.7.4-0.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-client-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-core-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-json-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-server-1.1.1-ea.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jersey-scala-1.1.2-ea-SNAPSHOT.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/JSAP-2.1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jsr250-api-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/jsr311-api-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/libfb303.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/libthrift.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/log4j-1.2.15.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/lucene-core-2.2.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/netty-3.1.0.CR1.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/netty-3.1.0.GA.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/providerutil.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/protobuf-java-2.1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/scala-library-2.7.5.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/scala-stats-1.0.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/servlet-api-2.5.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/slf4j-api-1.4.3.jar
CLASSPATH=$CLASSPATH:$LIB_DIR/slf4j-log4j12-1.4.3.jar
@ -74,7 +63,7 @@ CLASSPATH=$CLASSPATH:$LIB_DIR/zookeeper-3.1.0.jar
JVM_OPTS=" \
-server \
-Xms128M \
-Xmx2G \
-Xmx1G \
-XX:SurvivorRatio=8 \
-XX:TargetSurvivorRatio=90 \
-XX:+AggressiveOpts \
@ -89,6 +78,5 @@ JVM_OPTS=" \
-Dcom.sun.grizzly.cometSupport=true \
-Dcom.sun.management.jmxremote.authenticate=false"
#$JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.Boot se.scalablesolutions.akka.kernel.Kernel ${1}
echo $JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.kernel.Kernel ${1}
$JAVA_HOME/bin/java $JVM_OPTS -cp $CLASSPATH se.scalablesolutions.akka.kernel.Kernel ${1}

View file

@ -18,7 +18,7 @@ see http://maven.apache.org/plugins/maven-changes-plugin/usage.html for full gui
<body>
<release version="0.6" date="" description="">
<action dev="Debasish Ghosh" type="add">MongoDB as Akka storage backend </action>
<action dev="Tim Perrett" type="add">Support for using the Lift Web framework
<action dev="Tim Perrett" type="add">Support for using the Lift Web framework with Actors</action>
<action dev="Viktor Klang" type="add">Support for using Scala XML tags in RESTful Actors (scala-jersey)</action>
<action dev="Viktor Klang" type="add">Support for Comet Actors using Atmosphere</action>
<action dev="Jonas Bon&#233;r" type="add">Statistics recorder with JMX and REST APIs</action>

View file

@ -1,66 +1,68 @@
#################################
# Akka Actor Kernel Config File #
#################################
# This file has all the default settings, so all of these could be removed with no visible effect.
# Modify as needed.
<log>
filename = "./logs/akka.log"
roll = "daily" # Options: never, hourly, daily, sunday/monday/...
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace
console = on
# syslog_host = ""
# syslog_server_name = ""
</log>
<akka>
version = "v0.5"
boot = ["sample.java.Boot", "sample.scala.Boot"] # FQN to the class doing initial active object/actor
# supervisor bootstrap, should be defined in default constructor
<actor>
timeout = 5000 # default timeout for future based invocations
concurrent-mode = off # if turned on, then the same actor instance is allowed to execute concurrently -
# e.g. departing from the actor model for better performance
serialize-messages = on # does a deep clone of (non-primitive) messages to ensure immutability
</actor>
<stm>
service = on
restart-on-collision = off # (not implemented yet) if 'on' then it reschedules the transaction,
# if 'off' then throws an exception or rollback for user to handle
wait-for-completion = 100 # how long time in millis a transaction should be given time to complete when a collision is detected
wait-nr-of-times = 3 # the number of times it should check for completion of a pending transaction upon collision
distributed = off # not implemented yet
</stm>
<remote>
service = on
hostname = "localhost"
port = 9999
connection-timeout = 1000 # in millis
</remote>
<rest>
service = on
hostname = "localhost"
port = 9998
</rest>
<storage>
system = "cassandra" # Options: cassandra (coming: terracotta, redis, tokyo-cabinet, tokyo-tyrant, voldemort, memcached, hazelcast)
<cassandra>
service = on
storage-format = "java" # Options: java, scala-json, java-json
blocking = false # inserts and queries should be blocking or not
<thrift-server>
service = on
pidfile = "akka.pid"
</thrift-server>
</cassandra>
</rest>
</akka>
####################
# Akka Config File #
####################
# This file has all the default settings, so all of these could be removed with no visible effect.
# Modify as needed.
<log>
filename = "./logs/akka.log"
roll = "daily" # Options: never, hourly, daily, sunday/monday/...
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace
console = on
# syslog_host = ""
# syslog_server_name = ""
</log>
<akka>
version = "0.6"
boot = ["sample.java.Boot", "sample.scala.Boot"] # FQN to the class doing initial active object/actor
# supervisor bootstrap, should be defined in default constructor
<actor>
timeout = 5000 # default timeout for future based invocations
serialize-messages = off # does a deep clone of (non-primitive) messages to ensure immutability
</actor>
<stm>
service = on
restart-on-collision = off # (not implemented yet) if 'on' then it reschedules the transaction,
# if 'off' then throws an exception or rollback for user to handle
wait-for-completion = 100 # how long time in millis a transaction should be given time to complete when a collision is detected
wait-nr-of-times = 3 # the number of times it should check for completion of a pending transaction upon collision
distributed = off # not implemented yet
</stm>
<remote>
service = on
hostname = "localhost"
port = 9999
connection-timeout = 1000 # in millis
</remote>
<rest>
service = on
hostname = "localhost"
port = 9998
</rest>
<storage>
system = "cassandra" # Options: cassandra, mongodb
<cassandra>
service = on
hostname = "127.0.0.1" # IP address or hostname of one of the Cassandra cluster's seeds
port = 9160
storage-format = "java" # Options: java, scala-json, java-json, protobuf
consistency-level = 1
</cassandra>
<mongodb>
service = on
hostname = "127.0.0.1" # IP address or hostname of the MongoDB DB instance
port = 27017
dbname = "mydb"
storage-format = "scala-json" # Options: java, scala-json, java-json, protobuf
</mongodb>
</storage>
</akka>

View file

@ -15,6 +15,6 @@ log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n
# Edit the next line to point to your logs directory
log4j.appender.R.File=./logs/cassandra.log
log4j.appender.R.File=./logs/akka.log
log4j.logger.org.atmosphere=DEBUG

Binary file not shown.

View file

@ -3,6 +3,6 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.twitter</groupId>
<artifactId>scala-json</artifactId>
<version>0.1</version>
<version>1.0</version>
<packaging>jar</packaging>
</project>

View file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.twitter</groupId>
<artifactId>scala-stats</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
</project>

View file

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra</artifactId>
<version>0.4.0-dev</version>
<packaging>jar</packaging>
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra</artifactId>
<version>0.4.0-trunk</version>
<packaging>jar</packaging>
</project>

View file

View file

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.codehaus.aspectwerkz</groupId>
<artifactId>aspectwerkz-jdk5</artifactId>
<version>2.1</version>
<packaging>jar</packaging>
</project>

BIN
embedded-repo/se/foldleft/cassidy/0.1/cassidy-0.1.jar Executable file → Normal file

Binary file not shown.

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<module relativePaths="true" MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_5" inherit-compiler-output="false">
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
@ -32,13 +32,9 @@
<orderEntry type="library" exported="" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.camel:camel-core:2.0-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-logging:commons-logging-api:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.10" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.CR1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.GA" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache:zookeeper:3.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools:javautils:2.7.4-0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-core-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: sbinary:sbinary:0.3" level="project" />
@ -49,6 +45,8 @@
<orderEntry type="library" exported="" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: high-scale-lib:high-scale-lib:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-lang:commons-lang:2.4" level="project" />
<orderEntry type="library" exported="" name="Maven: se.foldleft:cassidy:0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-pool:commons-pool:1.5.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.9.9" level="project" />
<orderEntry type="library" name="Maven: com.sun.grizzly:grizzly-http:1.9.9" level="project" />
@ -64,6 +62,10 @@
<orderEntry type="library" name="Maven: com.sun.jersey:jersey-json:1.0.3" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jettison:jettison:1.0.1" level="project" />
<orderEntry type="library" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.10" level="project" />
<orderEntry type="library" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-asl:0.9.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey.contribs:jersey-scala:1.1.2-ea-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-core:0.3" level="project" />

View file

@ -10,15 +10,15 @@
<parent>
<artifactId>akka</artifactId>
<groupId>${akka.groupId}</groupId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
</parent>
<dependencies>
<dependency>
<groupId>${akka.groupId}</groupId>
<artifactId>akka-kernel</artifactId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>com.sun.grizzly</groupId>
@ -65,7 +65,7 @@
</dependencies>
<build>
<sourceDirectory>src/main</sourceDirectory>
<sourceDirectory>src/main/java</sourceDirectory>
<testSourceDirectory>src/test/java</testSourceDirectory>
<plugins>
<plugin>
@ -84,7 +84,7 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/Abstract*</exclude>
<exclude>**/*Persistent*</exclude>
</excludes>
</configuration>
</plugin>

View file

@ -21,7 +21,7 @@ public class ActiveObjectGuiceConfiguratorTest extends TestCase {
protected void setUp() {
se.scalablesolutions.akka.kernel.Kernel$.MODULE$.config();
EventBasedThreadPoolDispatcher dispatcher = new EventBasedThreadPoolDispatcher();
EventBasedThreadPoolDispatcher dispatcher = new EventBasedThreadPoolDispatcher("name");
dispatcher
.withNewThreadPoolWithBoundedBlockingQueue(100)
.setCorePoolSize(16)

View file

@ -9,10 +9,10 @@ public class AllTest extends TestCase {
TestSuite suite = new TestSuite("All Java tests");
suite.addTestSuite(InMemoryStateTest.class);
suite.addTestSuite(InMemNestedStateTest.class);
suite.addTestSuite(PersistentStateTest.class);
suite.addTestSuite(PersistentNestedStateTest.class);
//suite.addTestSuite(PersistentStateTest.class);
//suite.addTestSuite(PersistentNestedStateTest.class);
suite.addTestSuite(RemoteInMemoryStateTest.class);
suite.addTestSuite(RemotePersistentStateTest.class);
//suite.addTestSuite(RemotePersistentStateTest.class);
suite.addTestSuite(ActiveObjectGuiceConfiguratorTest.class);
//suite.addTestSuite(RestTest.class);
return suite;

View file

@ -48,13 +48,9 @@
<orderEntry type="library" exported="" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.camel:camel-core:2.0-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-logging:commons-logging-api:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.6" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.CR1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.GA" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache:zookeeper:3.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools:javautils:2.7.4-0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-core-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: sbinary:sbinary:0.3" level="project" />
@ -65,6 +61,8 @@
<orderEntry type="library" exported="" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: high-scale-lib:high-scale-lib:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-lang:commons-lang:2.4" level="project" />
<orderEntry type="library" exported="" name="Maven: se.foldleft:cassidy:0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-pool:commons-pool:1.5.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http:1.8.6.3" level="project" />
@ -81,6 +79,10 @@
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-json:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jettison:jettison:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.12" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-asl:0.9.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey.contribs:jersey-scala:1.1.2-ea-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-core:0.3" level="project" />

View file

@ -10,37 +10,48 @@
<parent>
<artifactId>akka</artifactId>
<groupId>${akka.groupId}</groupId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
<relativePath>../pom.xml</relativePath>
</parent>
<!-- Core deps -->
<dependencies>
<dependency>
<groupId>${akka.groupId}</groupId>
<artifactId>akka-util-java</artifactId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
<version>2.7.5</version>
</dependency>
<dependency>
<groupId>org.codehaus.aspectwerkz</groupId>
<artifactId>aspectwerkz-nodeps-jdk5</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>org.codehaus.aspectwerkz</groupId>
<artifactId>aspectwerkz-jdk5</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>com.twitter</groupId>
<artifactId>scala-stats</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>net.lag</groupId>
<artifactId>configgy</artifactId>
<version>1.3</version>
</dependency>
<dependency>
<!--dependency>
<groupId>org.guiceyfruit</groupId>
<artifactId>guiceyfruit-core</artifactId>
<version>2.0</version>
</dependency>
</dependency-->
<dependency>
<groupId>org.guiceyfruit</groupId>
<artifactId>guice-core</artifactId>
@ -54,13 +65,18 @@
<dependency>
<groupId>org.jboss.netty</groupId>
<artifactId>netty</artifactId>
<version>3.1.0.CR1</version>
<version>3.1.0.GA</version>
</dependency>
<dependency>
<groupId>org.apache</groupId>
<artifactId>zookeeper</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>org.scala-tools</groupId>
<artifactId>javautils</artifactId>
<version>2.7.4-0.1</version>
</dependency>
<!-- For Protocol/Serialization -->
<dependency>
@ -94,11 +110,18 @@
<version>0.3</version>
</dependency>
<!-- For Mongo -->
<dependency>
<groupId>com.mongodb</groupId>
<artifactId>mongo</artifactId>
<version>0.6</version>
</dependency>
<!-- For Cassandra -->
<dependency>
<groupId>org.apache.cassandra</groupId>
<artifactId>cassandra</artifactId>
<version>0.4.0-dev</version>
<version>0.4.0-trunk</version>
</dependency>
<dependency>
<groupId>com.facebook</groupId>
@ -110,7 +133,7 @@
<artifactId>fb303</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<!--dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.1</version>
@ -119,19 +142,19 @@
<groupId>high-scale-lib</groupId>
<artifactId>high-scale-lib</artifactId>
<version>1.0</version>
</dependency>
<dependency>
</dependency-->
<!--dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.4</version>
</dependency>
</dependency-->
<dependency>
<groupId>se.foldleft</groupId>
<artifactId>cassidy</artifactId>
<version>0.1</version>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
<version>1.5.1</version>
</dependency>
<!-- For Jersey -->
<!-- For Jersey & Atmosphere -->
<dependency>
<groupId>com.sun.grizzly</groupId>
<artifactId>grizzly-comet-webserver</artifactId>
@ -202,74 +225,16 @@
<version>0.9.5</version>
<scope>test</scope>
</dependency>
<!--dependency>
<groupId>com.jteigen.scalatest</groupId>
<artifactId>junit4runner</artifactId>
<version>1.0</version>
<scope>test</scope>
</dependency-->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<version>1.1.0-ea</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<args>
<arg>-target:jvm-1.5</arg>
<!--<arg>-unchecked</arg>-->
</args>
<scalaVersion>${scala.version}</scalaVersion>
<vscaladocVersion>1.1</vscaladocVersion>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<configuration>
<downloadSources>true</downloadSources>
<buildcommands>
<buildcommand>
ch.epfl.lamp.sdt.core.scalabuilder
</buildcommand>
</buildcommands>
<additionalProjectnatures>
<projectnature>
ch.epfl.lamp.sdt.core.scalanature
</projectnature>
</additionalProjectnatures>
<classpathContainers>
<classpathContainer>
org.eclipse.jdt.launching.JRE_CONTAINER
</classpathContainer>
<classpathContainer>
ch.epfl.lamp.sdt.launching.SCALA_CONTAINER
</classpathContainer>
</classpathContainers>
</configuration>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
@ -287,34 +252,41 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.dstovall</groupId>
<artifactId>onejar-maven-plugin</artifactId>
<version>1.4.1</version>
<executions>
<execution>
<configuration>
<mainClass>se.scalablesolutions.akka.kernel.Kernel</mainClass>
<onejarVersion>0.96</onejarVersion>
<attachToBuild>true</attachToBuild>
<classifier>onejar</classifier>
</configuration>
<goals>
<goal>one-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<filtering>false</filtering>
<directory>src/main/resources</directory>
<directory>../config</directory>
<includes>
<include>akka.conf</include>
<include>akka-reference.conf</include>
</includes>
</resource>
<resource>
<filtering>false</filtering>
<directory>src/main/scala</directory>
<directory>src/main/resources</directory>
<includes>
<include>**</include>
<include>META-INF/*</include>
</includes>
<excludes>
<exclude>**/*.scala</exclude>
</excludes>
</resource>
</resources>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<configuration>
<vscaladocVersion>1.1</vscaladocVersion>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
</plugins>
</reporting>
</project>

View file

@ -0,0 +1,8 @@
<!DOCTYPE aspectwerkz PUBLIC "-//AspectWerkz//DTD//EN" "http://aspectwerkz.codehaus.org/dtd/aspectwerkz2.dtd">
<aspectwerkz>
<system id="akka">
<package name="se.scalablesolutions.akka.kernel.actor">
<aspect class="ActiveObjectAspect" />
</package>
</system>
</aspectwerkz>

View file

@ -1,9 +0,0 @@
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD 2.0//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz_2_0.dtd">
<aspectwerkz>
<system id="akka">
<aspect class="se.scalablesolutions.akka.kernel.config.ConfigurationAspect"/>
</system>
</aspectwerkz>

View file

@ -12,50 +12,62 @@ import javax.ws.rs.core.UriBuilder
import java.io.File
import java.net.URLClassLoader
import net.lag.configgy.{Config, Configgy, RuntimeEnvironment}
import net.lag.configgy.{Config, Configgy, RuntimeEnvironment, ParseException}
import kernel.jersey.AkkaCometServlet
import kernel.nio.RemoteServer
import kernel.state.CassandraStorage
import kernel.util.Logging
import kernel.management.Management
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object Kernel extends Logging {
@volatile private var hasBooted = false
Boot.HOME
val VERSION = "0.6"
val HOME = {
val home = System.getenv("AKKA_HOME")
if (home == null) None
else Some(home)
}
val config = setupConfig
val CONFIG_VERSION = config.getString("akka.version", "0")
if (VERSION != CONFIG_VERSION) throw new IllegalStateException("Akka JAR version [" + VERSION + "] is different than the provided config ('akka.conf') version [" + CONFIG_VERSION + "]")
val BOOT_CLASSES = config.getList("akka.boot")
val RUN_REMOTE_SERVICE = config.getBool("akka.remote.service", true)
val RUN_MANAGEMENT_SERVICE = config.getBool("akka.management.service", true)
val STORAGE_SYSTEM = config.getString("akka.storage.system", "cassandra")
val RUN_REST_SERVICE = config.getBool("akka.rest.service", true)
val REST_HOSTNAME = kernel.Kernel.config.getString("akka.rest.hostname", "localhost")
val REST_URL = "http://" + REST_HOSTNAME
val REST_PORT = kernel.Kernel.config.getInt("akka.rest.port", 9998)
// FIXME add API to shut server down gracefully
@volatile private var hasBooted = false
private var remoteServer: RemoteServer = _
private var jerseySelectorThread: SelectorThread = _
private val startTime = System.currentTimeMillis
def main(args: Array[String]) = boot
private var applicationLoader: Option[ClassLoader] = None
def main(args: Array[String]) = boot
def boot = synchronized {
if (!hasBooted) {
printBanner
log.info("Starting Akka kernel...")
log.info("Starting Akka...")
runApplicationBootClasses
if (RUN_REMOTE_SERVICE) startRemoteService
if (RUN_MANAGEMENT_SERVICE) startManagementService
STORAGE_SYSTEM match {
case "cassandra" => startCassandra
case "terracotta" => throw new UnsupportedOperationException("terracotta storage backend is not yet supported")
case "mongodb" => throw new UnsupportedOperationException("mongodb storage backend is not yet supported")
case "redis" => throw new UnsupportedOperationException("redis storage backend is not yet supported")
case "voldemort" => throw new UnsupportedOperationException("voldemort storage backend is not yet supported")
case "tokyo-cabinet" => throw new UnsupportedOperationException("tokyo-cabinet storage backend is not yet supported")
@ -64,9 +76,8 @@ object Kernel extends Logging {
if (RUN_REST_SERVICE) startJersey
runApplicationBootClasses
log.info("Akka kernel started successfully")
Thread.currentThread.setContextClassLoader(getClass.getClassLoader)
log.info("Akka started successfully")
hasBooted = true
}
}
@ -74,50 +85,68 @@ object Kernel extends Logging {
def uptime = (System.currentTimeMillis - startTime) / 1000
def setupConfig: Config = {
try {
Configgy.configure(akka.Boot.CONFIG + "/akka.conf")
val runtime = new RuntimeEnvironment(getClass)
//runtime.load(args)
val config = Configgy.config
config.registerWithJmx("com.scalablesolutions.akka.config")
// FIXME fix Configgy JMX subscription to allow management
// config.subscribe { c => configure(c.getOrElse(new Config)) }
config
} catch {
case e: net.lag.configgy.ParseException => throw new Error("Could not retreive the akka.conf config file. Make sure you have set the AKKA_HOME environment variable to the root of the distribution.")
}
if (HOME.isDefined) {
try {
val configFile = HOME.get + "/config/akka.conf"
Configgy.configure(configFile)
log.info("AKKA_HOME is defined to [%s], config loaded from [%s].", HOME.get, configFile)
} catch {
case e: ParseException => throw new IllegalStateException("'akka.conf' config file can not be found in [" + HOME + "/config/akka.conf] - aborting. Either add it in the 'config' directory or add it to the classpath.")
}
} else {
try {
Configgy.configureFromResource("akka.conf", getClass.getClassLoader)
log.info("Config loaded from the application classpath.")
} catch {
case e: ParseException => throw new IllegalStateException("'$AKKA_HOME/config/akka.conf' could not be found and no 'akka.conf' can be found on the classpath - aborting. . Either add it in the '$AKKA_HOME/config' directory or add it to the classpath.")
}
}
val config = Configgy.config
config.registerWithJmx("com.scalablesolutions.akka")
// FIXME fix Configgy JMX subscription to allow management
// config.subscribe { c => configure(c.getOrElse(new Config)) }
config
}
private[akka] def runApplicationBootClasses = {
val HOME = try { System.getenv("AKKA_HOME") } catch { case e: NullPointerException => throw new IllegalStateException("AKKA_HOME system variable needs to be set. Should point to the root of the Akka distribution.") }
//val CLASSES = HOME + "/kernel/target/classes" // FIXME remove for dist
//val LIB = HOME + "/lib"
val CONFIG = HOME + "/config"
val DEPLOY = HOME + "/deploy"
val DEPLOY_DIR = new File(DEPLOY)
if (!DEPLOY_DIR.exists) { log.error("Could not find a deploy directory at [" + DEPLOY + "]"); System.exit(-1) }
val toDeploy = for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL
log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy.toArray.toList)
val loader = new URLClassLoader(toDeploy.toArray, getClass.getClassLoader)
if (BOOT_CLASSES.isEmpty) throw new IllegalStateException("No boot class specificed. Add an application boot class to the 'akka.conf' file such as 'boot = \"com.biz.myapp.Boot\"")
new management.RestfulJMXBoot // add the REST/JMX service
val loader =
if (HOME.isDefined) {
val CONFIG = HOME.get + "/config"
val DEPLOY = HOME.get + "/deploy"
val DEPLOY_DIR = new File(DEPLOY)
if (!DEPLOY_DIR.exists) { log.error("Could not find a deploy directory at [" + DEPLOY + "]"); System.exit(-1) }
val toDeploy = for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL
//val toDeploy = DEPLOY_DIR.toURL :: (for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL)
log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy.toArray.toList)
new URLClassLoader(toDeploy.toArray, getClass.getClassLoader)
} else if (getClass.getClassLoader.getResourceAsStream("akka.conf") != null) {
getClass.getClassLoader
} else throw new IllegalStateException("AKKA_HOME is not defined and no 'akka.conf' can be found on the classpath, aborting")
for (clazz <- BOOT_CLASSES) {
log.info("Booting with boot class [%s]", clazz)
log.info("Loading boot class [%s]", clazz)
loader.loadClass(clazz).newInstance
}
applicationLoader = Some(loader)
}
private[akka] def startRemoteService = {
// FIXME manage remote serve thread for graceful shutdown
val remoteServerThread = new Thread(new Runnable() {
def run = RemoteServer.start
}, "akka remote service")
def run = RemoteServer.start(applicationLoader)
}, "Akka Remote Service")
remoteServerThread.start
}
private[akka] def startManagementService = {
Management("se.scalablesolutions.akka.management")
log.info("Management service started successfully.")
}
private[akka] def startCassandra = if (config.getBool("akka.storage.cassandra.service", true)) {
System.setProperty("cassandra", "")
System.setProperty("storage-config", akka.Boot.CONFIG + "/")
if (HOME.isDefined) System.setProperty("storage-config", HOME.get + "/config/")
else if (System.getProperty("storage-config", "NIL") == "NIL") throw new IllegalStateException("AKKA_HOME and -Dstorage-config=... is not set. Can't start up Cassandra. Either set AKKA_HOME or set the -Dstorage-config=... variable to the directory with the Cassandra storage-conf.xml file.")
CassandraStorage.start
}
@ -131,7 +160,7 @@ object Kernel extends Logging {
adapter.setHandleStaticResources(true)
adapter.setServletInstance(new AkkaCometServlet)
adapter.setContextPath(uri.getPath)
adapter.setRootFolder(System.getenv("AKKA_HOME") + "/deploy/root")
if (HOME.isDefined) adapter.setRootFolder(HOME.get + "/deploy/root")
log.info("REST service root path: [" + adapter.getRootFolder + "] and context path [" + adapter.getContextPath + "] ")
val ah = new com.sun.grizzly.arp.DefaultAsyncHandler
@ -157,7 +186,7 @@ object Kernel extends Logging {
(____ /__|_ \__|_ \(____ /
\/ \/ \/ \/
""")
log.info(" Running version " + kernel.Kernel.config.getString("akka.version", "awesome"))
log.info(" Running version " + VERSION)
log.info("==============================")
}

View file

@ -14,9 +14,11 @@ import kernel.config.ScalaConfig._
import kernel.util._
import serialization.Serializer
import org.codehaus.aspectwerkz.intercept.{Advisable, AroundAdvice}
import org.codehaus.aspectwerkz.intercept.{Advisable, AroundAdvice, Advice}
import org.codehaus.aspectwerkz.joinpoint.{MethodRtti, JoinPoint}
import org.codehaus.aspectwerkz.proxy.Proxy
import org.codehaus.aspectwerkz.annotation.{Aspect, Around}
import org.codehaus.aspectwerkz.aspect.management.Aspects
sealed class ActiveObjectException(msg: String) extends RuntimeException(msg)
class ActiveObjectInvocationTimeoutException(msg: String) extends ActiveObjectException(msg)
@ -141,6 +143,7 @@ class ActiveObjectFactory {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object ActiveObject {
val MATCH_ALL = "execution(* *.*(..))"
val AKKA_CAMEL_ROUTING_SCHEME = "akka"
@ -217,24 +220,28 @@ object ActiveObject {
}
private[kernel] def newInstance[T](target: Class[T], actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
//if (getClass.getClassLoader.getResourceAsStream("META-INF/aop.xml") != null) println("000000000000000000000 FOUND AOP")
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(target, false, true)
actor.initialize(target, proxy)
// FIXME switch to weaving in the aspect at compile time
proxy.asInstanceOf[Advisable].aw_addAdvice(
MATCH_ALL, new ActorAroundAdvice(target, proxy, actor, remoteAddress, timeout))
actor.timeout = timeout
actor.start
AspectInitRegistry.register(proxy, AspectInit(target, actor, remoteAddress, timeout))
proxy.asInstanceOf[T]
}
private[kernel] def newInstance[T](intf: Class[T], target: AnyRef, actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
//if (getClass.getClassLoader.getResourceAsStream("META-INF/aop.xml") != null) println("000000000000000000000 FOUND AOP")
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(Array(intf), Array(target), false, true)
actor.initialize(target.getClass, target)
proxy.asInstanceOf[Advisable].aw_addAdvice(
MATCH_ALL, new ActorAroundAdvice(intf, target, actor, remoteAddress, timeout))
actor.timeout = timeout
actor.start
AspectInitRegistry.register(proxy, AspectInit(intf, actor, remoteAddress, timeout))
proxy.asInstanceOf[T]
}
private[kernel] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = {
object factory extends SupervisorFactory {
override def getSupervisorConfig = SupervisorConfig(restartStrategy, components)
@ -245,20 +252,46 @@ object ActiveObject {
}
}
object AspectInitRegistry {
private val inits = new java.util.concurrent.ConcurrentHashMap[AnyRef, AspectInit]
def initFor(target: AnyRef) = {
val init = inits.get(target)
inits.remove(target)
init
}
def register(target: AnyRef, init: AspectInit) = inits.put(target, init)
}
sealed case class AspectInit(
val target: Class[_],
val actor: Dispatcher,
val remoteAddress: Option[InetSocketAddress],
val timeout: Long)
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@serializable
sealed class ActorAroundAdvice(val target: Class[_],
val targetInstance: AnyRef,
val actor: Dispatcher,
val remoteAddress: Option[InetSocketAddress],
val timeout: Long) extends AroundAdvice {
val id = target.getName
actor.timeout = timeout
actor.start
def invoke(joinpoint: JoinPoint): AnyRef = dispatch(joinpoint)
@Aspect("perInstance")
sealed class ActiveObjectAspect {
@volatile var isInitialized = false
var target: Class[_] = _
var actor: Dispatcher = _
var remoteAddress: Option[InetSocketAddress] = _
var timeout: Long = _
@Around("execution(* *..*(..))")
def invoke(joinpoint: JoinPoint): AnyRef = {
if (!isInitialized) {
val init = AspectInitRegistry.initFor(joinpoint.getThis)
target = init.target
actor = init.actor
remoteAddress = init.remoteAddress
timeout = init.timeout
isInitialized = true
}
dispatch(joinpoint)
}
private def dispatch(joinpoint: JoinPoint) = {
if (remoteAddress.isDefined) remoteDispatch(joinpoint)

View file

@ -8,14 +8,17 @@ import com.google.protobuf.ByteString
import java.net.InetSocketAddress
import java.util.concurrent.CopyOnWriteArraySet
import kernel.reactor._
import kernel.config.ScalaConfig._
import kernel.stm.TransactionManagement
import kernel.util.Helpers.ReadWriteLock
import kernel.nio.protobuf.RemoteProtocol.RemoteRequest
import kernel.util.Logging
import reactor._
import config.ScalaConfig._
import stm.TransactionManagement
import util.Helpers.ReadWriteLock
import nio.protobuf.RemoteProtocol.RemoteRequest
import util.Logging
import serialization.{Serializer, Serializable, SerializationProtocol}
import nio.{RemoteProtocolBuilder, RemoteClient, RemoteServer, RemoteRequestIdFactory}
import management.Management
import com.twitter.service.Stats
sealed abstract class LifecycleMessage
case class Init(config: AnyRef) extends LifecycleMessage
@ -42,14 +45,17 @@ class ActorMessageInvoker(val actor: Actor) extends MessageInvoker {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object Actor {
val TIMEOUT = kernel.Kernel.config.getInt("akka.actor.timeout", 5000)
val SERIALIZE_MESSAGES = kernel.Kernel.config.getBool("akka.actor.serialize-messages", false)
val TIMEOUT = Kernel.config.getInt("akka.actor.timeout", 5000)
val SERIALIZE_MESSAGES = Kernel.config.getBool("akka.actor.serialize-messages", false)
}
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@serializable trait Actor extends Logging with TransactionManagement {
trait Actor extends Logging with TransactionManagement {
Stats.getCounter("NrOfActors").incr
ActorRegistry.register(this)
@volatile private[this] var isRunning: Boolean = false
private[this] val remoteFlagLock = new ReadWriteLock
private[this] val transactionalFlagLock = new ReadWriteLock
@ -64,6 +70,8 @@ object Actor {
protected[this] val linkedActors = new CopyOnWriteArraySet[Actor]
protected[actor] var lifeCycleConfig: Option[LifeCycle] = None
val name = this.getClass.getName
// ====================================
// ==== USER CALLBACKS TO OVERRIDE ====
// ====================================
@ -96,7 +104,7 @@ object Actor {
* </pre>
*/
protected[kernel] var dispatcher: MessageDispatcher = {
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher(getClass.getName)
mailbox = dispatcher.messageQueue
dispatcher.registerHandler(this, new ActorMessageInvoker(this))
dispatcher
@ -529,6 +537,8 @@ object Actor {
}
private[this] def handleTrapExit(dead: Actor, reason: Throwable): Unit = {
if (Management.RECORD_STATS) Stats.getCounter("NrOfFailures_" + dead.name).incr
if (trapExit) {
if (faultHandler.isDefined) {
faultHandler.get match {
@ -546,6 +556,7 @@ object Actor {
linkedActors.toArray.toList.asInstanceOf[List[Actor]].foreach(_.restart(reason))
private[Actor] def restart(reason: AnyRef) = synchronized {
if (Management.RECORD_STATS) Stats.getCounter("NrOfRestarts_" + name).incr
lifeCycleConfig match {
case None => throw new IllegalStateException("Server [" + id + "] does not have a life-cycle defined.")

View file

@ -0,0 +1,33 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.actor
import kernel.util.Logging
import scala.collection.jcl.HashMap
/**
* Registry holding all actor instances, mapped by class.
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object ActorRegistry extends Logging {
  // Class name -> all registered instances of that class, most recent first.
  private val actors = new HashMap[String, List[Actor]]

  /** Returns all registered instances of the given class, or Nil if none. */
  def actorsFor(clazz: Class[_]): List[Actor] = synchronized {
    actors.get(clazz.getName) match {
      case None => Nil
      case Some(instances) => instances
    }
  }

  /** Adds the actor to the registry under its concrete class name. */
  def register(actor: Actor) = synchronized {
    val name = actor.getClass.getName
    // Fixed: the original computed `actors + (name -> ...)` and discarded
    // the result, so registrations could be silently lost; use an explicit
    // in-place update instead.
    actors(name) = actor :: actors.get(name).getOrElse(Nil)
  }
}

View file

@ -19,7 +19,6 @@ extends IoCComponentProviderFactory with Logging {
override def getComponentProvider(clazz: Class[_]): IoCComponentProvider = getComponentProvider(null, clazz)
override def getComponentProvider(context: ComponentContext, clazz: Class[_]): IoCComponentProvider = {
//log.info("ProviderFactory: resolve => " + clazz.getName)
configurators.find(_.isDefined(clazz)).map(_ => new ActorComponentProvider(clazz, configurators)).getOrElse(null)
configurators.find(_.isDefined(clazz)).map(_ => new ActorComponentProvider(clazz, configurators)).getOrElse(null)
}
}
}

View file

@ -0,0 +1,187 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.management
import com.twitter.service.Stats
import scala.collection.jcl
import scala.collection.mutable.ArrayBuffer
import java.util.concurrent.ThreadPoolExecutor
import java.lang.management.ManagementFactory
import javax.{management => jmx}
import javax.management.remote.{JMXConnectorServerFactory, JMXServiceURL}
import kernel.Kernel.config
import kernel.util.Logging
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object Management extends Logging {
// Global switch consulted by the rest of the kernel before recording stats.
val RECORD_STATS = config.getBool("akka.management.record-stats", true)
// JMX domain under which MBeans are registered; overridable via apply(String).
private var name = "se.scalablesolutions.akka"
private val mbeanServer = ManagementFactory.getPlatformMBeanServer
// No-op: calling Management() merely forces this object's initialization
// (and thereby the side effects below).
def apply() = {}
// Overrides the JMX domain used for subsequently registered MBeans.
def apply(packageName: String) = name = packageName
// Construction-time side effects: start an RMI registry and expose the
// platform MBean server over a JMX connector, then register the stats bean.
// NOTE(review): port 1099 is hard-coded here — confirm it should not come
// from config like the other settings.
java.rmi.registry.LocateRegistry.createRegistry(1099)
JMXConnectorServerFactory.newJMXConnectorServer(
new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi"),
null,
mbeanServer).start
registerMBean(new StatisticsMBean, "Stats")
// Registers the MBean under "<name>:type=<mbeanType>" unless one is already
// registered (getMBeanInfo throws InstanceNotFoundException when absent,
// which is the signal to register).
def registerMBean(mbean: jmx.DynamicMBean, mbeanType: String) = {
val objectName = new jmx.ObjectName(name + ":type=" + mbeanType)
try { mbeanServer.getMBeanInfo(objectName) } catch {
case e: jmx.InstanceNotFoundException =>
mbeanServer.registerMBean(mbean, objectName)
}
}
// Builds, logs and returns a plain-text "STAT <key> <value>" report of JVM,
// counter, timing and gauge statistics; `reset` is forwarded to the
// timing/gauge reads.
def getStats(reset: Boolean) = {
var statistics = new ArrayBuffer[Tuple2[String, String]]
statistics += (("current time", (System.currentTimeMillis / 1000).toString))
statistics += (("akka version", Kernel.VERSION))
statistics += (("uptime", Kernel.uptime.toString))
for ((key, value) <- Stats.getJvmStats) statistics += (key, value.toString)
for ((key, value) <- Stats.getCounterStats) statistics += (key, value.toString)
for ((key, value) <- Stats.getTimingStats(reset)) statistics += (key, value.toString)
for ((key, value) <- Stats.getGaugeStats(reset)) statistics += (key, value.toString)
val report = {for ((key, value) <- statistics) yield "STAT %s %s".format(key, value)}.mkString("", "\r\n", "\r\n")
log.info("=========================================\n\t--- Statistics Report ---\n%s=========================================", report)
report
}
}
/**
 * Dynamic MBean exposing the recorded statistics as read-only JMX attributes
 * named "counter_&lt;name&gt;", "timing_&lt;min|max|average|count&gt;_&lt;name&gt;"
 * and "gauge_&lt;name&gt;".
 *
 * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
 */
class StatisticsMBean extends jmx.DynamicMBean {

  def getMBeanInfo = new jmx.MBeanInfo(
    "se.scalablesolutions.akka.kernel.management.StatisticsMBean",
    "runtime statistics",
    getAttributeInfo,
    null, null, null,
    new jmx.ImmutableDescriptor("immutableInfo=false"))

  /** Reads one statistic; attribute names are "<kind>_<rest>". */
  def getAttribute(name: String): AnyRef = {
    val segments = name.split("_", 2)
    segments(0) match {
      case "counter" =>
        Stats.getCounterStats()(segments(1)).asInstanceOf[java.lang.Long]
      case "timing" =>
        // "<prefix>_<timing name>", where prefix selects the statistic.
        val prefix = segments(1).split("_", 2)
        val timing = Stats.getTimingStats(false)(prefix(1))
        val x = prefix(0) match {
          case "min" => timing.minimum
          case "max" => timing.maximum
          case "count" => timing.count
          case "average" => timing.average
        }
        x.asInstanceOf[java.lang.Integer]
      case "gauge" =>
        Stats.getGaugeStats(false)(segments(1)).asInstanceOf[java.lang.Double]
      case _ =>
        // Fixed: unknown attribute names used to escape as a MatchError;
        // the DynamicMBean contract calls for AttributeNotFoundException.
        throw new jmx.AttributeNotFoundException(name)
    }
  }

  def getAttributes(names: Array[String]): jmx.AttributeList = {
    val rv = new jmx.AttributeList
    for (name <- names) rv.add(new jmx.Attribute(name, getAttribute(name)))
    rv
  }

  // This bean is read-only and exposes no operations.
  def invoke(actionName: String, params: Array[Object], signature: Array[String]): AnyRef = throw new UnsupportedOperationException
  def setAttribute(attr: jmx.Attribute): Unit = throw new UnsupportedOperationException
  def setAttributes(attrs: jmx.AttributeList): jmx.AttributeList = throw new UnsupportedOperationException

  // Advertises one attribute per currently known counter/timing/gauge.
  private def getAttributeInfo: Array[jmx.MBeanAttributeInfo] = {
    (Stats.getCounterStats.keys.map { name =>
      List(new jmx.MBeanAttributeInfo("counter_" + name, "java.lang.Long", "counter", true, false, false))
    } ++ Stats.getTimingStats(false).keys.map { name =>
      List("min", "max", "average", "count") map { prefix =>
        new jmx.MBeanAttributeInfo("timing_" + prefix + "_" + name, "java.lang.Integer", "timing", true, false, false)
      }
    } ++ Stats.getGaugeStats(false).keys.map { name =>
      // Fixed: gauges are returned as java.lang.Double by getAttribute, so
      // advertise the matching type (was incorrectly "java.lang.Long").
      List(new jmx.MBeanAttributeInfo("gauge_" + name, "java.lang.Double", "gauge", true, false, false))
    }).toList.flatten[jmx.MBeanAttributeInfo].toArray
  }
}
/**
 * Dynamic MBean exposing read-only metrics and a few management operations
 * (purge, shutdown, core/max pool size) for a ThreadPoolExecutor.
 *
 * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
 */
class ThreadPoolMBean(threadPool: ThreadPoolExecutor) extends jmx.DynamicMBean {

  // Fixed: removed the trailing comma after the last element, which is a
  // syntax error in Scala versions before 2.12.2.
  val operations: Array[jmx.MBeanOperationInfo] = Array(
    new jmx.MBeanOperationInfo("purge", "",
      Array(), "void", jmx.MBeanOperationInfo.ACTION),
    new jmx.MBeanOperationInfo("shutdown", "",
      Array(), "void", jmx.MBeanOperationInfo.ACTION),
    new jmx.MBeanOperationInfo("setCorePoolSize", "",
      Array(new jmx.MBeanParameterInfo("corePoolSize", "java.lang.Integer", "")), "void", jmx.MBeanOperationInfo.ACTION),
    new jmx.MBeanOperationInfo("setMaximumPoolSize", "",
      Array(new jmx.MBeanParameterInfo("maximumPoolSize", "java.lang.Integer", "")), "void", jmx.MBeanOperationInfo.ACTION))

  def getMBeanInfo = new jmx.MBeanInfo(
    "se.scalablesolutions.akka.kernel.management.ThreadPoolMBean",
    "runtime management",
    getAttributeInfo,
    null, operations, null,
    new jmx.ImmutableDescriptor("immutableInfo=false"))

  /** Reads one pool metric (boxed); unknown names raise AttributeNotFoundException. */
  def getAttribute(name: String): AnyRef = name match {
    case "getActiveCount" => threadPool.getActiveCount.asInstanceOf[AnyRef]
    case "getCompletedTaskCount" => threadPool.getCompletedTaskCount.asInstanceOf[AnyRef]
    case "getCorePoolSize" => threadPool.getCorePoolSize.asInstanceOf[AnyRef]
    case "getLargestPoolSize" => threadPool.getLargestPoolSize.asInstanceOf[AnyRef]
    case "getMaximumPoolSize" => threadPool.getMaximumPoolSize.asInstanceOf[AnyRef]
    case "getPoolSize" => threadPool.getPoolSize.asInstanceOf[AnyRef]
    case "getTaskCount" => threadPool.getTaskCount.asInstanceOf[AnyRef]
    // Fixed: unknown attribute names used to escape as a MatchError; the
    // DynamicMBean contract calls for AttributeNotFoundException instead.
    case _ => throw new jmx.AttributeNotFoundException(name)
  }

  private def getAttributeInfo: Array[jmx.MBeanAttributeInfo] = {
    // Fixed: "java.lang.Int" is not a valid Java class name; the int-valued
    // attributes are advertised as "java.lang.Integer", matching the boxed
    // values returned by getAttribute.
    Array(
      new jmx.MBeanAttributeInfo("getCorePoolSize", "java.lang.Integer", "", true, false, false),
      new jmx.MBeanAttributeInfo("getMaximumPoolSize", "java.lang.Integer", "", true, false, false),
      new jmx.MBeanAttributeInfo("getActiveCount", "java.lang.Integer", "", true, false, false),
      new jmx.MBeanAttributeInfo("getCompletedTaskCount", "java.lang.Long", "", true, false, false),
      new jmx.MBeanAttributeInfo("getLargestPoolSize", "java.lang.Integer", "", true, false, false),
      new jmx.MBeanAttributeInfo("getPoolSize", "java.lang.Integer", "", true, false, false),
      new jmx.MBeanAttributeInfo("getTaskCount", "java.lang.Long", "", true, false, false))
  }

  def getAttributes(names: Array[String]): jmx.AttributeList = {
    val rv = new jmx.AttributeList
    for (name <- names) rv.add(new jmx.Attribute(name, getAttribute(name)))
    rv
  }

  /** Dispatches a management operation; failures are wrapped in MBeanException. */
  def invoke(actionName: String, params: Array[Object], signature: Array[String]): AnyRef = {
    try {
      actionName match {
        case "purge" => threadPool.purge
        case "shutdown" => threadPool.shutdown
        case "setCorePoolSize" =>
          params match {
            case Array(corePoolSize: java.lang.Integer) => threadPool.setCorePoolSize(corePoolSize.intValue)
            case _ => throw new Exception("Bad signature " + params.toList.toString)
          }
        case "setMaximumPoolSize" =>
          params match {
            case Array(maximumPoolSize: java.lang.Integer) => threadPool.setMaximumPoolSize(maximumPoolSize.intValue)
            case _ => throw new Exception("Bad signature " + params.toList.toString)
          }
      }
    } catch { case e: Exception => throw new jmx.MBeanException(e) }
    "Success"
  }

  // This bean's attributes are read-only.
  def setAttribute(attr: jmx.Attribute): Unit = throw new UnsupportedOperationException
  def setAttributes(attrs: jmx.AttributeList): jmx.AttributeList = throw new UnsupportedOperationException
}

View file

@ -0,0 +1,96 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.management
import se.scalablesolutions.akka.kernel.actor.{SupervisorFactory, Actor}
import se.scalablesolutions.akka.kernel.config.ScalaConfig._
import se.scalablesolutions.akka.kernel.util.Logging
import javax.ws.rs.core.MultivaluedMap
import javax.ws.rs.{GET, POST, Path, QueryParam, Produces, WebApplicationException, Consumes}
import javax.management._
import javax.management.remote.{JMXConnector, JMXConnectorFactory, JMXServiceURL}
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import java.util.concurrent.ConcurrentHashMap
/**
* REST interface to Akka's JMX service.
* <p/>
* Here is an example that retrieves the current number of Actors.
* <pre>
* http://localhost:9998/jmx
* ?service=service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi
* &component=se.scalablesolutions.akka:type=Stats
* &attribute=counter_NrOfActors
* </pre>
*/
@Path("/jmx")
class RestfulJMX extends Actor with Logging {

  private case class Request(service: String, component: String, attribute: String)

  // Cached JMX connections, keyed by service URL.
  private val connectors = new ConcurrentHashMap[String, JMXConnector]

  // Serves GET /jmx by forwarding the query to this actor's message loop.
  @GET
  @Produces(Array("text/plain"))
  def queryJMX(
    @QueryParam("service") service: String,
    @QueryParam("component") component: String,
    @QueryParam("attribute") attribute: String): String=
    (this !! Request(service, component, attribute)).getOrElse("Error in REST JMX management service")

  override def receive: PartialFunction[Any, Unit] = {
    case Request(service, component, attribute) => reply(retrieveAttribute(service, component, attribute))
  }

  // Looks up (or establishes and caches) a connection to the service and
  // reads the requested attribute from the named MBean.
  private def retrieveAttribute(service: String, component: String, attribute: String): String = {
    try {
      // Fixed: the original relied on `putIfAbsent` alone, but that returns
      // the PREVIOUS mapping — null on the very first call — and then
      // dereferenced it, so the first request always threw an NPE.
      var connector = connectors.get(service)
      if (connector == null) {
        val fresh = JMXConnectorFactory.connect(new JMXServiceURL(service))
        val raced = connectors.putIfAbsent(service, fresh)
        connector =
          if (raced == null) fresh
          else { try { fresh.close } catch { case _: Exception => }; raced }
      }
      connector.getMBeanServerConnection.getAttribute(new ObjectName(component), attribute).toString
    } catch {
      case e: Exception =>
        // Drop the possibly broken cached connection so the next request
        // reconnects. (Fixed: the original guarded with `contains`, which on
        // ConcurrentHashMap tests VALUES, not keys; remove is a no-op when
        // the key is absent, so no guard is needed.)
        connectors.remove(service)
        throw e
    }
  }
}
/**
* REST interface to Akka's statistics recorder.
* <p/>
* Here is an example that retrieves a statistics report.
* <pre>
* http://localhost:9998/stats?reset=true
* </pre>
*/
@Path("/stats")
class StatisticsReporter extends Actor with Logging {

  // Internal message carrying the parsed "reset" flag to the actor loop.
  private case class Stats(reset: Boolean)

  // Serves GET /stats; the optional "reset" query parameter is parsed as a
  // boolean and forwarded to Management.getStats via the actor's mailbox.
  @GET
  @Produces(Array("text/html"))
  def stats(@QueryParam("reset") reset: String): scala.xml.Elem = {
    val shouldReset = java.lang.Boolean.valueOf(reset).booleanValue
    (this !! Stats(shouldReset)).getOrElse(<h3>Error in REST JMX management service</h3>)
  }

  override def receive: PartialFunction[Any, Unit] = {
    case Stats(shouldReset) => reply(<pre>{Management.getStats(shouldReset)}</pre>)
  }
}
/**
 * Boot class wiring the REST JMX and statistics endpoints under a
 * one-for-one supervisor, started as a construction-time side effect.
 */
class RestfulJMXBoot extends Logging {
  // Fixed typo in the boot log message ("servivce" -> "service").
  log.info("Booting Restful JMX service")

  object factory extends SupervisorFactory {
    override def getSupervisorConfig: SupervisorConfig = {
      SupervisorConfig(
        RestartStrategy(OneForOne, 3, 100),
        Supervise(
          new RestfulJMX,
          LifeCycle(Permanent, 100)) ::
        Supervise(
          new StatisticsReporter,
          LifeCycle(Permanent, 100)) ::
        Nil)
    }
  }
  factory.newSupervisor.startSupervisor
}

View file

@ -0,0 +1,171 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.management
import javax.management._
import java.lang.management._
/*
object ScalaJMX {
val mbeanServer = ManagementFactory.getPlatformMBeanServer
def register(t: AnyRef, i: Class, name: ObjectName) = mbeanServer.registerMBean(new StandardMBean(t, i), name)
def registerBean(bean: DynamicMBean, name: ObjectName): ObjectInstance = mbeanServer.registerMBean(bean, name)
def register(t: AnyRef, name: String): ObjectInstance = register(t, beanClass(t), name)
def info(name: ObjectName): SBean = mbeanServer.getMBeanInfo(name)
def bean(name: ObjectName): SBeanInfo = convBeanInfo(name, mbeanServer.getMBeanInfo(name))
def invoke(name: ObjectName, operationName: String, params: Array[Object], signature: Array[String]): Object =
mbeanServer.invoke(name, operationName, params, signature)
def call(name: ObjectName, operationName: String): Object = invoke(name, operationName, Array[Object](), Array[String]())
def get(name: ObjectName, attribute: String) = mbeanServer.getAttribute(name, attribute)
def set(name: ObjectName, attribute: String, value: Object) = mbeanServer.setAttribute(name, new Attribute(attribute, value))
implicit def instanceToName(oi: ObjectInstance) = oi.getObjectName()
implicit def stringToName(name: String) = ObjectName.getInstance(name)
implicit def convBean(bi: MBeanInfo):SBean = SBean(bi.getClassName(), bi.getDescription(), bi.getAttributes(), bi.getNotifications(), bi.getOperations(), bi.getConstructors())
implicit def seqToArr(seq: Seq[AnyRef]): Array[Object] = seq.toArray
def convBeanInfo(name: ObjectName, bi: MBeanInfo):SBeanInfo = new SBeanInfo(name, bi.getClassName(), bi.getDescription(), bi.getAttributes(), bi.getNotifications(), bi.getOperations(), bi.getConstructors())
implicit def convAttrs(attrs: Array[MBeanAttributeInfo]): Seq[SAttr] =
for (val a <- attrs) yield a
implicit def convParams(params: Array[MBeanParameterInfo]): Seq[SParameter] =
for (val p <- params) yield p
implicit def convNotes(notes: Array[MBeanNotificationInfo]): Seq[SNotification] =
for (val p <- notes) yield p
implicit def convCons(cons: Array[MBeanConstructorInfo]): Seq[SConstructor] =
for (val p <- cons) yield p
implicit def convOps(cons: Array[MBeanOperationInfo]): Seq[SOperation] =
for (val p <- cons) yield p
implicit def convAttr(attr: MBeanAttributeInfo) = SAttr(attr.getName(), attr.getDescription(), attr.getType(), attr.isIs(), attr.isReadable(), attr.isWritable())
implicit def convNote(note: MBeanNotificationInfo) = SNotification(note.getName(), note.getDescription(), note.getNotifTypes())
implicit def convOp(op: MBeanOperationInfo):SOperation = SOperation(op.getName(), op.getDescription(), op.getImpact(), op.getReturnType(), op.getSignature())
implicit def convCon(con: MBeanConstructorInfo):SConstructor = SConstructor(con getName, con getDescription, con getSignature)
implicit def convParam(p: MBeanParameterInfo) = SParameter(p getName, p getDescription, p getType)
private def beanClass(t: AnyRef) = Class.forName(t.getClass().getName() + "MBean")
}
class MBean(mbeanInterface: String) extends StandardMBean(Class.forName(mbeanInterface))
abstract class SFeature(val name: String, val description: String)
case class SBean(className: String, description: String,
attrs: Seq[SAttr], notes: Seq[SNotification],
ops: Seq[SOperation], cons: Seq[SConstructor]) {
def writable = attrs.toList.filter(sa => sa.writable)
}
class SBeanInfo(name: ObjectName, className: String, description: String,
attrs: Seq[SAttr], notes: Seq[SNotification],
ops: Seq[SOperation], cons: Seq[SConstructor])
extends SBean(className, description, attrs, notes, ops, cons) {
def get(attribute: String) = SJMX.get(name, attribute)
def set(attribute: String, value: Object) = SJMX.set(name, attribute, value)
def call(opName: String) = SJMX.call(name, opName)
}
case class SAttr(
override val name: String,
override val description: String,
jmxType: String, isIs: boolean, readable: boolean, writable: boolean
) extends SFeature(name, description)
case class SNotification(
override val name: String,
override val description: String,
notifTypes: Array[String]) extends SFeature(name, description)
case class SOperation(
override val name: String,
override val description: String,
impact: int,
returnType: String,
signature: Seq[SParameter]) extends SFeature(name, description)
case class SParameter(
override val name: String,
override val description: String,
jmxType: String) extends SFeature(name, description)
case class SConstructor(
override val name: String,
override val description: String,
signature: Seq[SParameter]) extends SFeature(name, description)
*/
/*
package com.soletta.spipe;
import javax.management.{StandardMBean,ObjectName,MBeanInfo};
class SPipe extends MBean("com.soletta.spipe.SPipeMBean") with SPipeMBean {
import Console.println;
import SJMX._;
private var desc: String = "Yipe!";
def go = {
val oname: ObjectName = "default:name=SPipe";
val instance = SJMX.registerBean(this, oname);
set(oname, "Factor", "Hello!");
println(get(oname, "Factor"));
val SBean(n, d, Seq(_, a2, a3, _*), _, ops, _) = info(oname);
println("Bean name is " + n + ", description is " + d);
println("Second attribute is " + a2);
println("Third attribute is " + a3);
println("Writable attributes are " + info(oname).writable);
println("Ops: " + ops);
val x =
<bean name={n} description={d}>
{ops.toList.map(o => <operation name={o.name} description={o.description}/>)}
</bean> ;
println(x);
val inf = bean(oname);
inf.call("start");
println(inf.get("Factor"));
}
def getName = "SPipe!";
def setDescription(d: String) = desc = d;
override def getDescription() = desc;
def getFactor = desc;
def setFactor(s: String) = desc = s;
def isHappy = true;
override def getDescription(info: MBeanInfo) = desc;
}
object PipeMain {
def main(args: Array[String]): unit = {
(new SPipe) go;
}
}
trait SPipeMBean {
def getName: String;
def getDescription: String = getName;
def setDescription(d: String): unit;
def getFactor: String;
def setFactor(s: String): unit;
def isHappy: boolean;
def start() = { Console.println("Starting"); }
def stop() = { }
*/

View file

@ -12,6 +12,7 @@ import kernel.actor.{Exit, Actor}
import kernel.reactor.{DefaultCompletableFutureResult, CompletableFutureResult}
import serialization.{Serializer, Serializable, SerializationProtocol}
import kernel.util.Logging
import kernel.management.Management
import org.jboss.netty.bootstrap.ClientBootstrap
import org.jboss.netty.channel._
@ -21,6 +22,8 @@ import org.jboss.netty.handler.codec.protobuf.{ProtobufDecoder, ProtobufEncoder}
import scala.collection.mutable.HashMap
import com.twitter.service.Stats
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@ -44,6 +47,10 @@ object RemoteClient extends Logging {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class RemoteClient(hostname: String, port: Int) extends Logging {
val name = "RemoteClient@" + hostname
val NR_OF_BYTES_SENT = Stats.getCounter("NrOfBytesSent_" + name)
val NR_OF_MESSAGES_SENT = Stats.getCounter("NrOfMessagesSent_" + name)
@volatile private var isRunning = false
private val futures = new ConcurrentHashMap[Long, CompletableFutureResult]
private val supervisors = new ConcurrentHashMap[String, Actor]
@ -55,7 +62,7 @@ class RemoteClient(hostname: String, port: Int) extends Logging {
private val bootstrap = new ClientBootstrap(channelFactory)
bootstrap.setPipelineFactory(new RemoteClientPipelineFactory(futures, supervisors))
bootstrap.setPipelineFactory(new RemoteClientPipelineFactory(name, futures, supervisors))
bootstrap.setOption("tcpNoDelay", true)
bootstrap.setOption("keepAlive", true)
@ -84,6 +91,10 @@ class RemoteClient(hostname: String, port: Int) extends Logging {
}
def send(request: RemoteRequest): Option[CompletableFutureResult] = if (isRunning) {
if (Management.RECORD_STATS) {
NR_OF_BYTES_SENT.incr(request.getSerializedSize)
NR_OF_MESSAGES_SENT.incr
}
if (request.getIsOneWay) {
connection.getChannel.write(request)
None
@ -111,15 +122,16 @@ class RemoteClient(hostname: String, port: Int) extends Logging {
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class RemoteClientPipelineFactory(futures: ConcurrentMap[Long, CompletableFutureResult],
supervisors: ConcurrentMap[String, Actor]) extends ChannelPipelineFactory {
class RemoteClientPipelineFactory(name: String,
futures: ConcurrentMap[Long, CompletableFutureResult],
supervisors: ConcurrentMap[String, Actor]) extends ChannelPipelineFactory {
def getPipeline: ChannelPipeline = {
val p = Channels.pipeline()
p.addLast("frameDecoder", new LengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4));
p.addLast("protobufDecoder", new ProtobufDecoder(RemoteReply.getDefaultInstance));
p.addLast("frameEncoder", new LengthFieldPrepender(4));
p.addLast("protobufEncoder", new ProtobufEncoder());
p.addLast("handler", new RemoteClientHandler(futures, supervisors))
p.addLast("handler", new RemoteClientHandler(name, futures, supervisors))
p
}
}
@ -128,10 +140,14 @@ class RemoteClientPipelineFactory(futures: ConcurrentMap[Long, CompletableFuture
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@ChannelPipelineCoverage { val value = "all" }
class RemoteClientHandler(val futures: ConcurrentMap[Long, CompletableFutureResult],
class RemoteClientHandler(val name: String,
val futures: ConcurrentMap[Long, CompletableFutureResult],
val supervisors: ConcurrentMap[String, Actor])
extends SimpleChannelUpstreamHandler with Logging {
val NR_OF_BYTES_RECEIVED = Stats.getCounter("NrOfBytesReceived_" + name)
val NR_OF_MESSAGES_RECEIVED = Stats.getCounter("NrOfMessagesReceived_" + name)
override def handleUpstream(ctx: ChannelHandlerContext, event: ChannelEvent) = {
if (event.isInstanceOf[ChannelStateEvent] && event.asInstanceOf[ChannelStateEvent].getState != ChannelState.INTEREST_OPS) {
log.debug(event.toString)
@ -144,6 +160,10 @@ class RemoteClientHandler(val futures: ConcurrentMap[Long, CompletableFutureResu
val result = event.getMessage
if (result.isInstanceOf[RemoteReply]) {
val reply = result.asInstanceOf[RemoteReply]
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_RECEIVED.incr
NR_OF_BYTES_RECEIVED.incr(reply.getSerializedSize)
}
log.debug("Received RemoteReply[\n%s]", reply.toString)
val future = futures.get(reply.getId)
if (reply.getIsSuccessful) {
@ -159,7 +179,7 @@ class RemoteClientHandler(val futures: ConcurrentMap[Long, CompletableFutureResu
}
future.completeWithException(null, parseException(reply))
}
futures.remove(reply.getId)
futures.remove(reply.getId)
} else throw new IllegalArgumentException("Unknown message received in remote client handler: " + result)
} catch {
case e: Exception =>

View file

@ -13,6 +13,7 @@ import kernel.util._
import protobuf.RemoteProtocol
import protobuf.RemoteProtocol.{RemoteReply, RemoteRequest}
import serialization.{Serializer, Serializable, SerializationProtocol}
import kernel.management.Management
import org.jboss.netty.bootstrap.ServerBootstrap
import org.jboss.netty.channel._
@ -20,22 +21,28 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
import org.jboss.netty.handler.codec.frame.{LengthFieldBasedFrameDecoder, LengthFieldPrepender}
import org.jboss.netty.handler.codec.protobuf.{ProtobufDecoder, ProtobufEncoder}
import com.twitter.service.Stats
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class RemoteServer extends Logging {
def start = RemoteServer.start
def start = RemoteServer.start(None)
}
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object RemoteServer extends Logging {
val HOSTNAME = kernel.Kernel.config.getString("akka.remote.hostname", "localhost")
val PORT = kernel.Kernel.config.getInt("akka.remote.port", 9999)
val CONNECTION_TIMEOUT_MILLIS = kernel.Kernel.config.getInt("akka.remote.connection-timeout", 1000)
import kernel.Kernel.config
val HOSTNAME = config.getString("akka.remote.hostname", "localhost")
val PORT = config.getInt("akka.remote.port", 9999)
val CONNECTION_TIMEOUT_MILLIS = config.getInt("akka.remote.connection-timeout", 1000)
val name = "RemoteServer@" + HOSTNAME
@volatile private var isRunning = false
@volatile private var isConfigured = false
private val factory = new NioServerSocketChannelFactory(
Executors.newCachedThreadPool,
@ -44,18 +51,15 @@ object RemoteServer extends Logging {
private val activeObjectFactory = new ActiveObjectFactory
private val bootstrap = new ServerBootstrap(factory)
// FIXME provide different codecs (Thrift, Avro, Protobuf, JSON)
private val handler = new RemoteServerHandler
bootstrap.setPipelineFactory(new RemoteServerPipelineFactory)
bootstrap.setOption("child.tcpNoDelay", true)
bootstrap.setOption("child.keepAlive", true)
bootstrap.setOption("child.reuseAddress", true)
bootstrap.setOption("child.connectTimeoutMillis", CONNECTION_TIMEOUT_MILLIS)
def start = synchronized {
def start(loader: Option[ClassLoader]) = synchronized {
if (!isRunning) {
log.info("Starting remote server at [%s:%s]", HOSTNAME, PORT)
bootstrap.setPipelineFactory(new RemoteServerPipelineFactory(name, loader))
bootstrap.setOption("child.tcpNoDelay", true)
bootstrap.setOption("child.keepAlive", true)
bootstrap.setOption("child.reuseAddress", true)
bootstrap.setOption("child.connectTimeoutMillis", CONNECTION_TIMEOUT_MILLIS)
bootstrap.bind(new InetSocketAddress(HOSTNAME, PORT))
isRunning = true
}
@ -65,14 +69,14 @@ object RemoteServer extends Logging {
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class RemoteServerPipelineFactory extends ChannelPipelineFactory {
class RemoteServerPipelineFactory(name: String, loader: Option[ClassLoader]) extends ChannelPipelineFactory {
def getPipeline: ChannelPipeline = {
val p = Channels.pipeline()
p.addLast("frameDecoder", new LengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4))
p.addLast("protobufDecoder", new ProtobufDecoder(RemoteProtocol.RemoteRequest.getDefaultInstance))
p.addLast("frameEncoder", new LengthFieldPrepender(4))
p.addLast("protobufEncoder", new ProtobufEncoder)
p.addLast("handler", new RemoteServerHandler)
p.addLast("handler", new RemoteServerHandler(name, loader))
p
}
}
@ -81,7 +85,12 @@ class RemoteServerPipelineFactory extends ChannelPipelineFactory {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
@ChannelPipelineCoverage { val value = "all" }
class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassLoader]) extends SimpleChannelUpstreamHandler with Logging {
val NR_OF_BYTES_SENT = Stats.getCounter("NrOfBytesSent_" + name)
val NR_OF_BYTES_RECEIVED = Stats.getCounter("NrOfBytesReceived_" + name)
val NR_OF_MESSAGES_SENT = Stats.getCounter("NrOfMessagesSent_" + name)
val NR_OF_MESSAGES_RECEIVED = Stats.getCounter("NrOfMessagesReceived_" + name)
private val activeObjectFactory = new ActiveObjectFactory
private val activeObjects = new ConcurrentHashMap[String, AnyRef]
private val actors = new ConcurrentHashMap[String, Actor]
@ -106,6 +115,10 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
}
private def handleRemoteRequest(request: RemoteRequest, channel: Channel) = {
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_RECEIVED.incr
NR_OF_BYTES_RECEIVED.incr(request.getSerializedSize)
}
log.debug("Received RemoteRequest[\n%s]", request.toString)
if (request.getIsActor) dispatchToActor(request, channel)
else dispatchToActiveObject(request, channel)
@ -128,7 +141,12 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
.setIsActor(true)
RemoteProtocolBuilder.setMessage(result, replyBuilder)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
channel.write(replyBuilder.build)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_SENT.incr
NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
}
} catch {
case e: Throwable =>
log.error("Could not invoke remote actor [%s] due to: %s", request.getTarget, e)
@ -139,7 +157,12 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
.setIsSuccessful(false)
.setIsActor(true)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
channel.write(replyBuilder.build)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_SENT.incr
NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
}
}
}
}
@ -165,7 +188,12 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
.setIsActor(false)
RemoteProtocolBuilder.setMessage(result, replyBuilder)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
channel.write(replyBuilder.build)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_SENT.incr
NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
}
}
} catch {
case e: InvocationTargetException =>
@ -176,8 +204,13 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
.setException(e.getCause.getClass.getName + "$" + e.getCause.getMessage)
.setIsSuccessful(false)
.setIsActor(false)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
channel.write(replyBuilder.build)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_SENT.incr
NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
}
case e: Throwable =>
log.error("Could not invoke remote active object [%s :: %s] due to: %s", request.getMethod, request.getTarget, e)
e.printStackTrace
@ -186,8 +219,13 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
.setException(e.getClass.getName + "$" + e.getMessage)
.setIsSuccessful(false)
.setIsActor(false)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
channel.write(replyBuilder.build)
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
if (Management.RECORD_STATS) {
NR_OF_MESSAGES_SENT.incr
NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
}
}
}
@ -223,8 +261,9 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
private def createActiveObject(name: String, timeout: Long): AnyRef = {
val activeObjectOrNull = activeObjects.get(name)
if (activeObjectOrNull == null) {
val clazz = Class.forName(name)
try {
val clazz = if (applicationLoader.isDefined) applicationLoader.get.loadClass(name)
else Class.forName(name)
val newInstance = activeObjectFactory.newInstance(clazz, timeout).asInstanceOf[AnyRef]
activeObjects.put(name, newInstance)
newInstance
@ -240,8 +279,9 @@ class RemoteServerHandler extends SimpleChannelUpstreamHandler with Logging {
private def createActor(name: String, timeout: Long): Actor = {
val actorOrNull = actors.get(name)
if (actorOrNull == null) {
val clazz = Class.forName(name)
try {
val clazz = if (applicationLoader.isDefined) applicationLoader.get.loadClass(name)
else Class.forName(name)
val newInstance = clazz.newInstance.asInstanceOf[Actor]
newInstance.timeout = timeout
actors.put(name, newInstance)

View file

@ -49,17 +49,17 @@ class DispatcherFactory {
* Creates an event based dispatcher serving multiple (millions) of actors through a thread pool.
* Has a fluent builder interface for configuring its semantics.
*/
def newEventBasedThreadPoolDispatcher = new EventBasedThreadPoolDispatcher
def newConcurrentEventBasedThreadPoolDispatcher = new EventBasedThreadPoolDispatcher(true)
def newEventBasedThreadPoolDispatcher(name: String) = new EventBasedThreadPoolDispatcher(name)
def newConcurrentEventBasedThreadPoolDispatcher(name: String) = new EventBasedThreadPoolDispatcher(name, true)
/**
* Creates an event based dispatcher serving multiple (millions) of actors through a single thread.
*/
def newEventBasedSingleThreadDispatcher = new EventBasedSingleThreadDispatcher
def newEventBasedSingleThreadDispatcher(name: String) = new EventBasedSingleThreadDispatcher(name)
/**
* Creates an thread based dispatcher serving a single actor through the same single thread.
* E.g. each actor consumes its own thread.
*/
def newThreadBasedDispatcher(actor: Actor) = new ThreadBasedDispatcher(actor)
}
}

View file

@ -10,9 +10,14 @@
*/
package se.scalablesolutions.akka.kernel.reactor
import kernel.management.Management
import java.util.{LinkedList, Queue, List}
class EventBasedSingleThreadDispatcher extends MessageDispatcherBase {
import com.twitter.service.Stats
class EventBasedSingleThreadDispatcher(name: String) extends MessageDispatcherBase(name) {
val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessage_" + name)
def start = if (!active) {
active = true
val messageDemultiplexer = new EventBasedSingleThreadDemultiplexer(queue)
@ -22,12 +27,14 @@ class EventBasedSingleThreadDispatcher extends MessageDispatcherBase {
try {
messageDemultiplexer.select
} catch { case e: InterruptedException => active = false }
val selectedInvocations = messageDemultiplexer.acquireSelectedInvocations.iterator
while (selectedInvocations.hasNext) {
val invocation = selectedInvocations.next
val selectedInvocations = messageDemultiplexer.acquireSelectedInvocations
if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr(selectedInvocations.size)
val iter = selectedInvocations.iterator
while (iter.hasNext) {
val invocation = iter.next
val invoker = messageHandlers.get(invocation.sender)
if (invoker != null) invoker.invoke(invocation)
selectedInvocations.remove
iter.remove
}
}
}

View file

@ -4,12 +4,16 @@
package se.scalablesolutions.akka.kernel.reactor
import kernel.management.{Management, ThreadPoolMBean}
import java.util.concurrent._
import locks.ReentrantLock
import atomic.{AtomicLong, AtomicInteger}
import ThreadPoolExecutor.CallerRunsPolicy
import java.util.{Collection, HashSet, HashMap, LinkedList, List}
import com.twitter.service.Stats
/**
* Implements the Reactor pattern as defined in: [http://www.cs.wustl.edu/~schmidt/PDF/reactor-siemens.pdf].<br/>
* See also this article: [http://today.java.net/cs/user/print/a/350].
@ -56,16 +60,17 @@ import java.util.{Collection, HashSet, HashMap, LinkedList, List}
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extends MessageDispatcherBase {
def this() = this(false)
class EventBasedThreadPoolDispatcher(name: String, private val concurrentMode: Boolean) extends MessageDispatcherBase(name) {
def this(name: String) = this(name, false)
val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessages_" + name)
private val NR_START_THREADS = 16
private val NR_MAX_THREADS = 128
private val KEEP_ALIVE_TIME = 60000L // default is one minute
private var inProcessOfBuilding = false
private var executor: ExecutorService = _
private var threadPoolBuilder: ThreadPoolExecutor = _
private val threadFactory = new MonitorableThreadFactory("akka")
private val threadFactory = new MonitorableThreadFactory("akka:" + name)
private var boundedExecutorBound = -1
private val busyInvokers = new HashSet[AnyRef]
@ -74,6 +79,7 @@ class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extend
def start = if (!active) {
active = true
Management.registerMBean(new ThreadPoolMBean(threadPoolBuilder), "ThreadPool_" + name)
/**
* This dispatcher code is based on code from the actorom actor framework by Sergio Bossa [http://code.google.com/p/actorom/].
@ -89,6 +95,7 @@ class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extend
} catch { case e: InterruptedException => active = false }
val selectedInvocations = messageDemultiplexer.acquireSelectedInvocations
val reservedInvocations = reserve(selectedInvocations)
if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr(reservedInvocations.size)
val it = reservedInvocations.entrySet.iterator
while (it.hasNext) {
val entry = it.next
@ -157,6 +164,7 @@ class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extend
ensureNotActive
verifyNotInConstructionPhase
inProcessOfBuilding = false
blockingQueue = queue
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, queue)
this
}
@ -169,7 +177,8 @@ class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extend
def withNewThreadPoolWithBoundedBlockingQueue(bound: Int): EventBasedThreadPoolDispatcher = synchronized {
ensureNotActive
verifyNotInConstructionPhase
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory)
blockingQueue = new LinkedBlockingQueue[Runnable]
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, blockingQueue, threadFactory)
boundedExecutorBound = bound
this
}
@ -177,28 +186,32 @@ class EventBasedThreadPoolDispatcher(private val concurrentMode: Boolean) extend
def withNewThreadPoolWithLinkedBlockingQueueWithCapacity(capacity: Int): EventBasedThreadPoolDispatcher = synchronized {
ensureNotActive
verifyNotInConstructionPhase
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, new LinkedBlockingQueue[Runnable](capacity), threadFactory, new CallerRunsPolicy)
blockingQueue = new LinkedBlockingQueue[Runnable](capacity)
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, blockingQueue, threadFactory, new CallerRunsPolicy)
this
}
def withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity: EventBasedThreadPoolDispatcher = synchronized {
ensureNotActive
verifyNotInConstructionPhase
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory, new CallerRunsPolicy)
blockingQueue = new LinkedBlockingQueue[Runnable]
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, blockingQueue, threadFactory, new CallerRunsPolicy)
this
}
def withNewThreadPoolWithSynchronousQueueWithFairness(fair: Boolean): EventBasedThreadPoolDispatcher = synchronized {
ensureNotActive
verifyNotInConstructionPhase
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, new SynchronousQueue[Runnable](fair), threadFactory, new CallerRunsPolicy)
blockingQueue = new SynchronousQueue[Runnable](fair)
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, blockingQueue, threadFactory, new CallerRunsPolicy)
this
}
def withNewThreadPoolWithArrayBlockingQueueWithCapacityAndFairness(capacity: Int, fair: Boolean): EventBasedThreadPoolDispatcher = synchronized {
ensureNotActive
verifyNotInConstructionPhase
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, new ArrayBlockingQueue[Runnable](capacity, fair), threadFactory, new CallerRunsPolicy)
blockingQueue = new ArrayBlockingQueue[Runnable](capacity, fair)
threadPoolBuilder = new ThreadPoolExecutor(NR_START_THREADS, NR_MAX_THREADS, KEEP_ALIVE_TIME, MILLISECONDS, blockingQueue, threadFactory, new CallerRunsPolicy)
this
}
@ -311,13 +324,7 @@ class BoundedExecutorDecorator(val executor: ExecutorService, bound: Int) extend
def invokeAll[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAll(callables)
def invokeAll[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAll(callables, l, timeUnit)
def invokeAny[T](callables: Collection[_ <: Callable[T]]) = executor.invokeAny(callables)
def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAny(callables, l, timeUnit)
/*
def invokeAll[T](callables: Collection[Callable[T]]) = executor.invokeAll(callables)
def invokeAll[T](callables: Collection[Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAll(callables, l, timeUnit)
def invokeAny[T](callables: Collection[Callable[T]]) = executor.invokeAny(callables)
def invokeAny[T](callables: Collection[Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAny(callables, l, timeUnit)
*/
def invokeAny[T](callables: Collection[_ <: Callable[T]], l: Long, timeUnit: TimeUnit) = executor.invokeAny(callables, l, timeUnit)
}
/**

View file

@ -4,20 +4,31 @@
package se.scalablesolutions.akka.kernel.reactor
import kernel.management.Management
import java.util.{LinkedList, Queue, List}
import java.util.concurrent.TimeUnit
import java.util.concurrent.{TimeUnit, BlockingQueue}
import java.util.HashMap
trait MessageDispatcherBase extends MessageDispatcher {
import com.twitter.service.Stats
abstract class MessageDispatcherBase(val name: String) extends MessageDispatcher {
//val CONCURRENT_MODE = kernel.Kernel.config.getBool("akka.actor.concurrent-mode", false)
val MILLISECONDS = TimeUnit.MILLISECONDS
val queue = new ReactiveMessageQueue
val queue = new ReactiveMessageQueue(name)
var blockingQueue: BlockingQueue[Runnable] = _
@volatile protected var active: Boolean = false
protected val messageHandlers = new HashMap[AnyRef, MessageInvoker]
protected var selectorThread: Thread = _
protected val guard = new Object
if (Management.RECORD_STATS) {
Stats.makeGauge("SizeOfBlockingQueue_" + name) {
guard.synchronized { blockingQueue.size.toDouble }
}
}
def messageQueue = queue
def registerHandler(key: AnyRef, handler: MessageInvoker) = guard.synchronized {
@ -40,10 +51,16 @@ trait MessageDispatcherBase extends MessageDispatcher {
protected def doShutdown = {}
}
class ReactiveMessageQueue extends MessageQueue {
class ReactiveMessageQueue(name: String) extends MessageQueue {
private[kernel] val queue: Queue[MessageInvocation] = new LinkedList[MessageInvocation]
@volatile private var interrupted = false
if (Management.RECORD_STATS) {
Stats.makeGauge("SizeOfReactiveQueue_" + name) {
queue.synchronized { queue.size.toDouble }
}
}
def append(handle: MessageInvocation) = queue.synchronized {
queue.offer(handle)
queue.notifyAll
@ -64,4 +81,4 @@ class ReactiveMessageQueue extends MessageQueue {
interrupted = true
queue.notifyAll
}
}
}

View file

@ -4,18 +4,24 @@
package se.scalablesolutions.akka.kernel.reactor
import com.twitter.service.Stats
import java.util.concurrent.LinkedBlockingQueue
import java.util.Queue
import kernel.actor.{Actor, ActorMessageInvoker}
import kernel.management.Management
/**
* Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue.
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class ThreadBasedDispatcher private[kernel] (val messageHandler: MessageInvoker) extends MessageDispatcher {
def this(actor: Actor) = this(new ActorMessageInvoker(actor))
class ThreadBasedDispatcher private[kernel] (val name: String, val messageHandler: MessageInvoker) extends MessageDispatcher {
def this(actor: Actor) = this(actor.getClass.getName, new ActorMessageInvoker(actor))
private val queue = new BlockingMessageQueue
val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessages_" + name)
private val queue = new BlockingMessageQueue(name)
private var selectorThread: Thread = _
@volatile private var active: Boolean = false
@ -27,6 +33,7 @@ class ThreadBasedDispatcher private[kernel] (val messageHandler: MessageInvoker)
override def run = {
while (active) {
try {
if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr
messageHandler.invoke(queue.take)
} catch { case e: InterruptedException => active = false }
}
@ -44,7 +51,13 @@ class ThreadBasedDispatcher private[kernel] (val messageHandler: MessageInvoker)
def unregisterHandler(key: AnyRef) = throw new UnsupportedOperationException
}
class BlockingMessageQueue extends MessageQueue {
class BlockingMessageQueue(name: String) extends MessageQueue {
if (Management.RECORD_STATS) {
Stats.makeGauge("SizeOfBlockingQueue_" + name) {
queue.size.toDouble
}
}
// FIXME: configure the LBQ
private val queue = new LinkedBlockingQueue[MessageInvocation]
def append(handle: MessageInvocation) = queue.put(handle)
@ -52,4 +65,4 @@ class BlockingMessageQueue extends MessageQueue {
def take: MessageInvocation = queue.take
def read(destination: Queue[MessageInvocation]) = throw new UnsupportedOperationException
def interrupt = throw new UnsupportedOperationException
}
}

View file

@ -77,7 +77,6 @@ object Serializer {
message.toBuilder().mergeFrom(bytes).build
}
// For Java
def in(bytes: Array[Byte], clazz: Class[_]): AnyRef = {
if (clazz == null) throw new IllegalArgumentException("Protobuf message can't be null")
in(bytes, Some(clazz))

View file

@ -0,0 +1,240 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.state
import java.io.{Flushable, Closeable}
import util.Logging
import util.Helpers._
import serialization.Serializer
import kernel.Kernel.config
import org.apache.cassandra.db.ColumnFamily
import org.apache.cassandra.service._
import org.apache.thrift.transport._
import org.apache.thrift.protocol._
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
/**
 * A single client session against a Cassandra keyspace, obtained from a
 * [[CassandraSessionPool]].  Exposes the Thrift client operations both as
 * symbolic operators (`/`, `|`, `++|`, `--`, ...) and as spelled-out named
 * methods (`getRange`, `getColumn`, `insertColumn`, `removeColumn`, ...) —
 * each named method has a body identical to its symbolic twin.
 *
 * Every operation has an overload that takes an explicit consistency level;
 * the shorter overloads delegate using this session's `consistencyLevel`.
 */
trait CassandraSession extends Closeable with Flushable {
  import scala.collection.jcl.Conversions._
  import org.scala_tools.javautils.Imports._
  import java.util.{Map => JMap}

  // The underlying Thrift client and the keyspace all calls are scoped to.
  protected val client: Cassandra.Client
  protected val keyspace: String

  // Timestamp (millis) captured when the session was created; used as the
  // default write timestamp by the timestamp-less insert overloads.
  val obtainedAt: Long
  // Default Cassandra consistency level for the short-form overloads.
  val consistencyLevel: Int
  // Keyspace schema as reported by the server at session-creation time.
  val schema: JMap[String, JMap[String, String]]

  /**
   * Count is always the max number of results to return.
   So it means, starting with `start`, or the first one if start is
   empty, go until you hit `finish` or `count`, whichever comes first.
   Empty is not a legal column name so if finish is empty it is ignored
   and only count is used.
   We don't offer a numeric offset since that can't be supported
   efficiently with a log-structured merge disk format.
   */
  // --- column slices ('/' == getRange) ---
  def /(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int): List[Column] =
    /(key, columnParent, start, end, ascending, count, consistencyLevel)

  def /(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int, consistencyLevel: Int): List[Column] =
    client.get_slice(keyspace, key, columnParent, start, end, ascending, count, consistencyLevel).toList

  def /(key: String, columnParent: ColumnParent, colNames: List[Array[Byte]]): List[Column] =
    /(key, columnParent, colNames, consistencyLevel)

  def /(key: String, columnParent: ColumnParent, colNames: List[Array[Byte]], consistencyLevel: Int): List[Column] =
    client.get_slice_by_names(keyspace, key, columnParent, colNames.asJava, consistencyLevel).toList

  // --- single column lookup ('|' == getColumn) ---
  def |(key: String, colPath: ColumnPath): Option[Column] =
    |(key, colPath, consistencyLevel)

  def |(key: String, colPath: ColumnPath, consistencyLevel: Int): Option[Column] =
    client.get_column(keyspace, key, colPath, consistencyLevel)

  // --- column count ('|#' == getColumnCount) ---
  def |#(key: String, columnParent: ColumnParent): Int =
    |#(key, columnParent, consistencyLevel)

  def |#(key: String, columnParent: ColumnParent, consistencyLevel: Int): Int =
    client.get_column_count(keyspace, key, columnParent, consistencyLevel)

  // --- inserts ('++|' == insertColumn); default timestamp is obtainedAt ---
  def ++|(key: String, colPath: ColumnPath, value: Array[Byte]): Unit =
    ++|(key, colPath, value, obtainedAt, consistencyLevel)

  def ++|(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long): Unit =
    ++|(key, colPath, value, timestamp, consistencyLevel)

  def ++|(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long, consistencyLevel: Int) =
    client.insert(keyspace, key, colPath, value, timestamp, consistencyLevel)

  def ++|(batch: BatchMutation): Unit =
    ++|(batch, consistencyLevel)

  def ++|(batch: BatchMutation, consistencyLevel: Int): Unit =
    client.batch_insert(keyspace, batch, consistencyLevel)

  // --- removal ('--' == removeColumn) ---
  def --(key: String, columnPathOrParent: ColumnPathOrParent, timestamp: Long): Unit =
    --(key, columnPathOrParent, timestamp, consistencyLevel)

  def --(key: String, columnPathOrParent: ColumnPathOrParent, timestamp: Long, consistencyLevel: Int): Unit =
    client.remove(keyspace, key, columnPathOrParent, timestamp, consistencyLevel)

  // --- super-column slices ('/^' == getSuperRange) ---
  def /^(key: String, columnFamily: String, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int): List[SuperColumn] =
    /^(key, columnFamily, start, end, ascending, count, consistencyLevel)

  def /^(key: String, columnFamily: String, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int, consistencyLevel: Int): List[SuperColumn] =
    client.get_slice_super(keyspace, key, columnFamily, start, end, ascending, count, consistencyLevel).toList

  def /^(key: String, columnFamily: String, superColNames: List[Array[Byte]]): List[SuperColumn] =
    /^(key, columnFamily, superColNames, consistencyLevel)

  def /^(key: String, columnFamily: String, superColNames: List[Array[Byte]], consistencyLevel: Int): List[SuperColumn] =
    client.get_slice_super_by_names(keyspace, key, columnFamily, superColNames.asJava, consistencyLevel).toList

  // --- single super-column lookup ('|^' == getSuperColumn) ---
  def |^(key: String, superColumnPath: SuperColumnPath): Option[SuperColumn] =
    |^(key, superColumnPath, consistencyLevel)

  def |^(key: String, superColumnPath: SuperColumnPath, consistencyLevel: Int): Option[SuperColumn] =
    client.get_super_column(keyspace, key, superColumnPath, consistencyLevel)

  // --- super-column batch insert ('++|^' == insertSuperColumn) ---
  def ++|^(batch: BatchMutationSuper): Unit =
    ++|^(batch, consistencyLevel)

  def ++|^(batch: BatchMutationSuper, consistencyLevel: Int): Unit =
    client.batch_insert_super_column(keyspace, batch, consistencyLevel)

  // --- named aliases of the symbolic API above (bodies are identical) ---
  def getRange(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int): List[Column] =
    getRange(key, columnParent, start, end, ascending, count, consistencyLevel)

  def getRange(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int, consistencyLevel: Int): List[Column] =
    client.get_slice(keyspace, key, columnParent, start, end, ascending, count, consistencyLevel).toList

  def getRange(key: String, columnParent: ColumnParent, colNames: List[Array[Byte]]): List[Column] =
    getRange(key, columnParent, colNames, consistencyLevel)

  def getRange(key: String, columnParent: ColumnParent, colNames: List[Array[Byte]], consistencyLevel: Int): List[Column] =
    client.get_slice_by_names(keyspace, key, columnParent, colNames.asJava, consistencyLevel).toList

  def getColumn(key: String, colPath: ColumnPath): Option[Column] =
    getColumn(key, colPath, consistencyLevel)

  def getColumn(key: String, colPath: ColumnPath, consistencyLevel: Int): Option[Column] =
    client.get_column(keyspace, key, colPath, consistencyLevel)

  def getColumnCount(key: String, columnParent: ColumnParent): Int =
    getColumnCount(key, columnParent, consistencyLevel)

  def getColumnCount(key: String, columnParent: ColumnParent, consistencyLevel: Int): Int =
    client.get_column_count(keyspace, key, columnParent, consistencyLevel)

  def insertColumn(key: String, colPath: ColumnPath, value: Array[Byte]): Unit =
    insertColumn(key, colPath, value, obtainedAt, consistencyLevel)

  def insertColumn(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long): Unit =
    insertColumn(key, colPath, value, timestamp, consistencyLevel)

  def insertColumn(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long, consistencyLevel: Int) =
    client.insert(keyspace, key, colPath, value, timestamp, consistencyLevel)

  def insertColumn(batch: BatchMutation): Unit =
    insertColumn(batch, consistencyLevel)

  def insertColumn(batch: BatchMutation, consistencyLevel: Int): Unit =
    client.batch_insert(keyspace, batch, consistencyLevel)

  def removeColumn(key: String, columnPathOrParent: ColumnPathOrParent, timestamp: Long): Unit =
    removeColumn(key, columnPathOrParent, timestamp, consistencyLevel)

  def removeColumn(key: String, columnPathOrParent: ColumnPathOrParent, timestamp: Long, consistencyLevel: Int): Unit =
    client.remove(keyspace, key, columnPathOrParent, timestamp, consistencyLevel)

  def getSuperRange(key: String, columnFamily: String, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int): List[SuperColumn] =
    getSuperRange(key, columnFamily, start, end, ascending, count, consistencyLevel)

  def getSuperRange(key: String, columnFamily: String, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int, consistencyLevel: Int): List[SuperColumn] =
    client.get_slice_super(keyspace, key, columnFamily, start, end, ascending, count, consistencyLevel).toList

  def getSuperRange(key: String, columnFamily: String, superColNames: List[Array[Byte]]): List[SuperColumn] =
    getSuperRange(key, columnFamily, superColNames, consistencyLevel)

  def getSuperRange(key: String, columnFamily: String, superColNames: List[Array[Byte]], consistencyLevel: Int): List[SuperColumn] =
    client.get_slice_super_by_names(keyspace, key, columnFamily, superColNames.asJava, consistencyLevel).toList

  def getSuperColumn(key: String, superColumnPath: SuperColumnPath): Option[SuperColumn] =
    getSuperColumn(key, superColumnPath, consistencyLevel)

  def getSuperColumn(key: String, superColumnPath: SuperColumnPath, consistencyLevel: Int): Option[SuperColumn] =
    client.get_super_column(keyspace, key, superColumnPath, consistencyLevel)

  def insertSuperColumn(batch: BatchMutationSuper): Unit =
    insertSuperColumn(batch, consistencyLevel)

  def insertSuperColumn(batch: BatchMutationSuper, consistencyLevel: Int): Unit =
    client.batch_insert_super_column(keyspace, batch, consistencyLevel)

  // Key-range scan over a column family; maxResults of None maps to -1,
  // which presumably means "no limit" on the server side — TODO confirm.
  def keys(columnFamily: String, startsWith: String, stopsAt: String, maxResults: Option[Int]): List[String] =
    client.get_key_range(keyspace, columnFamily, startsWith, stopsAt, maxResults.getOrElse(-1)).toList
}
/**
 * Pools Thrift transports and hands out [[CassandraSession]]s built on top
 * of them.  Closing a session returns its transport to the pool; closing the
 * pool closes all pooled transports.
 *
 * @param space          keyspace every session operates on
 * @param transportPool  pool of Thrift transports to borrow from
 * @param inputProtocol  protocol used for reading from the transport
 * @param outputProtocol protocol used for writing to the transport
 * @param consistency    default consistency level for new sessions
 */
class CassandraSessionPool[T <: TTransport](
  space: String,
  transportPool: Pool[T],
  inputProtocol: Protocol,
  outputProtocol: Protocol,
  consistency: Int) extends Closeable with Logging {

  /** Convenience constructor using the same protocol for input and output. */
  def this(space: String, transportPool: Pool[T], ioProtocol: Protocol, consistency: Int) =
    this (space, transportPool, ioProtocol, ioProtocol, consistency)

  /** Creates a session at the pool's default consistency level. */
  def newSession: CassandraSession = newSession(consistency)

  /**
   * Borrows a transport and builds a session on it at the given consistency
   * level.  The transport is returned to the pool when the session is closed,
   * or immediately if session construction fails.
   */
  def newSession(consistencyLevel: Int): CassandraSession = {
    // Capture the argument under another name: inside the anonymous class the
    // identifier `consistencyLevel` refers to the session's own val.
    val sessionConsistency = consistencyLevel
    val socket = transportPool.borrowObject
    try {
      val cassandraClient = new Cassandra.Client(inputProtocol(socket), outputProtocol(socket))
      val cassandraSchema = cassandraClient.describe_keyspace(space)
      new CassandraSession {
        val keyspace = space
        val client = cassandraClient
        val obtainedAt = System.currentTimeMillis
        // BUG FIX: was `consistency` (the pool default), which silently
        // ignored the level the caller asked this method for.
        val consistencyLevel = sessionConsistency
        val schema = cassandraSchema
        log.debug("Creating %s", toString)
        def flush = socket.flush
        def close = transportPool.returnObject(socket)
        override def toString = "[CassandraSession]\n\tkeyspace = " + keyspace + "\n\tschema = " + schema
      }
    } catch {
      case e =>
        // BUG FIX: return the borrowed transport on failure (e.g. if
        // describe_keyspace throws) instead of leaking it from the pool.
        transportPool.returnObject(socket)
        throw e
    }
  }

  /**
   * Runs `body` with a fresh session, flushing on success and always closing
   * the session (returning its transport to the pool) afterwards.
   */
  def withSession[R](body: CassandraSession => R) = {  // renamed from T: was shadowing the class type parameter
    val session = newSession(consistency)
    try {
      val result = body(session)
      session.flush
      result
    } finally {
      session.close
    }
  }

  /** Shuts the pool down, closing all pooled transports. */
  def close = transportPool.close
}
/**
 * Thin wrapper around a Thrift protocol factory.  Applying an instance to a
 * transport yields a protocol object bound to that transport.
 */
sealed abstract class Protocol(val factory: TProtocolFactory) {
  def apply(transport: TTransport) = factory.getProtocol(transport)
}

/** The supported Thrift wire formats (sealed: these three are the only cases). */
object Protocol {
  object Binary extends Protocol(new TBinaryProtocol.Factory)
  object SimpleJSON extends Protocol(new TSimpleJSONProtocol.Factory)
  object JSON extends Protocol(new TJSONProtocol.Factory)
}

View file

@ -4,42 +4,46 @@
package se.scalablesolutions.akka.kernel.state
import java.io.File
import java.io.{Flushable, Closeable}
import kernel.util.Logging
import serialization.{Serializer, Serializable, SerializationProtocol}
import util.Logging
import util.Helpers._
import serialization.Serializer
import kernel.Kernel.config
import org.apache.cassandra.config.DatabaseDescriptor
import org.apache.cassandra.db.ColumnFamily
import org.apache.cassandra.service._
import org.apache.thrift.server.TThreadPoolServer
import org.apache.thrift.protocol.TBinaryProtocol
import org.apache.thrift.transport.TServerSocket
import org.apache.thrift.transport.TTransportFactory
import org.apache.thrift.TProcessorFactory
import org.apache.thrift.transport._
import org.apache.thrift.protocol._
/**
* NOTE: requires command line options:
* <br/>
* <code>-Dcassandra -Dstorage-config=config/ -Dpidfile=akka.pid</code>
* <p/>
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
object CassandraStorage extends Logging {
val TABLE_NAME = "akka"
val MAP_COLUMN_FAMILY = "map"
val VECTOR_COLUMN_FAMILY = "vector"
val REF_COLUMN_FAMILY = "ref:item"
object CassandraStorage extends MapStorage with VectorStorage with Logging {
val KEYSPACE = "akka"
val MAP_COLUMN_PARENT = new ColumnParent("map", null)
val VECTOR_COLUMN_PARENT = new ColumnParent("vector", null)
val REF_COLUMN_PARENT = new ColumnParent("ref", null)
val REF_KEY = "item".getBytes("UTF-8")
val CASSANDRA_SERVER_HOSTNAME = config.getString("akka.storage.cassandra.hostname", "127.0.0.1")
val CASSANDRA_SERVER_PORT = config.getInt("akka.storage.cassandra.port", 9160)
val CONSISTENCY_LEVEL = config.getInt("akka.storage.cassandra.consistency-level", 1)
val IS_ASCENDING = true
val RUN_THRIFT_SERVICE = kernel.Kernel.config.getBool("akka.storage.cassandra.thrift-server.service", false)
val BLOCKING_CALL = {
if (kernel.Kernel.config.getBool("akka.storage.cassandra.blocking", true)) 0
else 1
}
@volatile private[this] var isRunning = false
private[this] val protocol: Protocol = Protocol.Binary
/* {
config.getString("akka.storage.cassandra.procotol", "binary") match {
case "binary" => Protocol.Binary
case "json" => Protocol.JSON
case "simple-json" => Protocol.SimpleJSON
case unknown => throw new UnsupportedOperationException("Unknown storage serialization protocol [" + unknown + "]")
}
}
*/
private[this] val serializer: Serializer = {
kernel.Kernel.config.getString("akka.storage.cassandra.storage-format", "java") match {
case "scala-json" => Serializer.ScalaJSON
@ -51,193 +55,403 @@ object CassandraStorage extends Logging {
case unknown => throw new UnsupportedOperationException("Unknown storage serialization protocol [" + unknown + "]")
}
}
// TODO: is this server thread-safe or needed to be wrapped up in an actor?
private[this] val server = classOf[CassandraServer].newInstance.asInstanceOf[CassandraServer]
private[this] var thriftServer: CassandraThriftServer = _
private[this] var sessions: Option[CassandraSessionPool[_]] = None
def start = synchronized {
if (!isRunning) {
try {
server.start
sessions = Some(new CassandraSessionPool(
KEYSPACE,
StackPool(SocketProvider(CASSANDRA_SERVER_HOSTNAME, CASSANDRA_SERVER_PORT)),
protocol,
CONSISTENCY_LEVEL))
log.info("Cassandra persistent storage has started up successfully");
} catch {
case e =>
log.error("Could not start up Cassandra persistent storage")
throw e
}
if (RUN_THRIFT_SERVICE) {
thriftServer = new CassandraThriftServer(server)
thriftServer.start
}
isRunning
}
}
def stop = if (isRunning) {
//server.storageService.shutdown
if (RUN_THRIFT_SERVICE) thriftServer.stop
def stop = synchronized {
if (isRunning && sessions.isDefined) sessions.get.close
}
// ===============================================================
// For Ref
// ===============================================================
def insertRefStorageFor(name: String, element: AnyRef) = {
server.insert(
TABLE_NAME,
name,
REF_COLUMN_FAMILY,
serializer.out(element),
System.currentTimeMillis,
BLOCKING_CALL)
}
def getRefStorageFor(name: String): Option[AnyRef] = {
try {
val column = server.get_column(TABLE_NAME, name, REF_COLUMN_FAMILY)
Some(serializer.in(column.value, None))
} catch {
case e =>
e.printStackTrace
None
def insertRefStorageFor(name: String, element: AnyRef) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (name,
new ColumnPath(REF_COLUMN_PARENT.getColumn_family, null, REF_KEY),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
}
} else throw new IllegalStateException("CassandraStorage is not started")
// ===============================================================
// For Vector
// ===============================================================
def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
server.insert(
TABLE_NAME,
name,
VECTOR_COLUMN_FAMILY + ":" + getVectorStorageSizeFor(name),
serializer.out(element),
System.currentTimeMillis,
BLOCKING_CALL)
}
def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
def getRefStorageFor(name: String): Option[AnyRef] = if (sessions.isDefined) {
try {
val column = server.get_column(TABLE_NAME, name, VECTOR_COLUMN_FAMILY + ":" + index)
serializer.in(column.value, None)
} catch {
case e =>
e.printStackTrace
throw new Predef.NoSuchElementException(e.getMessage)
}
}
def getVectorStorageRangeFor(name: String, start: Int, count: Int): List[AnyRef] =
server.get_slice(TABLE_NAME, name, VECTOR_COLUMN_FAMILY, IS_ASCENDING, count)
.toArray.toList.asInstanceOf[List[Tuple2[String, AnyRef]]].map(tuple => tuple._2)
def getVectorStorageSizeFor(name: String): Int =
server.get_column_count(TABLE_NAME, name, VECTOR_COLUMN_FAMILY)
// ===============================================================
// For Map
// ===============================================================
def insertMapStorageEntryFor(name: String, key: String, value: AnyRef) = {
server.insert(
TABLE_NAME,
name,
MAP_COLUMN_FAMILY + ":" + key,
serializer.out(value),
System.currentTimeMillis,
BLOCKING_CALL)
}
def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[String, AnyRef]]) = {
import java.util.{Map, HashMap, List, ArrayList}
val columns: Map[String, List[column_t]] = new HashMap
for (entry <- entries) {
val cls: List[column_t] = new ArrayList
cls.add(new column_t(entry._1, serializer.out(entry._2), System.currentTimeMillis))
columns.put(MAP_COLUMN_FAMILY, cls)
}
server.batch_insert(new batch_mutation_t(
TABLE_NAME,
name,
columns),
BLOCKING_CALL)
}
def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef] = {
try {
val column = server.get_column(TABLE_NAME, name, MAP_COLUMN_FAMILY + ":" + key)
Some(serializer.in(column.value, None))
val column: Option[Column] = sessions.get.withSession {
_ | (name, new ColumnPath(REF_COLUMN_PARENT.getColumn_family, null, REF_KEY))
}
if (column.isDefined) Some(serializer.in(column.get.value, None))
else None
} catch {
case e =>
e.printStackTrace
None
}
} else throw new IllegalStateException("CassandraStorage is not started")
// ===============================================================
// For Vector
// ===============================================================
override def insertVectorStorageEntryFor(name: String, element: AnyRef) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (name,
new ColumnPath(VECTOR_COLUMN_PARENT.getColumn_family, null, intToBytes(getVectorStorageSizeFor(name))),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
override def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef]) = {
}
def getMapStorageFor(name: String): List[Tuple2[String, AnyRef]] = {
val columns = server.get_columns_since(TABLE_NAME, name, MAP_COLUMN_FAMILY, -1)
override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = if (sessions.isDefined) {
val column: Option[Column] = sessions.get.withSession {
_ | (name, new ColumnPath(VECTOR_COLUMN_PARENT.getColumn_family, null, intToBytes(index)))
}
if (column.isDefined) serializer.in(column.get.value, None)
else throw new NoSuchElementException("No element for vector [" + name + "] and index [" + index + "]")
} else throw new IllegalStateException("CassandraStorage is not started")
override def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = if (sessions.isDefined) {
val startBytes = if (start.isDefined) intToBytes(start.get) else null
val finishBytes = if (finish.isDefined) intToBytes(finish.get) else null
val columns: List[Column] = sessions.get.withSession {
_ / (name,
VECTOR_COLUMN_PARENT,
startBytes, finishBytes,
IS_ASCENDING,
count,
CONSISTENCY_LEVEL)
}
columns.map(column => serializer.in(column.value, None))
} else throw new IllegalStateException("CassandraStorage is not started")
override def getVectorStorageSizeFor(name: String): Int = if (sessions.isDefined) {
sessions.get.withSession {
_ |# (name, VECTOR_COLUMN_PARENT)
}
} else throw new IllegalStateException("CassandraStorage is not started")
// ===============================================================
// For Map
// ===============================================================
override def insertMapStorageEntryFor(name: String, key: AnyRef, element: AnyRef) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (name,
new ColumnPath(MAP_COLUMN_PARENT.getColumn_family, null, serializer.out(key)),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
override def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[AnyRef, AnyRef]]) = if (sessions.isDefined) {
val cf2columns: java.util.Map[String, java.util.List[Column]] = new java.util.HashMap
for (entry <- entries) {
val columns: java.util.List[Column] = new java.util.ArrayList
columns.add(new Column(serializer.out(entry._1), serializer.out(entry._2), System.currentTimeMillis))
cf2columns.put(MAP_COLUMN_PARENT.getColumn_family, columns)
}
sessions.get.withSession {
_ ++| (new BatchMutation(name, cf2columns), CONSISTENCY_LEVEL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
override def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef] = if (sessions.isDefined) {
try {
val column: Option[Column] = sessions.get.withSession {
_ | (name, new ColumnPath(MAP_COLUMN_PARENT.getColumn_family, null, serializer.out(key)))
}
if (column.isDefined) Some(serializer.in(column.get.value, None))
else None
} catch {
case e =>
e.printStackTrace
None
}
} else throw new IllegalStateException("CassandraStorage is not started")
override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = if (sessions.isDefined) {
throw new UnsupportedOperationException
/*
val columns = server.get_columns_since(name, MAP_COLUMN_FAMILY, -1)
.toArray.toList.asInstanceOf[List[org.apache.cassandra.service.column_t]]
for {
column <- columns
col = (column.columnName, serializer.in(column.value, None))
col = (column.columnName, column.value)
} yield col
}
def getMapStorageSizeFor(name: String): Int =
server.get_column_count(TABLE_NAME, name, MAP_COLUMN_FAMILY)
*/
} else throw new IllegalStateException("CassandraStorage is not started")
def removeMapStorageFor(name: String) =
server.remove(TABLE_NAME, name, MAP_COLUMN_FAMILY, System.currentTimeMillis, BLOCKING_CALL)
def getMapStorageRangeFor(name: String, start: Int, count: Int): List[Tuple2[String, AnyRef]] = {
server.get_slice(TABLE_NAME, name, MAP_COLUMN_FAMILY, IS_ASCENDING, count)
.toArray.toList.asInstanceOf[List[Tuple2[String, AnyRef]]]
}
}
class CassandraThriftServer(server: CassandraServer) extends Logging {
case object Start
case object Stop
private[this] val serverEngine: TThreadPoolServer = try {
val pidFile = kernel.Kernel.config.getString("akka.storage.cassandra.thrift-server.pidfile", "akka.pid")
if (pidFile != null) new File(pidFile).deleteOnExit();
val listenPort = DatabaseDescriptor.getThriftPort
val processor = new Cassandra.Processor(server)
val tServerSocket = new TServerSocket(listenPort)
val tProtocolFactory = new TBinaryProtocol.Factory
val options = new TThreadPoolServer.Options
options.minWorkerThreads = 64
new TThreadPoolServer(new TProcessorFactory(processor),
tServerSocket,
new TTransportFactory,
new TTransportFactory,
tProtocolFactory,
tProtocolFactory,
options)
} catch {
case e =>
log.error("Could not start up Cassandra thrift service")
throw e
}
import scala.actors.Actor._
private[this] val serverDaemon = actor {
receive {
case Start =>
serverEngine.serve
log.info("Cassandra thrift service has starting up successfully")
case Stop =>
log.info("Cassandra thrift service is shutting down...")
serverEngine.stop
override def getMapStorageSizeFor(name: String): Int = if (sessions.isDefined) {
sessions.get.withSession {
_ |# (name, MAP_COLUMN_PARENT)
}
}
} else throw new IllegalStateException("CassandraStorage is not started")
def start = serverDaemon ! Start
def stop = serverDaemon ! Stop
override def removeMapStorageFor(name: String): Unit = removeMapStorageFor(name, null)
override def removeMapStorageFor(name: String, key: AnyRef): Unit = if (sessions.isDefined) {
val keyBytes = if (key == null) null else serializer.out(key)
sessions.get.withSession {
_ -- (name,
new ColumnPathOrParent(MAP_COLUMN_PARENT.getColumn_family, null, keyBytes),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
override def getMapStorageRangeFor(name: String, start: Option[AnyRef], finish: Option[AnyRef], count: Int):
List[Tuple2[AnyRef, AnyRef]] = if (sessions.isDefined) {
val startBytes = if (start.isDefined) serializer.out(start.get) else null
val finishBytes = if (finish.isDefined) serializer.out(finish.get) else null
val columns: List[Column] = sessions.get.withSession {
_ / (name, MAP_COLUMN_PARENT, startBytes, finishBytes, IS_ASCENDING, count, CONSISTENCY_LEVEL)
}
columns.map(column => (column.name, serializer.in(column.value, None)))
} else throw new IllegalStateException("CassandraStorage is not started")
}
/**
* NOTE: requires command line options:
* <br/>
* <code>-Dcassandra -Dstorage-config=config/ -Dpidfile=akka.pid</code>
* <p/>
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*
object EmbeddedCassandraStorage extends Logging {
val KEYSPACE = "akka"
val MAP_COLUMN_FAMILY = "map"
val VECTOR_COLUMN_FAMILY = "vector"
val REF_COLUMN_FAMILY = "ref:item"
val IS_ASCENDING = true
val RUN_THRIFT_SERVICE = kernel.Kernel.config.getBool("akka.storage.cassandra.thrift-server.service", false)
val CONSISTENCY_LEVEL = {
if (kernel.Kernel.config.getBool("akka.storage.cassandra.blocking", true)) 0
else 1 }
@volatile private[this] var isRunning = false
private[this] val serializer: Serializer = {
kernel.Kernel.config.getString("akka.storage.cassandra.storage-format", "java") match {
case "scala-json" => Serializer.ScalaJSON
case "java-json" => Serializer.JavaJSON
case "protobuf" => Serializer.Protobuf
case "java" => Serializer.Java
case "sbinary" => throw new UnsupportedOperationException("SBinary serialization protocol is not yet supported for storage")
case "avro" => throw new UnsupportedOperationException("Avro serialization protocol is not yet supported for storage")
case unknown => throw new UnsupportedOperationException("Unknown storage serialization protocol [" + unknown + "]")
}
}
// TODO: is this server thread-safe or needed to be wrapped up in an actor?
private[this] val server = classOf[CassandraServer].newInstance.asInstanceOf[CassandraServer]
private[this] var thriftServer: CassandraThriftServer = _
def start = synchronized {
if (!isRunning) {
try {
server.start
log.info("Cassandra persistent storage has started up successfully");
} catch {
case e =>
log.error("Could not start up Cassandra persistent storage")
throw e
}
if (RUN_THRIFT_SERVICE) {
thriftServer = new CassandraThriftServer(server)
thriftServer.start
}
isRunning
}
}
def stop = if (isRunning) {
//server.storageService.shutdown
if (RUN_THRIFT_SERVICE) thriftServer.stop
}
// ===============================================================
// For Ref
// ===============================================================
def insertRefStorageFor(name: String, element: AnyRef) = {
server.insert(
KEYSPACE,
name,
REF_COLUMN_FAMILY,
element,
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
def getRefStorageFor(name: String): Option[AnyRef] = {
try {
val column = server.get_column(KEYSPACE, name, REF_COLUMN_FAMILY)
Some(serializer.in(column.value, None))
} catch {
case e =>
e.printStackTrace
None }
}
// ===============================================================
// For Vector
// ===============================================================
def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
server.insert(
KEYSPACE,
name,
VECTOR_COLUMN_FAMILY + ":" + getVectorStorageSizeFor(name),
element,
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
try {
val column = server.get_column(KEYSPACE, name, VECTOR_COLUMN_FAMILY + ":" + index)
serializer.in(column.value, None)
} catch {
case e =>
e.printStackTrace
throw new Predef.NoSuchElementException(e.getMessage)
}
}
def getVectorStorageRangeFor(name: String, start: Int, count: Int): List[AnyRef] =
server.get_slice(KEYSPACE, name, VECTOR_COLUMN_FAMILY, IS_ASCENDING, count)
.toArray.toList.asInstanceOf[List[Tuple2[String, AnyRef]]].map(tuple => tuple._2)
def getVectorStorageSizeFor(name: String): Int =
server.get_column_count(KEYSPACE, name, VECTOR_COLUMN_FAMILY)
// ===============================================================
// For Map
// ===============================================================
def insertMapStorageEntryFor(name: String, key: String, value: AnyRef) = {
server.insert(
KEYSPACE, name,
MAP_COLUMN_FAMILY + ":" + key,
serializer.out(value),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[String, AnyRef]]) = {
import java.util.{ Map, HashMap, List, ArrayList }
val columns: Map[String, List[column_t]] = new HashMap
for (entry <- entries) {
val cls: List[column_t] = new ArrayList
cls.add(new column_t(entry._1, serializer.out(entry._2), System.currentTimeMillis))
columns.put(MAP_COLUMN_FAMILY, cls)
}
server.batch_insert(new BatchMutation(
KEYSPACE, name,
columns),
CONSISTENCY_LEVEL)
}
def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef] = {
try {
val column = server.get_column(KEYSPACE, name, MAP_COLUMN_FAMILY + ":" + key)
Some(serializer.in(column.value, None))
} catch {
case e =>
e.printStackTrace
None
}
}
def getMapStorageFor(name: String): List[Tuple2[String, AnyRef]] = {
val columns = server.get_columns_since(KEYSPACE, name, MAP_COLUMN_FAMILY, -1)
.toArray.toList.asInstanceOf[List[org.apache.cassandra.service.column_t]]
for {
column <- columns
col = (column.columnName, serializer.in(column.value, None))
} yield col
}
def getMapStorageSizeFor(name: String): Int =
server.get_column_count(KEYSPACE, name, MAP_COLUMN_FAMILY)
def removeMapStorageFor(name: String) =
server.remove(KEYSPACE, name, MAP_COLUMN_FAMILY, System.currentTimeMillis, CONSISTENCY_LEVEL)
def getMapStorageRangeFor(name: String, start: Int, count: Int): List[Tuple2[String, AnyRef]] = {
server.get_slice(KEYSPACE, name, MAP_COLUMN_FAMILY, IS_ASCENDING, count)
.toArray.toList.asInstanceOf[List[Tuple2[String, AnyRef]]]
}
}
class CassandraThriftServer(server: CassandraServer) extends Logging {
case object Start
case object Stop
private[this] val serverEngine: TThreadPoolServer = try {
val pidFile = kernel.Kernel.config.getString("akka.storage.cassandra.thrift-server.pidfile", "akka.pid")
if (pidFile != null) new File(pidFile).deleteOnExit();
val listenPort = DatabaseDescriptor.getThriftPort
val processor = new Cassandra.Processor(server)
val tServerSocket = new TServerSocket(listenPort)
val tProtocolFactory = new TBinaryProtocol.Factory
val options = new TThreadPoolServer.Options
options.minWorkerThreads = 64
new TThreadPoolServer(new TProcessorFactory(processor),
tServerSocket,
new TTransportFactory,
new TTransportFactory,
tProtocolFactory,
tProtocolFactory,
options)
} catch {
case e =>
log.error("Could not start up Cassandra thrift service")
throw e
}
import scala.actors.Actor._
private[this] val serverDaemon = actor {
receive {
case Start =>
serverEngine.serve
log.info("Cassandra thrift service has starting up successfully")
case Stop =>
log.info("Cassandra thrift service is shutting down...")
serverEngine.stop
}
}
def start = serverDaemon ! Start
def stop = serverDaemon ! Stop
}
*/

View file

@ -0,0 +1,258 @@
package se.scalablesolutions.akka.kernel.state
import com.mongodb._
import se.scalablesolutions.akka.kernel.util.Logging
import serialization.{Serializer}
import kernel.Kernel.config
import java.util.{Map=>JMap, List=>JList, ArrayList=>JArrayList}
/**
 * MapStorage / VectorStorage back-end persisting into a single MongoDB
 * collection. Each storage unit is one document of the shape
 * { "key" -> name, "value" -> payload }, where payload is a java.util.Map
 * (for map storage) or java.util.List (for vector storage) whose values
 * are serialized byte arrays.
 *
 * NOTE(review): updates are implemented as remove-then-insert, which is
 * not atomic -- a concurrent reader may observe a missing document.
 */
object MongoStorage extends MapStorage
  with VectorStorage with Logging {

  // Enrich DBCollection with a null-safe findOne that returns Option.
  class RichDBCollection(value: DBCollection) {
    def findOneNS(o: DBObject): Option[DBObject] = {
      value.findOne(o) match {
        case null => None // the driver signals "not found" with null
        case x => Some(x)
      }
    }
  }

  implicit def enrichDBCollection(c: DBCollection) = new RichDBCollection(c)

  // Document field names and the single collection everything lives in.
  val KEY = "key"
  val VALUE = "value"
  val COLLECTION = "akka_coll"

  // Connection settings, overridable through the akka config file.
  val MONGODB_SERVER_HOSTNAME =
    config.getString("akka.storage.mongodb.hostname", "127.0.0.1")
  val MONGODB_SERVER_DBNAME =
    config.getString("akka.storage.mongodb.dbname", "testdb")
  val MONGODB_SERVER_PORT =
    config.getInt("akka.storage.mongodb.port", 27017)

  val db = new Mongo(MONGODB_SERVER_HOSTNAME,
    MONGODB_SERVER_PORT, MONGODB_SERVER_DBNAME)
  val coll = db.getCollection(COLLECTION)

  // @fixme: make this pluggable
  private[this] val serializer: Serializer = Serializer.ScalaJSON

  /** Inserts a single key/value pair into the map stored under `name`. */
  override def insertMapStorageEntryFor(name: String,
    key: AnyRef, value: AnyRef) {
    insertMapStorageEntriesFor(name, List((key, value)))
  }

  /**
   * Inserts all `entries` into the map stored under `name`, serializing
   * each value. If a document already exists its entries are merged with
   * the new ones (new keys win), then the document is replaced.
   */
  override def insertMapStorageEntriesFor(name: String,
    entries: List[Tuple2[AnyRef, AnyRef]]) {
    import java.util.{Map, HashMap}
    val m: Map[AnyRef, AnyRef] = new HashMap
    for ((k, v) <- entries) {
      m.put(k, serializer.out(v))
    }
    nullSafeFindOne(name) match {
      case None =>
        coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, m))
      case Some(dbo) => {
        // collate the existing map with the new entries (new entries win)
        val o = dbo.get(VALUE).asInstanceOf[Map[AnyRef, AnyRef]]
        o.putAll(m)
        // remove existing reference
        removeMapStorageFor(name)
        // and insert the merged document
        coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, o))
      }
    }
  }

  /** Removes the whole map document stored under `name`. */
  override def removeMapStorageFor(name: String) = {
    val q = new BasicDBObject
    q.put(KEY, name)
    coll.remove(q)
  }

  /**
   * Removes a single entry from the map stored under `name`.
   * No-op if the document does not exist.
   * NOTE(review): `key` is cast to String here -- assumes all map keys
   * are strings at this level; confirm against callers.
   */
  override def removeMapStorageFor(name: String, key: AnyRef) = {
    nullSafeFindOne(name) match {
      case None =>
      case Some(dbo) => {
        val orig = dbo.get(VALUE).asInstanceOf[DBObject].toMap
        orig.remove(key.asInstanceOf[String])
        // remove existing reference
        removeMapStorageFor(name)
        // and insert the document minus the removed key
        coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, orig))
      }
    }
  }

  /** Looks up and deserializes one map entry; None if name/key missing. */
  override def getMapStorageEntryFor(name: String,
    key: AnyRef): Option[AnyRef] = {
    getValueForKey(name, key.asInstanceOf[String])
  }

  /** Number of entries in the map stored under `name`; 0 if absent. */
  override def getMapStorageSizeFor(name: String): Int = {
    nullSafeFindOne(name) match {
      case None => 0
      case Some(dbo) =>
        dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]].keySet.size
    }
  }

  /**
   * Returns all entries of the map stored under `name`, deserialized.
   * @throws NoSuchElementException if no document exists for `name`
   */
  override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = {
    val m =
      nullSafeFindOne(name) match {
        case None =>
          throw new Predef.NoSuchElementException(name + " not present")
        case Some(dbo) =>
          dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
      }
    val n =
      List(m.keySet.toArray: _*).asInstanceOf[List[String]]
    val vals =
      for(s <- n)
      yield (s, serializer.in(m.get(s).asInstanceOf[Array[Byte]], None))
    vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
  }

  /**
   * Returns a slice of the map entries for `name`, ordered by key.
   * NOTE(review): despite the AnyRef signature, `start` and `finish` are
   * cast to Int and used as positional indices into the sorted key list.
   * @throws NoSuchElementException if no document exists for `name`
   */
  override def getMapStorageRangeFor(name: String, start: Option[AnyRef],
    finish: Option[AnyRef],
    count: Int): List[Tuple2[AnyRef, AnyRef]] = {
    val m =
      nullSafeFindOne(name) match {
        case None =>
          throw new Predef.NoSuchElementException(name + " not present")
        case Some(dbo) =>
          dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
      }
    /**
     * <tt>count</tt> is the max number of results to return. Start with
     * <tt>start</tt> or 0 (if <tt>start</tt> is not defined) and go until
     * you hit <tt>finish</tt> or <tt>count</tt>.
     */
    val s = if (start.isDefined) start.get.asInstanceOf[Int] else 0
    val cnt =
      if (finish.isDefined) {
        val f = finish.get.asInstanceOf[Int]
        if (f >= s) Math.min(count, (f - s)) else count
      }
      else count
    val n =
      List(m.keySet.toArray: _*).asInstanceOf[List[String]].sort((e1, e2) => (e1 compareTo e2) < 0).slice(s, s + cnt)
    val vals =
      for(s <- n)
      yield (s, serializer.in(m.get(s).asInstanceOf[Array[Byte]], None))
    vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
  }

  /**
   * Fetches and deserializes the value stored under name/key.
   * Any driver/deserialization failure surfaces as NoSuchElementException.
   */
  private def getValueForKey(name: String, key: String): Option[AnyRef] = {
    try {
      nullSafeFindOne(name) match {
        case None => None
        case Some(dbo) =>
          Some(serializer.in(
            dbo.get(VALUE)
              .asInstanceOf[JMap[String, AnyRef]]
              .get(key).asInstanceOf[Array[Byte]], None))
      }
    } catch {
      case e =>
        throw new Predef.NoSuchElementException(e.getMessage)
    }
  }

  /**
   * Appends `elements` (serialized) to the vector stored under `name`,
   * replacing the backing document (remove + insert, see class note).
   */
  override def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef]) = {
    val q = new BasicDBObject
    q.put(KEY, name)
    val currentList =
      coll.findOneNS(q) match {
        case None =>
          new JArrayList[AnyRef]
        case Some(dbo) =>
          dbo.get(VALUE).asInstanceOf[JArrayList[AnyRef]]
      }
    if (!currentList.isEmpty) {
      // record exists
      // remove before adding
      coll.remove(q)
    }
    // add to the current list
    elements.map(serializer.out(_)).foreach(currentList.add(_))
    coll.insert(
      new BasicDBObject()
        .append(KEY, name)
        .append(VALUE, currentList)
    )
  }

  /** Appends a single element to the vector stored under `name`. */
  override def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
    insertVectorStorageEntriesFor(name, List(element))
  }

  /**
   * Returns the deserialized element at `index` of the vector for `name`.
   * Missing name/index surfaces as NoSuchElementException.
   */
  override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
    try {
      val o =
        nullSafeFindOne(name) match {
          case None =>
            throw new Predef.NoSuchElementException(name + " not present")
          case Some(dbo) =>
            dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
        }
      serializer.in(
        o.get(index).asInstanceOf[Array[Byte]],
        None
      )
    } catch {
      case e =>
        throw new Predef.NoSuchElementException(e.getMessage)
    }
  }

  /**
   * Returns `count` elements of the vector for `name` starting at `start`.
   * NOTE(review): `finish` is ignored and `start.get` is called without an
   * isDefined check -- an undefined `start` throws; confirm intent.
   */
  override def getVectorStorageRangeFor(name: String,
    start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
    try {
      val o =
        nullSafeFindOne(name) match {
          case None =>
            throw new Predef.NoSuchElementException(name + " not present")
          case Some(dbo) =>
            dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
        }
      // pick the subrange and make a Scala list
      val l =
        List(o.subList(start.get, start.get + count).toArray: _*)
      for(e <- l)
      yield serializer.in(e.asInstanceOf[Array[Byte]], None)
    } catch {
      case e =>
        throw new Predef.NoSuchElementException(e.getMessage)
    }
  }

  /** Number of elements in the vector stored under `name`; 0 if absent. */
  override def getVectorStorageSizeFor(name: String): Int = {
    nullSafeFindOne(name) match {
      case None => 0
      case Some(dbo) =>
        dbo.get(VALUE).asInstanceOf[JList[AnyRef]].size
    }
  }

  // Fetches the document whose KEY field equals `name`, if any.
  private def nullSafeFindOne(name: String): Option[DBObject] = {
    val o = new BasicDBObject
    o.put(KEY, name)
    coll.findOneNS(o)
  }
}

View file

@ -0,0 +1,96 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.state
import org.apache.commons.pool._
import org.apache.commons.pool.impl._
import org.apache.thrift.transport._
/**
 * Generic, typed object-pool abstraction mirroring commons-pool's
 * ObjectPool interface (see the delegation in PoolBridge). Extends
 * java.io.Closeable so a pool can be shut down via close().
 */
trait Pool[T] extends java.io.Closeable {
  def borrowObject: T                                // take an instance out of the pool
  def returnObject(t: T): Unit                       // hand a borrowed instance back
  def invalidateObject(t: T): Unit                   // discard an instance that is no longer usable
  def addObject: Unit                                // pre-create an idle instance
  def getNumIdle: Int                                // instances currently idle
  def getNumActive: Int                              // instances currently borrowed
  def clear: Unit                                    // drop all idle instances
  def setFactory(factory: PoolItemFactory[T]): Unit  // replace the item factory
}
/** Factory for creating [[Pool]] instances. */
trait PoolFactory[T] {
  def createPool: Pool[T]
}
/**
 * Lifecycle callbacks for items managed by a [[Pool]]; a typed mirror of
 * commons-pool's PoolableObjectFactory, adapted to it via
 * PoolBridge.toPoolableObjectFactory.
 */
trait PoolItemFactory[T] {
  def makeObject: T                  // create a new pool item
  def destroyObject(t: T): Unit      // dispose of an item permanently
  def validateObject(t: T): Boolean  // check an item is still usable
  def activateObject(t: T): Unit     // prepare an item before it is borrowed
  def passivateObject(t: T): Unit    // settle an item when it is returned
}
/**
 * Bridges the typed [[Pool]] API onto an untyped commons-pool ObjectPool.
 * All operations delegate to `impl`; borrowObject downcasts the pooled
 * Object back to T (safe as long as only toPoolableObjectFactory-wrapped
 * factories feed the pool).
 */
trait PoolBridge[T, OP <: ObjectPool] extends Pool[T] {
  /** The underlying commons-pool ObjectPool being wrapped. */
  val impl: OP

  override def borrowObject: T = impl.borrowObject.asInstanceOf[T]
  override def returnObject(t: T) = impl.returnObject(t)
  override def invalidateObject(t: T) = impl.invalidateObject(t)
  override def addObject = impl.addObject
  override def getNumIdle: Int = impl.getNumIdle
  override def getNumActive: Int = impl.getNumActive
  override def clear: Unit = impl.clear
  override def close: Unit = impl.close
  override def setFactory(factory: PoolItemFactory[T]) = impl.setFactory(toPoolableObjectFactory(factory))

  /**
   * Adapts a typed PoolItemFactory to commons-pool's untyped
   * PoolableObjectFactory, casting each callback argument back to A.
   * The type parameter was renamed from T to A: the original declaration
   * shadowed the trait's own T, which was confusing and defeated
   * -Xlint:type-parameter-shadow (behavior is unchanged -- call sites
   * infer the same type).
   */
  def toPoolableObjectFactory[A](pif: PoolItemFactory[A]) = new PoolableObjectFactory {
    def makeObject: Object = pif.makeObject.asInstanceOf[Object]
    def destroyObject(o: Object): Unit = pif.destroyObject(o.asInstanceOf[A])
    def validateObject(o: Object): Boolean = pif.validateObject(o.asInstanceOf[A])
    def activateObject(o: Object): Unit = pif.activateObject(o.asInstanceOf[A])
    def passivateObject(o: Object): Unit = pif.passivateObject(o.asInstanceOf[A])
  }
}
/**
 * Factory methods building [[Pool]]s backed by commons-pool's
 * StackObjectPool (LIFO reuse of idle instances).
 */
object StackPool {

  /** Pool with commons-pool's default idle limits. */
  def apply[T](factory: PoolItemFactory[T]): PoolBridge[T, StackObjectPool] =
    new PoolBridge[T, StackObjectPool] {
      val impl = new StackObjectPool(toPoolableObjectFactory(factory))
    }

  /** Pool keeping at most `maxIdle` idle instances. */
  def apply[T](factory: PoolItemFactory[T], maxIdle: Int): PoolBridge[T, StackObjectPool] =
    new PoolBridge[T, StackObjectPool] {
      val impl = new StackObjectPool(toPoolableObjectFactory(factory), maxIdle)
    }

  /** Pool with both an idle cap and an initial idle capacity hint. */
  def apply[T](factory: PoolItemFactory[T], maxIdle: Int, initIdleCapacity: Int): PoolBridge[T, StackObjectPool] =
    new PoolBridge[T, StackObjectPool] {
      val impl = new StackObjectPool(toPoolableObjectFactory(factory), maxIdle, initIdleCapacity)
    }
}
/**
 * Factory methods building [[Pool]]s backed by commons-pool's
 * SoftReferenceObjectPool (idle instances may be reclaimed by the GC).
 */
object SoftRefPool {

  /** Pool with no pre-created instances. */
  def apply[T](factory: PoolItemFactory[T]): PoolBridge[T, SoftReferenceObjectPool] =
    new PoolBridge[T, SoftReferenceObjectPool] {
      val impl = new SoftReferenceObjectPool(toPoolableObjectFactory(factory))
    }

  /** Pool pre-populated with `initSize` idle instances. */
  def apply[T](factory: PoolItemFactory[T], initSize: Int): PoolBridge[T, SoftReferenceObjectPool] =
    new PoolBridge[T, SoftReferenceObjectPool] {
      val impl = new SoftReferenceObjectPool(toPoolableObjectFactory(factory), initSize)
    }
}
/**
 * PoolItemFactory for Thrift transports: maps the pool lifecycle hooks
 * onto the transport's own open/close/flush operations. Concrete
 * factories only have to say how to create a transport.
 */
trait TransportFactory[T <: TTransport] extends PoolItemFactory[T] {

  /** Produces a fresh transport instance. */
  def createTransport: T

  def makeObject: T = createTransport
  def destroyObject(transport: T): Unit = transport.close
  def validateObject(transport: T) = transport.isOpen
  // (Re)open the transport when it is borrowed, if it isn't open already.
  def activateObject(transport: T): Unit = if (!transport.isOpen) transport.open
  // Flush pending writes when the transport goes back to the idle set.
  def passivateObject(transport: T): Unit = transport.flush
}
/**
 * TransportFactory producing TSocket transports already connected to the
 * given host/port endpoint.
 */
case class SocketProvider(host: String, port: Int) extends TransportFactory[TSocket] {
  def createTransport: TSocket = {
    val socket = new TSocket(host, port)
    socket.open // connect eagerly so borrowed transports are ready to use
    socket
  }
}

View file

@ -18,6 +18,7 @@ abstract class PersistentStorageConfig extends TransactionalStateConfig
case class CassandraStorageConfig extends PersistentStorageConfig
case class TerracottaStorageConfig extends PersistentStorageConfig
case class TokyoCabinetStorageConfig extends PersistentStorageConfig
case class MongoStorageConfig extends PersistentStorageConfig
/**
* Scala API.
@ -39,14 +40,16 @@ object TransactionalState extends TransactionalState
* </pre>
*/
class TransactionalState {
def newPersistentMap(config: PersistentStorageConfig): TransactionalMap[String, AnyRef] = config match {
def newPersistentMap(config: PersistentStorageConfig): TransactionalMap[AnyRef, AnyRef] = config match {
case CassandraStorageConfig() => new CassandraPersistentTransactionalMap
case MongoStorageConfig() => new MongoPersistentTransactionalMap
case TerracottaStorageConfig() => throw new UnsupportedOperationException
case TokyoCabinetStorageConfig() => throw new UnsupportedOperationException
}
def newPersistentVector(config: PersistentStorageConfig): TransactionalVector[AnyRef] = config match {
case CassandraStorageConfig() => new CassandraPersistentTransactionalVector
case MongoStorageConfig() => new MongoPersistentTransactionalVector
case TerracottaStorageConfig() => throw new UnsupportedOperationException
case TokyoCabinetStorageConfig() => throw new UnsupportedOperationException
}
@ -142,7 +145,7 @@ abstract class PersistentTransactionalMap[K, V] extends TransactionalMap[K, V] {
// FIXME: need to handle remove in another changeSet
protected[kernel] val changeSet = new HashMap[K, V]
def getRange(start: Int, count: Int)
def getRange(start: Option[AnyRef], count: Int)
def begin
def commit
@ -155,27 +158,38 @@ abstract class PersistentTransactionalMap[K, V] extends TransactionalMap[K, V] {
None // always return None to speed up writes (else need to go to DB to get
}
override def remove(key: K) = {
verifyTransaction
changeSet -= key
}
override def -=(key: K) = remove(key)
override def update(key: K, value: V) = put(key, value)
}
/**
* Implements a persistent transactional map based on the Cassandra distributed P2P key-value storage.
* Implementation of <tt>PersistentTransactionalMap</tt> for every concrete
* storage will have the same workflow. This abstracts the workflow.
*
* Subclasses just need to provide the actual concrete instance for the
* abstract val <tt>storage</tt>.
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[String, AnyRef] {
abstract class TemplatePersistentTransactionalMap extends PersistentTransactionalMap[AnyRef, AnyRef] {
override def getRange(start: Int, count: Int) = {
// to be concretized in subclasses
val storage: MapStorage
override def remove(key: AnyRef) = {
verifyTransaction
if (changeSet.contains(key)) changeSet -= key
else storage.removeMapStorageFor(uuid, key)
}
override def getRange(start: Option[AnyRef], count: Int) =
getRange(start, None, count)
def getRange(start: Option[AnyRef], finish: Option[AnyRef], count: Int) = {
verifyTransaction
try {
CassandraStorage.getMapStorageRangeFor(uuid, start, count)
storage.getMapStorageRangeFor(uuid, start, finish, count)
} catch {
case e: Exception => Nil
}
@ -183,7 +197,7 @@ class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[Str
// ---- For Transactional ----
override def commit = {
CassandraStorage.insertMapStorageEntriesFor(uuid, changeSet.toList)
storage.insertMapStorageEntriesFor(uuid, changeSet.toList)
changeSet.clear
}
@ -191,16 +205,16 @@ class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[Str
override def clear = {
verifyTransaction
try {
CassandraStorage.removeMapStorageFor(uuid)
storage.removeMapStorageFor(uuid)
} catch {
case e: Exception => {}
}
}
override def contains(key: String): Boolean = {
override def contains(key: AnyRef): Boolean = {
try {
verifyTransaction
CassandraStorage.getMapStorageEntryFor(uuid, key).isDefined
storage.getMapStorageEntryFor(uuid, key).isDefined
} catch {
case e: Exception => false
}
@ -209,19 +223,19 @@ class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[Str
override def size: Int = {
verifyTransaction
try {
CassandraStorage.getMapStorageSizeFor(uuid)
storage.getMapStorageSizeFor(uuid)
} catch {
case e: Exception => 0
}
}
// ---- For scala.collection.mutable.Map ----
override def get(key: String): Option[AnyRef] = {
override def get(key: AnyRef): Option[AnyRef] = {
verifyTransaction
// if (changeSet.contains(key)) changeSet.get(key)
// else {
val result = try {
CassandraStorage.getMapStorageEntryFor(uuid, key)
storage.getMapStorageEntryFor(uuid, key)
} catch {
case e: Exception => None
}
@ -229,16 +243,16 @@ class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[Str
//}
}
override def elements: Iterator[Tuple2[String, AnyRef]] = {
override def elements: Iterator[Tuple2[AnyRef, AnyRef]] = {
//verifyTransaction
new Iterator[Tuple2[String, AnyRef]] {
private val originalList: List[Tuple2[String, AnyRef]] = try {
CassandraStorage.getMapStorageFor(uuid)
new Iterator[Tuple2[AnyRef, AnyRef]] {
private val originalList: List[Tuple2[AnyRef, AnyRef]] = try {
storage.getMapStorageFor(uuid)
} catch {
case e: Throwable => Nil
}
private var elements = originalList.reverse
override def next: Tuple2[String, AnyRef]= synchronized {
override def next: Tuple2[AnyRef, AnyRef]= synchronized {
val element = elements.head
elements = elements.tail
element
@ -248,6 +262,25 @@ class CassandraPersistentTransactionalMap extends PersistentTransactionalMap[Str
}
}
/**
* Implements a persistent transactional map based on the Cassandra distributed P2P key-value storage.
*
* @author <a href="http://debasishg.blogspot.com">Debasish Ghosh</a>
*/
class CassandraPersistentTransactionalMap extends TemplatePersistentTransactionalMap {
val storage = CassandraStorage
}
/**
* Implements a persistent transactional map based on the MongoDB distributed P2P key-value storage.
*
* @author <a href="http://debasishg.blogspot.com">Debasish Ghosh</a>
*/
class MongoPersistentTransactionalMap extends TemplatePersistentTransactionalMap {
val storage = MongoStorage
}
/**
* Base for all transactional vector implementations.
*
@ -344,27 +377,32 @@ abstract class PersistentTransactionalVector[T] extends TransactionalVector[T] {
}
/**
* Implements a persistent transactional vector based on the Cassandra distributed P2P key-value storage.
* Implements a template for a concrete persistent transactional vector based storage.
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
* @author <a href="http://debasishg.blogspot.com">Debasish Ghosh</a>
*/
class CassandraPersistentTransactionalVector extends PersistentTransactionalVector[AnyRef] {
abstract class TemplatePersistentTransactionalVector extends PersistentTransactionalVector[AnyRef] {
val storage: VectorStorage
// ---- For TransactionalVector ----
override def get(index: Int): AnyRef = {
verifyTransaction
if (changeSet.size > index) changeSet(index)
else CassandraStorage.getVectorStorageEntryFor(uuid, index)
else storage.getVectorStorageEntryFor(uuid, index)
}
override def getRange(start: Int, count: Int): List[AnyRef] = {
override def getRange(start: Int, count: Int): List[AnyRef] =
getRange(Some(start), None, count)
def getRange(start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
verifyTransaction
CassandraStorage.getVectorStorageRangeFor(uuid, start, count)
storage.getVectorStorageRangeFor(uuid, start, finish, count)
}
override def length: Int = {
verifyTransaction
CassandraStorage.getVectorStorageSizeFor(uuid)
storage.getVectorStorageSizeFor(uuid)
}
override def apply(index: Int): AnyRef = get(index)
@ -381,11 +419,29 @@ class CassandraPersistentTransactionalVector extends PersistentTransactionalVect
// ---- For Transactional ----
override def commit = {
// FIXME: should use batch function once the bug is resolved
for (element <- changeSet) CassandraStorage.insertVectorStorageEntryFor(uuid, element)
for (element <- changeSet) storage.insertVectorStorageEntryFor(uuid, element)
changeSet.clear
}
}
/**
 * Persistent transactional vector backed by Cassandra.
 *
 * Binds the abstract storage member of TemplatePersistentTransactionalVector
 * to the CassandraStorage singleton; all vector operations are inherited
 * from the template.
 *
 * @author <a href="http://debasishg.blogspot.com">Debasish Ghosh</a>
 */
class CassandraPersistentTransactionalVector extends TemplatePersistentTransactionalVector {
// storage backend used by the inherited template operations
val storage = CassandraStorage
}
/**
 * Persistent transactional vector backed by MongoDB.
 *
 * Binds the abstract storage member of TemplatePersistentTransactionalVector
 * to the MongoStorage singleton; all vector operations are inherited from
 * the template.
 *
 * @author <a href="http://debasishg.blogspot.com">Debasish Ghosh</a>
 */
class MongoPersistentTransactionalVector extends TemplatePersistentTransactionalVector {
// storage backend used by the inherited template operations
val storage = MongoStorage
}
/**
* Implements a transactional reference.
*

View file

@ -0,0 +1,27 @@
package se.scalablesolutions.akka.kernel.state
// Marker trait abstracting a persistence storage backend. Carries no
// members of its own; concrete capabilities are added by the MapStorage
// and VectorStorage sub-traits below.
trait Storage {
}
// Storage contract for persistent maps. `name` identifies the map
// (presumably a transaction/actor UUID as used by the transactional map
// implementations — confirm against callers); keys and values are opaque
// AnyRefs, so serialization is left to the implementation.
trait MapStorage extends Storage {
// bulk-insert a batch of key/value pairs under `name`
def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[AnyRef, AnyRef]])
// insert or overwrite a single key/value pair under `name`
def insertMapStorageEntryFor(name: String, key: AnyRef, value: AnyRef)
// remove the entire map stored under `name`
def removeMapStorageFor(name: String)
// remove a single key from the map stored under `name`
def removeMapStorageFor(name: String, key: AnyRef)
// look up one value; None when the key is absent
def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef]
// number of entries currently stored under `name`
def getMapStorageSizeFor(name: String): Int
// fetch all key/value pairs stored under `name`
def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]]
// fetch a sub-range of entries; `start`/`finish` are optional bounds and
// `count` caps the number of returned entries (exact bound semantics are
// implementation-defined — see the concrete storages)
def getMapStorageRangeFor(name: String, start: Option[AnyRef],
finish: Option[AnyRef], count: Int): List[Tuple2[AnyRef, AnyRef]]
}
// Storage contract for persistent vectors (indexed sequences). `name`
// identifies the vector; elements are opaque AnyRefs.
trait VectorStorage extends Storage {
// append a single element to the vector stored under `name`
def insertVectorStorageEntryFor(name: String, element: AnyRef)
// append a batch of elements to the vector stored under `name`
def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef])
// fetch the element at `index` (implementations may throw when absent —
// see MongoStorageSpec, which expects NoSuchElementException)
def getVectorStorageEntryFor(name: String, index: Int): AnyRef
// fetch a sub-range of elements; `start`/`finish` are optional bounds and
// `count` caps the number of returned elements
def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[AnyRef]
// number of elements currently stored under `name`
def getVectorStorageSizeFor(name: String): Int
}

View file

@ -4,15 +4,9 @@
package se.scalablesolutions.akka.kernel.util
import java.io.UnsupportedEncodingException
import java.security.{NoSuchAlgorithmException, MessageDigest}
import java.security.MessageDigest
import java.util.concurrent.locks.ReentrantReadWriteLock
import scala.actors._
import scala.actors.Actor._
import net.lag.logging.Logger
class SystemFailure(cause: Throwable) extends RuntimeException(cause)
/**
@ -20,7 +14,18 @@ class SystemFailure(cause: Throwable) extends RuntimeException(cause)
*/
object Helpers extends Logging {
def getDigestFor(s: String) = {
implicit def null2Option[T](t: T): Option[T] = if (t != null) Some(t) else None
def intToBytes(value: Int): Array[Byte] = {
val bytes = new Array[Byte](4)
bytes(0) = (value >>> 24).asInstanceOf[Byte]
bytes(1) = (value >>> 16).asInstanceOf[Byte]
bytes(2) = (value >>> 8).asInstanceOf[Byte]
bytes(3) = value.asInstanceOf[Byte]
bytes
}
def getMD5For(s: String) = {
val digest = MessageDigest.getInstance("MD5")
digest.update(s.getBytes("ASCII"))
val bytes = digest.digest
@ -59,51 +64,5 @@ object Helpers extends Logging {
}
}
}
// ================================================
// implicit conversion between regular actor and actor with a type future
implicit def actorWithFuture(a: Actor) = new ActorWithTypedFuture(a)
abstract class FutureWithTimeout[T](ch: InputChannel[T]) extends Future[T](ch) {
def receiveWithin(timeout: Int) : Option[T]
override def respond(f: T => Unit): Unit = throw new UnsupportedOperationException("Does not support the Responder API")
}
def receiveOrFail[T](future: => FutureWithTimeout[T], timeout: Int, errorHandler: => T): T = {
future.receiveWithin(timeout) match {
case None => errorHandler
case Some(reply) => reply
}
}
class ActorWithTypedFuture(a: Actor) {
require(a != null)
def !!![A](msg: Any): FutureWithTimeout[A] = {
val ftch = new Channel[A](Actor.self)
a.send(msg, ftch.asInstanceOf[OutputChannel[Any]])
new FutureWithTimeout[A](ftch) {
def apply() =
if (isSet) value.get.asInstanceOf[A]
else ch.receive {
case a =>
value = Some(a)
value.get.asInstanceOf[A]
}
def isSet = receiveWithin(0).isDefined
def receiveWithin(timeout: Int): Option[A] = value match {
case None => ch.receiveWithin(timeout) {
case TIMEOUT =>
log.debug("Future timed out while waiting for actor [%s]", a)
None
case a =>
value = Some(a)
value.asInstanceOf[Option[A]]
}
case a => a.asInstanceOf[Option[A]]
}
}
}
}
}

View file

@ -16,7 +16,7 @@ object AllTest extends TestCase {
suite.addTestSuite(classOf[EventBasedThreadPoolDispatcherTest])
suite.addTestSuite(classOf[ActorSpec])
suite.addTestSuite(classOf[RemoteActorSpec])
suite.addTestSuite(classOf[PersistentActorSpec])
//suite.addTestSuite(classOf[PersistentActorSpec])
suite.addTestSuite(classOf[InMemoryActorSpec])
//suite.addTestSuite(classOf[TransactionClasherSpec])
suite

View file

@ -55,7 +55,7 @@ class EventBasedSingleThreadDispatcherTest extends TestCase {
val guardLock = new ReentrantLock
val handleLatch = new CountDownLatch(100)
val key = "key"
val dispatcher = new EventBasedSingleThreadDispatcher
val dispatcher = new EventBasedSingleThreadDispatcher("name")
dispatcher.registerHandler(key, new TestMessageHandle(handleLatch))
dispatcher.start
for (i <- 0 until 100) {
@ -69,7 +69,7 @@ class EventBasedSingleThreadDispatcherTest extends TestCase {
val handleLatch = new CountDownLatch(2)
val key1 = "key1"
val key2 = "key2"
val dispatcher = new EventBasedSingleThreadDispatcher
val dispatcher = new EventBasedSingleThreadDispatcher("name")
dispatcher.registerHandler(key1, new TestMessageHandle(handleLatch))
dispatcher.registerHandler(key2, new TestMessageHandle(handleLatch))
dispatcher.start
@ -83,7 +83,7 @@ class EventBasedSingleThreadDispatcherTest extends TestCase {
val handleLatch = new CountDownLatch(200)
val key1 = "key1"
val key2 = "key2"
val dispatcher = new EventBasedSingleThreadDispatcher
val dispatcher = new EventBasedSingleThreadDispatcher("name")
dispatcher.registerHandler(key1, new MessageInvoker {
var currentValue = -1;
def invoke(message: MessageInvocation) {

View file

@ -37,7 +37,7 @@ class EventBasedThreadPoolDispatcherTest extends TestCase {
val guardLock = new ReentrantLock
val handleLatch = new CountDownLatch(10)
val key = "key"
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher("name")
dispatcher.withNewThreadPoolWithBoundedBlockingQueue(100)
.setCorePoolSize(2)
.setMaxPoolSize(4)
@ -76,7 +76,7 @@ class EventBasedThreadPoolDispatcherTest extends TestCase {
val handlersBarrier = new CyclicBarrier(3)
val key1 = "key1"
val key2 = "key2"
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher("name")
dispatcher.withNewThreadPoolWithBoundedBlockingQueue(100)
.setCorePoolSize(2)
.setMaxPoolSize(4)
@ -121,7 +121,7 @@ class EventBasedThreadPoolDispatcherTest extends TestCase {
val handleLatch = new CountDownLatch(200)
val key1 = "key1"
val key2 = "key2"
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher("name")
dispatcher.withNewThreadPoolWithBoundedBlockingQueue(100)
.setCorePoolSize(2)
.setMaxPoolSize(4)

View file

@ -1,91 +0,0 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel
import akka.kernel.config.ActiveObjectGuiceConfigurator
import kernel.config.ScalaConfig._
import com.sun.grizzly.http.SelectorThread
import com.sun.jersey.api.client.Client
import com.sun.jersey.core.header.MediaTypes
import com.sun.jersey.api.container.grizzly.GrizzlyWebContainerFactory
import javax.ws.rs.core.UriBuilder
import javax.ws.rs.{Produces, Path, GET}
import com.google.inject.{AbstractModule, Scopes}
import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
//simport com.jteigen.scalatest.JUnit4Runner
import org.junit.runner.RunWith
import org.junit.Test
import org.junit.Assert._
/**
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
//@RunWith(classOf[JUnit4Runner])
class JerseySpec extends Spec with ShouldMatchers {
describe("A Jersey REST service") {
it("should ...") {
/*
val selector = startJersey
selector.start
val conf = new ActiveObjectGuiceConfigurator
conf.configure(
RestartStrategy(AllForOne, 3, 5000),
Component(
classOf[resource.JerseyFoo],
LifeCycle(Permanent, 1000),
1000) ::
Nil).supervise
conf.getInstance(classOf[resource.JerseyFoo])
*/
/*
val client = Client.create
val webResource = client.resource(UriBuilder.fromUri("http://localhost/").port(9998).build)
//val webResource = client.resource("http://localhost:9998/foo")
val responseMsg = webResource.get(classOf[String])
responseMsg should equal ("Hello World")
selector.stopEndpoint
*/
}
}
def startJersey: SelectorThread = {
val initParams = new java.util.HashMap[String, String]
initParams.put("com.sun.jersey.config.property.packages", "se.scalablesolutions.akka.kernel")
GrizzlyWebContainerFactory.create(UriBuilder.fromUri("http://localhost/").port(9998).build(), initParams)
}
}
// @GET
// @Produces("application/json")
// @Path("/network/{id: [0-9]+}/{nid}")
// def getUserByNetworkId(@PathParam {val value = "id"} id: Int, @PathParam {val value = "nid"} networkId: String): User = {
// val q = em.createQuery("SELECT u FROM User u WHERE u.networkId = :id AND u.networkUserId = :nid")
// q.setParameter("id", id)
// q.setParameter("nid", networkId)
// q.getSingleResult.asInstanceOf[User]
// }
package resource {
import javax.ws.rs.{Produces, Path, GET}
class JerseyFoo {
@GET
@Produces(Array("application/json"))
def foo: String = { val ret = "JerseyFoo.foo"; println(ret); ret }
}
@Path("/foo")
class JerseyFooSub extends JerseyFoo
class JerseyBar {
def bar(msg: String) = msg + "return_bar "
}
}

View file

@ -0,0 +1,272 @@
package se.scalablesolutions.akka.kernel.state
import junit.framework.TestCase
import org.junit.{Test, Before}
import org.junit.Assert._
/**
 * Integration tests for MongoStorage's vector and map storage APIs
 * (insert, fetch, range queries, removal).
 *
 * NOTE(review): these tests hit a live MongoDB collection via
 * MongoStorage.coll — a running MongoDB instance is required; they are
 * integration tests, not isolated unit tests.
 */
class MongoStorageSpec extends TestCase {
// scratch change sets shared across tests; tests clear them explicitly
val changeSetV = new scala.collection.mutable.ArrayBuffer[AnyRef]
val changeSetM = new scala.collection.mutable.HashMap[AnyRef, AnyRef]
// drop the backing collection so every test starts from an empty store
override def setUp = {
MongoStorage.coll.drop
}
// Inserting batches under a transaction id accumulates entries; distinct
// ids ("U-A1" vs "U-A2") are isolated from each other.
@Test
def testVectorInsertForTransactionId = {
changeSetV += "debasish" // string
changeSetV += List(1, 2, 3) // Scala List
changeSetV += List(100, 200)
MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
assertEquals(
3,
MongoStorage.getVectorStorageSizeFor("U-A1"))
changeSetV.clear
// changeSetV should be reinitialized
changeSetV += List(12, 23, 45)
changeSetV += "maulindu"
MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
assertEquals(
5,
MongoStorage.getVectorStorageSizeFor("U-A1"))
// add more to the same changeSetV
changeSetV += "ramanendu"
changeSetV += Map(1 -> "dg", 2 -> "mc")
// add for a diff transaction
MongoStorage.insertVectorStorageEntriesFor("U-A2", changeSetV.toList)
assertEquals(
4,
MongoStorage.getVectorStorageSizeFor("U-A2"))
// previous transaction change set should remain same
assertEquals(
5,
MongoStorage.getVectorStorageSizeFor("U-A1"))
// test single element entry
MongoStorage.insertVectorStorageEntryFor("U-A1", Map(1->1, 2->4, 3->9))
assertEquals(
6,
MongoStorage.getVectorStorageSizeFor("U-A1"))
}
// Elements fetched by index come back in insertion order and round-trip
// their original (deserialized) values.
@Test
def testVectorFetchForKeys = {
// initially everything 0
assertEquals(
0,
MongoStorage.getVectorStorageSizeFor("U-A2"))
assertEquals(
0,
MongoStorage.getVectorStorageSizeFor("U-A1"))
// get some stuff
changeSetV += "debasish"
changeSetV += List(12, 13, 14)
MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
assertEquals(
2,
MongoStorage.getVectorStorageSizeFor("U-A1"))
assertEquals(
"debasish",
MongoStorage.getVectorStorageEntryFor("U-A1", 0).asInstanceOf[String])
assertEquals(
List(12, 13, 14),
MongoStorage.getVectorStorageEntryFor("U-A1", 1).asInstanceOf[List[Int]])
changeSetV.clear
changeSetV += Map(1->1, 2->4, 3->9)
changeSetV += BigInt(2310)
changeSetV += List(100, 200, 300)
MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
assertEquals(
5,
MongoStorage.getVectorStorageSizeFor("U-A1"))
val r =
MongoStorage.getVectorStorageRangeFor("U-A1", Some(1), None, 3)
assertEquals(3, r.size)
assertEquals(List(12, 13, 14), r(0).asInstanceOf[List[Int]])
}
// Index and range lookups on an empty/absent vector are expected to
// throw NoSuchElementException rather than return empty results.
@Test
def testVectorFetchForNonExistentKeys = {
try {
MongoStorage.getVectorStorageEntryFor("U-A1", 1)
fail("should throw an exception")
} catch {case e: Predef.NoSuchElementException => {}}
try {
MongoStorage.getVectorStorageRangeFor("U-A1", Some(2), None, 12)
fail("should throw an exception")
} catch {case e: Predef.NoSuchElementException => {}}
}
// Map inserts accumulate per transaction id; batch and single-entry
// insert APIs compose, and distinct ids remain isolated.
@Test
def testMapInsertForTransactionId = {
case class Foo(no: Int, name: String)
fillMap
// add some more to changeSet
changeSetM += "5" -> Foo(12, "dg")
changeSetM += "6" -> java.util.Calendar.getInstance.getTime
// insert all into Mongo
MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
assertEquals(
6,
MongoStorage.getMapStorageSizeFor("U-M1"))
// individual insert api
MongoStorage.insertMapStorageEntryFor("U-M1", "7", "akka")
MongoStorage.insertMapStorageEntryFor("U-M1", "8", List(23, 25))
assertEquals(
8,
MongoStorage.getMapStorageSizeFor("U-M1"))
// add the same changeSet for another transaction
MongoStorage.insertMapStorageEntriesFor("U-M2", changeSetM.toList)
assertEquals(
6,
MongoStorage.getMapStorageSizeFor("U-M2"))
// the first transaction should remain the same
assertEquals(
8,
MongoStorage.getMapStorageSizeFor("U-M1"))
changeSetM.clear
}
// Individual lookups return Some(value); full-map fetch returns all
// pairs; fetching an id that was never written throws.
@Test
def testMapContents = {
fillMap
MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
MongoStorage.getMapStorageEntryFor("U-M1", "2") match {
case Some(x) => assertEquals("peter", x.asInstanceOf[String])
case None => fail("should fetch peter")
}
MongoStorage.getMapStorageEntryFor("U-M1", "4") match {
case Some(x) => assertEquals(3, x.asInstanceOf[List[Int]].size)
case None => fail("should fetch list")
}
MongoStorage.getMapStorageEntryFor("U-M1", "3") match {
case Some(x) => assertEquals(2, x.asInstanceOf[List[Int]].size)
case None => fail("should fetch list")
}
// get the entire map
val l: List[Tuple2[AnyRef, AnyRef]] =
MongoStorage.getMapStorageFor("U-M1")
assertEquals(4, l.size)
assertTrue(l.map(_._1).contains("1"))
assertTrue(l.map(_._1).contains("2"))
assertTrue(l.map(_._1).contains("3"))
assertTrue(l.map(_._1).contains("4"))
assertTrue(l.map(_._2).contains("john"))
// trying to fetch for a non-existent transaction will throw
try {
MongoStorage.getMapStorageFor("U-M2")
fail("should throw an exception")
} catch {case e: Predef.NoSuchElementException => {}}
changeSetM.clear
}
// Range queries over the map, exercising the start/finish/count
// combinations (including finish < start, which still yields `count`
// entries here).
@Test
def testMapContentsByRange = {
fillMap
changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
// specify start and count
val l: List[Tuple2[AnyRef, AnyRef]] =
MongoStorage.getMapStorageRangeFor(
"U-M1", Some(Integer.valueOf(2)), None, 3)
assertEquals(3, l.size)
assertEquals("3", l(0)._1.asInstanceOf[String])
assertEquals(List(100, 200), l(0)._2.asInstanceOf[List[Int]])
assertEquals("4", l(1)._1.asInstanceOf[String])
assertEquals(List(10, 20, 30), l(1)._2.asInstanceOf[List[Int]])
// specify start, finish and count where finish - start == count
assertEquals(3,
MongoStorage.getMapStorageRangeFor(
"U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(5)), 3).size)
// specify start, finish and count where finish - start > count
assertEquals(3,
MongoStorage.getMapStorageRangeFor(
"U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(9)), 3).size)
// do not specify start or finish
assertEquals(3,
MongoStorage.getMapStorageRangeFor(
"U-M1", None, None, 3).size)
// specify finish and count
assertEquals(3,
MongoStorage.getMapStorageRangeFor(
"U-M1", None, Some(Integer.valueOf(3)), 3).size)
// specify start, finish and count where finish < start
assertEquals(3,
MongoStorage.getMapStorageRangeFor(
"U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(1)), 3).size)
changeSetM.clear
}
// Removing a single key shrinks the map and makes that key's lookup
// throw; removing the whole map makes the full fetch throw.
@Test
def testMapStorageRemove = {
fillMap
changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
assertEquals(5,
MongoStorage.getMapStorageSizeFor("U-M1"))
// remove key "3"
MongoStorage.removeMapStorageFor("U-M1", "3")
assertEquals(4,
MongoStorage.getMapStorageSizeFor("U-M1"))
try {
MongoStorage.getMapStorageEntryFor("U-M1", "3")
fail("should throw exception")
// NOTE(review): catch-all accepts ANY Throwable here, unlike the
// sibling tests which catch NoSuchElementException specifically —
// consider narrowing so unrelated failures are not masked
} catch { case e => {}}
// remove the whole stuff
MongoStorage.removeMapStorageFor("U-M1")
try {
MongoStorage.getMapStorageFor("U-M1")
fail("should throw exception")
} catch { case e: NoSuchElementException => {}}
changeSetM.clear
}
// Seed changeSetM with four fixed entries used by the map tests.
private def fillMap = {
changeSetM += "1" -> "john"
changeSetM += "2" -> "peter"
changeSetM += "3" -> List(100, 200)
changeSetM += "4" -> List(10, 20, 30)
changeSetM
}
}

View file

@ -26,7 +26,6 @@ class RemoteActorSpecActorBidirectional extends Actor {
}
class RemoteActorSpec extends TestCase {
kernel.Kernel.config
new Thread(new Runnable() {
def run = {

View file

@ -49,7 +49,7 @@ class ThreadBasedDispatcherTest extends TestCase {
private def internalTestMessagesDispatchedToTheSameHandlerAreExecutedSequentially: Unit = {
val guardLock = new ReentrantLock
val handleLatch = new CountDownLatch(100)
val dispatcher = new ThreadBasedDispatcher(new TestMessageHandle(handleLatch))
val dispatcher = new ThreadBasedDispatcher("name", new TestMessageHandle(handleLatch))
dispatcher.start
for (i <- 0 until 100) {
dispatcher.messageQueue.append(new MessageInvocation("id", new Object, None, None))
@ -60,7 +60,7 @@ class ThreadBasedDispatcherTest extends TestCase {
private def internalTestMessagesDispatchedToHandlersAreExecutedInFIFOOrder: Unit = {
val handleLatch = new CountDownLatch(100)
val dispatcher = new ThreadBasedDispatcher(new MessageInvoker {
val dispatcher = new ThreadBasedDispatcher("name", new MessageInvoker {
var currentValue = -1;
def invoke(message: MessageInvocation) {
if (threadingIssueDetected.get) return

Binary file not shown.

BIN
lib/aspectwerkz-jdk5-2.1.jar Executable file

Binary file not shown.

252
lib/aspectwerkz2.dtd Executable file
View file

@ -0,0 +1,252 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--*****************************************************************************************************************************
AspectWerkz DTD 2.0
This DTD is provided as is. Some elements might have been deprecated since 0.7.4 (see comments)
This DTD might be incomplete - drop a mail on the user / dev mailing list
This DTD might clash with some of existing XML files since it imposes for some elements an order
(see aspect: introductions then pointcuts then advices)
The "unspecified" order of the aspectwerkz children might change to one similar to that of aspects
(transformation scope, introduction-def, advice-def, advices-def, abstract-aspect, aspect, package)
To use this DTD, start your definition file with
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD 2.0//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz_2_0.dtd">
You can also use the "aliasing DTD" that always matches the latest release of AspectWerkz
<!DOCTYPE aspectwerkz PUBLIC
"-//AspectWerkz//DTD//EN"
"http://aspectwerkz.codehaus.org/dtd/aspectwerkz2.dtd">
To not use this DTD, start your definition file with
<?xml version="1.0"?>
Change from 1.0 to 2.0
most of the changes implies non backward compatibility
aspect/introduce element only allows to introduce interface. Use new <mixin element.
introduce@deploymentModel is removed
new system/mixin top level element
deployment-scope
advisable
******************************************************************************************************************************-->
<!--*****************************************************************************************************************************
aspectwerkz
*********************************************************************************************************************************
Aspectwerkz defines the root element
One system defines a namespace. Usually systems are used with several aop.xml file to isolate system
following classloader hierarchies.
******************************************************************************************************************************-->
<!ELEMENT aspectwerkz (system*)>
<!--*****************************************************************************************************************************
system
*********************************************************************************************************************************
System allows isolation of AspectWerkz model
For now we only support a single system.
A system contains use-aspect syntax OR *-def *-ref syntax. The DTD cannot represent that.
Attributes:
id: unique name of the AspectWerkz system (replaces <aspectwerkz id=..> in 0.8.1 DTD)
base-package: provides the global package prefix
Notes:
Due to package nesting, we cannot define an order here
******************************************************************************************************************************-->
<!ELEMENT system (
(exclude | include | prepare | pointcut | deployment-scope | advisable)*,
(aspect | mixin | package)*
)>
<!ATTLIST system
id CDATA #REQUIRED
base-package CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
exclude
*********************************************************************************************************************************
Declares one global package prefix. Any class in the scope cannot have aspects on it within this system
Attributes:
package: package prefix
Note: package does not support patterns, except an ending ".*" like "foo.*", which is equivalent to "foo."
During weaving, a class is considered to be in the transformation scope with a classFQN.startsWith( ) test.
Note: if a class is both in exclude and include, it is considered excluded.
******************************************************************************************************************************-->
<!ELEMENT exclude EMPTY>
<!ATTLIST exclude
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
include
*********************************************************************************************************************************
Declares one global package prefix. Any class not in the scope cannot have aspects on it within this system
Attributes:
package: package prefix
Note: package does not support patterns, except an ending ".*" like "foo.*", which is equivalent to "foo."
During weaving, a class is considered to be in the transformation scope with a classFQN.startsWith( ) test.
******************************************************************************************************************************-->
<!ELEMENT include EMPTY>
<!ATTLIST include
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
prepare
*********************************************************************************************************************************
deprecated
******************************************************************************************************************************-->
<!ELEMENT prepare EMPTY>
<!ATTLIST prepare
package CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
package
*********************************************************************************************************************************
Declares a package prefix which applies to all FQN (aspect) and patterns (pointcuts, class)
contained in the package element.
TODO REMOVE or IMPL - Package elements can be nested to ease writing of package and subpackage
TODO REMOVE or IMPL - (package@name "bar" nested in package@name "foo" means "foo.bar").
Attributes:
name: package prefix
Notes:
Does not support patterns, except an ending ".*" like "foo.*", which is equivalent to "foo." and to "foo"
******************************************************************************************************************************-->
<!ELEMENT package (
(aspect | mixin)*
)>
<!ATTLIST package
name CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
aspect
*********************************************************************************************************************************
Declares aspect.
Attributes:
name: unique name of the aspect in the system. Defaults to FQN (see class)
class: FQN of Aspect (can have metadata or not)
container: FQN of AspectContainer, defaults to regular org.codehaus.aspectwerkz.aspect.DefaultAspectContainerStrategy
deployment-model: aspect life cycle, defaults to perJVM
valid values are: perJVM | perClass | perInstance | perthis(...pc expr) | pertarget(...pc expr)
******************************************************************************************************************************-->
<!ELEMENT aspect (
param*,
(pointcut | deployment-scope | advice | introduce)*
)>
<!ATTLIST aspect
name CDATA #IMPLIED
class CDATA #REQUIRED
container CDATA #IMPLIED
deployment-model CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
param
*********************************************************************************************************************************
Declares param for a aspect element
******************************************************************************************************************************-->
<!ELEMENT param EMPTY>
<!ATTLIST param
name CDATA #REQUIRED
value CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
pointcut
*********************************************************************************************************************************
Attributes:
name
expression (required unless element data is used)
Element data: equivalent to expression, but ignored if pointcut@expression is already specified
******************************************************************************************************************************-->
<!ELEMENT pointcut (#PCDATA)>
<!ATTLIST pointcut
name CDATA #REQUIRED
expression CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
deployment-scope
*********************************************************************************************************************************
Attributes:
name
expression (required unless element data is used)
Element data: equivalent to expression, but ignored if deployment-scope@expression is already specified
******************************************************************************************************************************-->
<!ELEMENT deployment-scope (#PCDATA)>
<!ATTLIST deployment-scope
name CDATA #REQUIRED
expression CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
advisable
*********************************************************************************************************************************
Attributes:
pointcut-type: the pointcut type: call, get, set, execution, handler, all, or any piped list like get|set
expression: the pointcut expression, can be within/withincode/or more fine grained, as long as consistent with pointcut-type
******************************************************************************************************************************-->
<!ELEMENT advisable EMPTY>
<!ATTLIST advisable
pointcut-type CDATA #REQUIRED
expression CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
advice
*********************************************************************************************************************************
Allow for advice declaration for use in several aspects
Use nested bind-to element for multiple binding
Attributes:
name: method name implementing the advice
type: advice type (around | before | after | after finally | after returning | after throwing), "after xxx(TYPE varName)"
bind-to: pointcut expression
******************************************************************************************************************************-->
<!ELEMENT advice (
bind-to*
)>
<!ATTLIST advice
name CDATA #REQUIRED
type CDATA #REQUIRED
bind-to CDATA #IMPLIED
attribute CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
bind-to
*********************************************************************************************************************************
Allow to bind an advice several times instead of using an OR expression
******************************************************************************************************************************-->
<!ELEMENT bind-to EMPTY>
<!ATTLIST bind-to
pointcut CDATA #REQUIRED
>
<!--*****************************************************************************************************************************
introduce
*********************************************************************************************************************************
Attributes:
class: interface introduction only
bind-to: pointcut expression (only within/hasField/hasMethod makes sense)
******************************************************************************************************************************-->
<!ELEMENT introduce (
bind-to*
)>
<!ATTLIST introduce
class CDATA #REQUIRED
bind-to CDATA #IMPLIED
>
<!--*****************************************************************************************************************************
mixin
*********************************************************************************************************************************
Declares mixin.
Attributes:
class: FQN of mixin
bind-to: pointcut where to bind the mixin, optional (can be specified in annotations)
deployment-model: mixin life cycle, defaults to perInstance
transient: true | false, defaults to false, to control mixin persistence alongside target instances
factory: FQN of MixinFactory, implements org.codehaus.aspectwerkz.aspect.MixinFactory, defaults to regular DefaultMixinFactory
TODO nested pc + package support ??
Elements:
param: name value pairs
******************************************************************************************************************************-->
<!ELEMENT mixin (param*)>
<!ATTLIST mixin
class CDATA #REQUIRED
bind-to CDATA #IMPLIED
deployment-model (perClass | perInstance | perJVM) #IMPLIED
transient (false | true) #IMPLIED
factory CDATA #IMPLIED
>

Binary file not shown.

Binary file not shown.

BIN
lib/cassandra-0.4.0-trunk.jar Executable file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/commons-pool-1.5.1.jar Executable file

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/javautils-2.7.4-0.1.jar Normal file

Binary file not shown.

Binary file not shown.

0
lib/jersey-core-1.1.1-ea.jar Executable file → Normal file
View file

0
lib/jersey-server-1.1.1-ea.jar Executable file → Normal file
View file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
lib/scala-stats-1.0.jar Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

277
pom.xml
View file

@ -1,4 +1,4 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
@ -6,8 +6,8 @@
<name>Akka Actor Kernel</name>
<artifactId>akka</artifactId>
<groupId>${akka.groupId}</groupId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
<inceptionYear>2009</inceptionYear>
<packaging>pom</packaging>
@ -22,17 +22,47 @@
<module>kernel</module>
<module>fun-test-java</module>
<module>samples-scala</module>
<module>samples-lift</module>
<module>samples-java</module>
</modules>
<organization>
<name>Scalable Solutions AB</name>
<url>http://scalablesolutions.se</url>
</organization>
<scm>
<connection>scm:git:git://github.com/jboner/akka.git</connection>
<developerConnection>scm:git:git@github.com:jboner/akka.git</developerConnection>
<url>http://github.com/jboner/akka</url>
</scm>
<licenses>
<license>
<name>the Apache License, ASL Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0</url>
</license>
</licenses>
<developers>
<developer>
<id>jboner</id>
<name>Jonas Bon&#233;r</name>
<timezone>+1</timezone>
<email>jonas AT jonasboner DOTCOM</email>
<roles>
<role>Founder</role>
<role>Hacker</role>
<role>Despot</role>
</roles>
</developer>
</developers>
<repositories>
<repository>
<id>repo1.maven</id>
<name>Maven Main Repository</name>
<url>http://repo1.maven.org/maven2</url>
<!--<snapshots>
<enabled>true</enabled>
</snapshots>-->
</repository>
<repository>
<id>project.embedded.module</id>
@ -87,6 +117,11 @@
<enabled>false</enabled>
</releases>
</repository>
<repository>
<id>guice-maven</id>
<name>guice maven</name>
<url>http://guice-maven.googlecode.com/svn/trunk</url>
</repository>
<repository>
<id>google-maven-repository</id>
<name>Google Maven Repository</name>
@ -111,6 +146,10 @@
</repositories>
<pluginRepositories>
<pluginRepository>
<id>onejar-maven-plugin.googlecode.com</id>
<url>http://onejar-maven-plugin.googlecode.com/svn/mavenrepo</url>
</pluginRepository>
<pluginRepository>
<id>scala-tools.org</id>
<name>Scala-Tools Maven2 Repository</name>
@ -119,7 +158,79 @@
</pluginRepositories>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.mortbay.jetty</groupId>
<artifactId>maven-jetty-plugin</artifactId>
<configuration>
<contextPath>/</contextPath>
<scanIntervalSeconds>5</scanIntervalSeconds>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
<source>1.5</source>
<target>1.5</target>
</configuration>
</plugin>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.10.1</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
</execution>
</executions>
<configuration>
<jvmArgs>
<jvmArg>-Xmx1024m</jvmArg>
</jvmArgs>
<args>
<!-- arg>-unchecked</arg -->
<arg>-deprecation</arg>
<arg>-Xno-varargs-conversion</arg>
</args>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
<plugin>
<inherited>true</inherited>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-changes-plugin</artifactId>
<version>2.0</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<configuration>
<archive>
<manifestEntries>
<lift_version>${pom.version}</lift_version>
</manifestEntries>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
@ -154,6 +265,158 @@
</configuration>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<version>2.0.0</version>
<extensions>true</extensions>
<configuration>
<instructions>
<Bundle-RequiredExecutionEnvironment>J2SE-1.5</Bundle-RequiredExecutionEnvironment>
<_versionpolicy>[$(@),$(version;=+;$(@)))</_versionpolicy>
</instructions>
</configuration>
<executions>
<execution>
<id>create-bundle</id>
<phase>package</phase>
<goals>
<goal>bundle</goal>
</goals>
</execution>
<execution>
<id>bundle-install</id>
<phase>install</phase>
<goals>
<goal>install</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>taglist-maven-plugin</artifactId>
<version>2.3</version>
<configuration>
<tags>
<tag>FIXME</tag>
<tag>TODO</tag>
<tag>XXX</tag>
<tag>@todo</tag>
<tag>@deprecated</tag>
</tags>
</configuration>
</plugin>
<plugin>
<artifactId>maven-project-info-reports-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.9</version>
<configuration>
<jvmArgs>
<jvmArg>-Xmx1024m</jvmArg>
<jvmArg>-DpackageLinkDefs=file://${basedir}/../vscaladocs-packageLinkDefs.properties</jvmArg>
</jvmArgs>
<args>
<arg>-unchecked</arg></args>
<vscaladocVersion>1.2-SNAPSHOT</vscaladocVersion>
<scalaVersion>${scala.version}</scalaVersion>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-changes-plugin</artifactId>
<version>2.0-beta-3</version>
<reportSets>
<reportSet>
<reports>
<report>changes-report</report>
</reports>
</reportSet>
</reportSets>
<configuration>
<xmlPath>${basedir}/changes.xml</xmlPath>
</configuration>
</plugin>
<plugin>
<artifactId>maven-surefire-report-plugin</artifactId>
<configuration><!--showSuccess>false</showSuccess--></configuration>
<reportSets>
<reportSet>
<reports>
<report>report-only</report>
</reports>
</reportSet>
</reportSets>
</plugin>
</plugins>
</reporting>
<distributionManagement>
</distributionManagement>
<profiles>
<profile>
<id>release</id>
<!--
<build>
<plugins>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
<descriptorRefs>
<descriptorRef>project</descriptorRef>
</descriptorRefs>
<tarLongFileMode>gnu</tarLongFileMode>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>attached</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
-->
<distributionManagement>
<repository>
<id>scala-tools.org</id>
<url>http://nexus.scala-tools.org/content/repositories/releases</url>
</repository>
<site>
<id>scala-tools.org</id>
<url>file://${user.home}/.m2/mvnsites/akka</url>
</site>
</distributionManagement>
</profile>
<profile>
<id>hudson</id>
<distributionManagement>
<repository>
<id>hudson.scala-tools.org</id>
<url>file:///home/scala-tools.org/www/repo-snapshots</url>
</repository>
<snapshotRepository>
<id>hudson.scala-tools.org</id>
<url>file:///home/scala-tools.org/www/repo-snapshots</url>
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
<site>
<id>hudson.scala-tools.org</id>
<url>file:///home/scala-tools.org/www/mvnsites-snapshots/akka</url>
</site>
</distributionManagement>
</profile>
</profiles>
</project>

View file

@ -32,13 +32,9 @@
<orderEntry type="library" exported="" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.camel:camel-core:2.0-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-logging:commons-logging-api:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.6" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.CR1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.GA" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache:zookeeper:3.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools:javautils:2.7.4-0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-core-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: sbinary:sbinary:0.3" level="project" />
@ -49,6 +45,8 @@
<orderEntry type="library" exported="" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: high-scale-lib:high-scale-lib:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-lang:commons-lang:2.4" level="project" />
<orderEntry type="library" exported="" name="Maven: se.foldleft:cassidy:0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-pool:commons-pool:1.5.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http:1.8.6.3" level="project" />
@ -65,6 +63,10 @@
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-json:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jettison:jettison:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.12" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-asl:0.9.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey.contribs:jersey-scala:1.1.2-ea-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-core:0.3" level="project" />

View file

@ -9,20 +9,21 @@
<parent>
<artifactId>akka</artifactId>
<groupId>${akka.groupId}</groupId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
<relativePath>../pom.xml</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>${akka.groupId}</groupId>
<artifactId>akka-kernel</artifactId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>${akka.groupId}</groupId>
<artifactId>akka-util-java</artifactId>
<version>${akka.version}</version>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>

View file

@ -29,7 +29,7 @@ public class SimpleService {
private boolean hasStartedTicking = false;
private TransactionalState factory = new TransactionalState();
private TransactionalMap<String, Object> storage = factory.newPersistentMap(new CassandraStorageConfig());
private TransactionalMap<Object, Object> storage = factory.newPersistentMap(new CassandraStorageConfig());
@GET
@Produces({"application/json"})

View file

@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8"?>
<module relativePaths="true" MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="Scala" name="Scala">
<configuration />
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="false">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src/main/scala" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/config" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test/scala" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.aspectwerkz:aspectwerkz-nodeps-jdk5:2.1" level="project" />
<orderEntry type="module" module-name="akka-kernel" exported="" />
<orderEntry type="module" module-name="akka-util-java" exported="" />
<orderEntry type="library" exported="" name="Maven: org.guiceyfruit:guice-core:2.0-beta-4" level="project" />
<orderEntry type="library" exported="" name="Maven: org.guiceyfruit:guice-jsr250:2.0-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.annotation:jsr250-api:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: com.google.protobuf:protobuf-java:2.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-lang:scala-library:2.7.5" level="project" />
<orderEntry type="library" exported="" name="Maven: net.lag:configgy:1.3" level="project" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-compiler:2.7.5" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools:vscaladoc:1.1-md-3" level="project" />
<orderEntry type="library" exported="" name="Maven: markdownj:markdownj:1.0.2b4-0.3.0" level="project" />
<orderEntry type="library" name="Maven: junit:junit:4.5" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools.testing:specs:1.4.4" level="project" />
<orderEntry type="library" exported="" name="Maven: org.guiceyfruit:guiceyfruit-core:2.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.guiceyfruit:guice-all:2.0" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.annotation:com.springsource.javax.annotation:1.0.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.aopalliance:com.springsource.org.aopalliance:1.0.0" level="project" />
<orderEntry type="library" exported="" name="Maven: com.google.code.google-collections:google-collect:snapshot-20080530" level="project" />
<orderEntry type="library" exported="" name="Maven: cglib:cglib:2.2" level="project" />
<orderEntry type="library" exported="" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.camel:camel-core:2.0-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.GA" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache:zookeeper:3.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.scala-tools:javautils:2.7.4-0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-core-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: sbinary:sbinary:0.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.twitter:scala-json:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.cassandra:cassandra:0.4.0-dev" level="project" />
<orderEntry type="library" exported="" name="Maven: com.facebook:thrift:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: com.facebook:fb303:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: high-scale-lib:high-scale-lib:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-lang:commons-lang:2.4" level="project" />
<orderEntry type="library" exported="" name="Maven: se.foldleft:cassidy:0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-pool:commons-pool:1.5.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-framework:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http-utils:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-rcm:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-portunif:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http-servlet:1.8.6.3" level="project" />
<orderEntry type="library" name="Maven: javax.servlet:servlet-api:2.5" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-server:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-core:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.ws.rs:jsr311-api:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-json:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jettison:jettison:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.12" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-asl:0.9.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey.contribs:jersey-scala:1.1.2-ea-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-core:0.3" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-portable-runtime:0.3" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-compat:0.3" level="project" />
<orderEntry type="library" exported="" name="Maven: org.slf4j:slf4j-log4j12:1.4.3" level="project" />
<orderEntry type="library" exported="" name="Maven: org.slf4j:slf4j-api:1.4.3" level="project" />
<orderEntry type="library" exported="" name="Maven: log4j:log4j:1.2.13" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-logging:commons-logging:1.0.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-client:1.1.0-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: net.liftweb:lift-util:1.1-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: javax.mail:mail:1.4" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-codec:commons-codec:1.3" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-fileupload:commons-fileupload:1.2.1" level="project" />
<orderEntry type="library" exported="" name="Maven: net.liftweb:lift-webkit:1.1-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: net.liftweb:lift-actor:1.1-SNAPSHOT" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty:7.0.0.pre5" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:servlet-api:3.0.pre4" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:7.0.0.pre5" level="project" />
</component>
</module>

View file

@ -0,0 +1,64 @@
#####################
# Akka Config File #
#####################
# This file has all the default settings, so all these could be remove with no visible effect.
# Modify as needed.
<log>
filename = "./logs/akka.log"
roll = "daily" # Options: never, hourly, daily, sunday/monday/...
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace
console = on
# syslog_host = ""
# syslog_server_name = ""
</log>
<akka>
version = "v0.5"
<actor>
timeout = 5000 # default timeout for future based invocations
concurrent-mode = off # if turned on, then the same actor instance is allowed to execute concurrently -
# e.g. departing from the actor model for better performance
serialize-messages = on # does a deep clone of (non-primitive) messages to ensure immutability
</actor>
<stm>
service = on
restart-on-collision = off # (not implemented yet) if 'on' then it reschedules the transaction,
# if 'off' then throws an exception or rollback for user to handle
wait-for-completion = 100 # how long time in millis a transaction should be given time to complete when a collision is detected
wait-nr-of-times = 3 # the number of times it should check for completion of a pending transaction upon collision
distributed = off # not implemented yet
</stm>
<remote>
service = on
hostname = "localhost"
port = 9999
connection-timeout = 1000 # in millis
</remote>
<rest>
service = on
hostname = "localhost"
port = 9998
</rest>
<storage>
system = "cassandra" # Options: cassandra (coming: terracotta, redis, tokyo-cabinet, tokyo-tyrant, voldemort, memcached, hazelcast)
<cassandra>
service = on
storage-format = "java" # Options: java, scala-json, java-json
blocking = false # inserts and queries should be blocking or not
<thrift-server>
service = on
pidfile = "akka.pid"
</thrift-server>
</cassandra>
  </storage>
</akka>

77
samples-lift/pom.xml Normal file
View file

@ -0,0 +1,77 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>akka-samples-lift</artifactId>
<name>Akka Lift Samples Module</name>
<packaging>war</packaging>
<parent>
<artifactId>akka</artifactId>
<groupId>se.scalablesolutions.akka</groupId>
<version>0.5</version>
<relativePath>../pom.xml</relativePath>
</parent>
<properties>
<lift.version>1.1-SNAPSHOT</lift.version>
</properties>
<dependencies>
<dependency>
<groupId>se.scalablesolutions.akka</groupId>
<artifactId>akka-kernel</artifactId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>se.scalablesolutions.akka</groupId>
<artifactId>akka-util-java</artifactId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>net.liftweb</groupId>
<artifactId>lift-util</artifactId>
<version>${lift.version}</version>
</dependency>
<dependency>
<groupId>net.liftweb</groupId>
<artifactId>lift-webkit</artifactId>
<version>${lift.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>2.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<version>[6.1.6,)</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${scala.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View file

@ -0,0 +1,42 @@
package sample.lift
import se.scalablesolutions.akka.kernel.state.{TransactionalState, CassandraStorageConfig}
import se.scalablesolutions.akka.kernel.actor.{SupervisorFactory, Actor}
import se.scalablesolutions.akka.kernel.config.ScalaConfig._
import se.scalablesolutions.akka.kernel.util.Logging
import javax.ws.rs.core.MultivaluedMap
import javax.ws.rs.{GET, POST, Path, Produces, WebApplicationException, Consumes}
/**
 * Try service out by invoking (multiple times):
 * <pre>
 * curl http://localhost:9998/liftcount
 * </pre>
 * Or browse to the URL from a web browser.
 *
 * JAX-RS resource backed by an Akka actor: every GET sends a Tick message
 * to the actor, which bumps a counter held in a persistent transactional map.
 */
@Path("/liftcount")
class SimpleService extends Actor {
  makeTransactionRequired

  // Internal message that triggers a single counter increment.
  case object Tick

  // Key under which the counter value is kept in the persistent map.
  private val KEY = "COUNTER"
  // False until the very first Tick has initialized the counter.
  private var hasStartedTicking = false
  // Transactional map backed by Cassandra storage.
  private val storage = TransactionalState.newPersistentMap(CassandraStorageConfig())

  @GET
  @Produces(Array("text/html"))
  def count = (this !! Tick).getOrElse(<h1>Error in counter</h1>)

  override def receive: PartialFunction[Any, Unit] = {
    case Tick =>
      if (!hasStartedTicking) {
        // First request ever: persist an initial counter of zero.
        storage.put(KEY, new Integer(0))
        hasStartedTicking = true
        reply(<h1>Tick: 0</h1>)
      } else {
        // Read the current value, increment, persist, and report the new value.
        val current = storage.get(KEY).get.asInstanceOf[Integer].intValue
        storage.put(KEY, new Integer(current + 1))
        reply(<h1>Tick: {current + 1}</h1>)
      }
  }
}

View file

@ -0,0 +1,58 @@
package bootstrap.liftweb
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import Helpers._
import _root_.net.liftweb.http.auth._
import se.scalablesolutions.akka.kernel.state.{TransactionalState, CassandraStorageConfig}
import se.scalablesolutions.akka.kernel.actor.{SupervisorFactory, Actor}
import se.scalablesolutions.akka.kernel.config.ScalaConfig._
import se.scalablesolutions.akka.kernel.util.Logging
import sample.lift.SimpleService
/**
 * A class that's instantiated early and run. It allows the application
 * to modify lift's environment.
 *
 * Configures Lift (snippet packages, HTTP Basic Auth protection for
 * /liftcount, fall-through to the servlet chain) and then boots an Akka
 * supervisor managing a SimpleService actor.
 */
class Boot {
  def boot {
    // where to search snippet
    LiftRules.addToPackages("sample.lift")

    // Require the "admin" role for any request to /liftcount.
    LiftRules.httpAuthProtectedResource.prepend {
      case (ParsePath("liftcount" :: Nil, _, _, _)) => Full(AuthRole("admin"))
    }

    // HTTP Basic authentication for realm "lift": only the credentials
    // someuser/1234 are accepted, and they are granted the "admin" role.
    LiftRules.authentication = HttpBasicAuthentication("lift") {
      case ("someuser", "1234", req) => {
        Log.info("You are now authenticated !")
        userRoles(AuthRole("admin"))
        true
      }
    }

    // Requests Lift does not handle fall through to the rest of the filter
    // chain (presumably the AkkaServlet — confirm against web.xml).
    LiftRules.passNotFoundToChain = true

    // Supervisor configuration: one-for-one restart of SimpleService with a
    // Permanent life cycle (3 / 100 restart-strategy parameters — confirm
    // exact semantics against the Akka config API).
    object factory extends SupervisorFactory {
      override def getSupervisorConfig: SupervisorConfig = {
        SupervisorConfig(
          RestartStrategy(OneForOne, 3, 100),
          Supervise(
            new SimpleService,
            LifeCycle(Permanent, 100)
          )
          :: Nil)
      }
    }
    val supervisor = factory.newSupervisor
    supervisor.startSupervisor

    // Build SiteMap
    // val entries = Menu(Loc("Home", List("index"), "Home")) :: Nil
    // LiftRules.setSiteMap(SiteMap(entries:_*))
  }
}

View file

View file

View file

@ -0,0 +1,6 @@
package sample.lift.snippet
class HelloWorld {
  /** Renders a greeting span stamped with the current date/time. */
  def howdy = {
    val now = new _root_.java.util.Date
    <span>Welcome to lift-akka at {now}</span>
  }
}

View file

View file

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" "http://java.sun.com/dtd/web-app_2_3.dtd">
<web-app>
<filter>
<filter-name>LiftFilter</filter-name>
<display-name>Lift Filter</display-name>
<description>The Filter that intercepts lift calls</description>
<filter-class>net.liftweb.http.LiftFilter</filter-class>
</filter>
<filter-mapping>
<filter-name>LiftFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<servlet>
<servlet-name>AkkaServlet</servlet-name>
<servlet-class>se.scalablesolutions.akka.kernel.jersey.AkkaServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>AkkaServlet</servlet-name>
<url-pattern>/*</url-pattern>
</servlet-mapping>
</web-app>

View file

@ -0,0 +1,15 @@
<lift:surround with="default" at="content">
<h2>Welcome to the Akka + Lift Sample</h2>
<p>This page is served by Lift, and Lift alone. In order to demonstrate how AkkaServlet and <br />
Lift can work in harmony we have supplied a sample JAX-RS service that is secured using <br />
Lift's HTTP Basic Authentication.</p>
<p>To access the Akka service, visit <a href="/liftcount">this url</a> and enter the
following access credentials:</p>
<p>user: <strong>someuser</strong><br />
password: <strong>1234</strong></p>
<p><lift:HelloWorld.howdy /></p>
</lift:surround>

View file

@ -0,0 +1,17 @@
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:lift="http://liftweb.net/">
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<meta name="description" content="" />
<meta name="keywords" content="" />
<lift:CSS.blueprint />
<title>Akka with Lift Example</title>
<script id="jquery" src="/classpath/jquery.js" type="text/javascript"></script>
</head>
<body>
<div class="container">
<lift:bind name="content" />
<lift:Menu.builder />
<lift:msgs/>
</div>
</body>
</html>

View file

@ -0,0 +1,15 @@
import _root_.bootstrap.liftweb.Boot
import _root_.scala.tools.nsc.MainGenericRunner
object LiftConsole {
  /** Entry point: boots the Lift application, then hands control to the Scala REPL. */
  def main(args: Array[String]) {
    // Bring up the Lift environment before starting the shell.
    new Boot().boot
    // Delegate to the standard Scala runner to provide the interactive REPL.
    MainGenericRunner.main(args)
    // Terminate the JVM once the REPL session ends.
    exit(0)
  }
}

Some files were not shown because too many files have changed in this diff Show more