merge with origin cassandra branch

jboner 2009-08-05 15:12:34 +02:00
commit e11fa7bf33
12 changed files with 457 additions and 83 deletions

akka.ipr

@ -679,7 +679,6 @@
<component name="VcsDirectoryMappings"> <component name="VcsDirectoryMappings">
<mapping directory="" vcs="" /> <mapping directory="" vcs="" />
<mapping directory="$PROJECT_DIR$" vcs="Git" /> <mapping directory="$PROJECT_DIR$" vcs="Git" />
<mapping directory="$PROJECT_DIR$/samples-lift" vcs="Git" />
</component> </component>
<component name="WebServicesPlugin" addRequiredLibraries="true" /> <component name="WebServicesPlugin" addRequiredLibraries="true" />
<component name="XPathView.XPathProjectComponent"> <component name="XPathView.XPathProjectComponent">
@@ -1799,6 +1798,17 @@
<root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/7.0.0.pre5/jetty-util-7.0.0.pre5-sources.jar!/" />
</SOURCES>
</library>
+<library name="Maven: commons-pool:commons-pool:1.5.1">
+<CLASSES>
+<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1.jar!/" />
+</CLASSES>
+<JAVADOC>
+<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1-javadoc.jar!/" />
+</JAVADOC>
+<SOURCES>
+<root url="jar://$MAVEN_REPOSITORY$/commons-pool/commons-pool/1.5.1/commons-pool-1.5.1-sources.jar!/" />
+</SOURCES>
+</library>
</component>
<UsedPathMacros>
<macro name="MAVEN_REPOSITORY" description="Maven Local Repostiry" />

akka.iws

@@ -6,16 +6,17 @@
</component>
<component name="ChangeListManager">
<list default="true" id="212ccd86-01aa-4780-a2f0-0d130be5abd2" name="Test" comment="Test">
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/akka-kernel.iml" afterPath="$PROJECT_DIR$/kernel/akka-kernel.iml" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/config/akka-reference.conf" afterPath="$PROJECT_DIR$/config/akka-reference.conf" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" afterPath="$PROJECT_DIR$/fun-test-java/akka-fun-test-java.iml" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/pom.xml" afterPath="$PROJECT_DIR$/pom.xml" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/actor/ActiveObject.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/actor/ActiveObject.scala" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" afterPath="$PROJECT_DIR$/samples-scala/akka-samples-scala.iml" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-java/akka-samples-java.iml" afterPath="$PROJECT_DIR$/samples-java/akka-samples-java.iml" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.ipr" afterPath="$PROJECT_DIR$/akka.ipr" />
-<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.iws" afterPath="$PROJECT_DIR$/akka.iws" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/state/State.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/state/State.scala" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/config/akka-reference.conf" afterPath="$PROJECT_DIR$/config/akka-reference.conf" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/pom.xml" afterPath="$PROJECT_DIR$/kernel/pom.xml" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" afterPath="$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/lib/cassandra-0.4.0-dev.jar" afterPath="$PROJECT_DIR$/lib/cassandra-0.4.0-dev.jar" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/embedded-repo/org/apache/cassandra/cassandra/0.4.0-dev/cassandra-0.4.0-dev.jar" afterPath="$PROJECT_DIR$/embedded-repo/org/apache/cassandra/cassandra/0.4.0-dev/cassandra-0.4.0-dev.jar" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.ipr" afterPath="$PROJECT_DIR$/akka.ipr" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala" afterPath="$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala" />
+<change type="MODIFICATION" beforePath="$PROJECT_DIR$/akka.iws" afterPath="$PROJECT_DIR$/akka.iws" />
+<change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/kernel/src/main/scala/state/Pool.scala" />
</list>
<list readonly="true" id="6e842704-fac6-40e9-8a67-d02385f87db9" name="Default" comment="&#10;# Brief commit desciption here&#10;&#10;# Full commit description here (comment lines starting with '#' will not be included)&#10;&#10;" />
<ignored path=".idea/workspace.xml" />
@@ -75,7 +76,7 @@
<option name="CONDITION" value="" />
<option name="LOG_MESSAGE" value="" />
</breakpoint>
-<breakpoint url="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" line="121" class="Class at Kernel.scala:117" package="">
+<breakpoint url="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala" line="126" class="Class at Kernel.scala:117" package="">
<option name="ENABLED" value="true" />
<option name="LOG_ENABLED" value="false" />
<option name="LOG_EXPRESSION_ENABLED" value="false" />
@@ -154,7 +155,7 @@
<file leaf-file-name="SimpleService.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
+<state line="1" column="0" selection-start="4" selection-end="4" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
@@ -163,7 +164,7 @@
<file leaf-file-name="akka-reference.conf" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/config/akka-reference.conf">
<provider selected="true" editor-type-id="text-editor">
-<state line="19" column="3" selection-start="513" selection-end="513" vertical-scroll-proportion="0.0">
+<state line="19" column="3" selection-start="474" selection-end="474" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
@@ -181,7 +182,7 @@
<file leaf-file-name="Kernel.scala" pinned="false" current="true" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="127" column="13" selection-start="4984" selection-end="4984" vertical-scroll-proportion="2.9195402">
+<state line="133" column="0" selection-start="5539" selection-end="5539" vertical-scroll-proportion="0.3678161">
<folding />
</state>
</provider>
@@ -199,8 +200,10 @@
<file leaf-file-name="CassandraStorage.scala" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="27" column="7" selection-start="768" selection-end="768" vertical-scroll-proportion="0.0">
-<folding />
+<state line="28" column="7" selection-start="779" selection-end="779" vertical-scroll-proportion="0.0">
+<folding>
+<marker date="1249477860000" expanded="true" signature="106:114" placeholder="..." />
+</folding>
</state>
</provider>
</entry>
@@ -895,40 +898,6 @@
<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
</PATH_ELEMENT>
</PATH>
-<PATH>
-<PATH_ELEMENT>
-<option name="myItemId" value="akka" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewProjectNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="akka-fun-test-java" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ProjectViewModuleNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="fun-test-java" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="src" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="test" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="java" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="api" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode" />
-</PATH_ELEMENT>
-<PATH_ELEMENT>
-<option name="myItemId" value="ProtobufSerializationTest" />
-<option name="myItemType" value="com.intellij.ide.projectView.impl.nodes.ClassTreeNode" />
-</PATH_ELEMENT>
-</PATH>
<PATH>
<PATH_ELEMENT>
<option name="myItemId" value="akka" />
@@ -2472,21 +2441,23 @@
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/state/CassandraStorage.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="27" column="7" selection-start="768" selection-end="768" vertical-scroll-proportion="0.0">
-<folding />
+<state line="28" column="7" selection-start="779" selection-end="779" vertical-scroll-proportion="0.0">
+<folding>
+<marker date="1249477860000" expanded="true" signature="106:114" placeholder="..." />
+</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/samples-scala/src/main/scala/SimpleService.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
+<state line="1" column="0" selection-start="4" selection-end="4" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/config/akka-reference.conf">
<provider selected="true" editor-type-id="text-editor">
-<state line="19" column="3" selection-start="513" selection-end="513" vertical-scroll-proportion="0.0">
+<state line="19" column="3" selection-start="474" selection-end="474" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
@@ -2514,7 +2485,7 @@
</entry>
<entry file="file://$PROJECT_DIR$/kernel/src/main/scala/Kernel.scala">
<provider selected="true" editor-type-id="text-editor">
-<state line="127" column="13" selection-start="4984" selection-end="4984" vertical-scroll-proportion="2.9195402">
+<state line="133" column="0" selection-start="5539" selection-end="5539" vertical-scroll-proportion="0.3678161">
<folding />
</state>
</provider>

config/akka-reference.conf

@@ -1,6 +1,6 @@
-#################################
-# Akka Actor Kernel Config File #
-#################################
+####################
+# Akka Config File #
+####################
# This file has all the default settings, so all these could be remove with no visible effect.
# Modify as needed.
@@ -22,9 +22,7 @@
# supervisor bootstrap, should be defined in default constructor
<actor>
timeout = 5000 # default timeout for future based invocations
-concurrent-mode = off # if turned on, then the same actor instance is allowed to execute concurrently
- # e.g. departing from the actor model for better performance
-serialize-messages = on # does a deep clone of (non-primitive) messages to ensure immutability
+serialize-messages = off # does a deep clone of (non-primitive) messages to ensure immutability
</actor>
<stm>
@@ -50,17 +48,14 @@
</rest>
<storage>
-system = "cassandra" # Options: cassandra (coming: terracotta, redis, tokyo-cabinet, tokyo-tyrant, voldemort, memcached, hazelcast)
+system = "cassandra" # Options: cassandra (coming: terracotta, mongodb, redis, tokyo-cabinet, voldemort, memcached)
<cassandra>
service = on
-storage-format = "java" # Options: java, scala-json, java-json
+hostname = "localhost" # ip address or hostname of one of the Cassandra cluster's seeds
+port = 9160
+storage-format = "binary" # Options: binary, json, simple-json
blocking = false # inserts and queries should be blocking or not
-<thrift-server>
-service = on
-pidfile = "akka.pid"
-</thrift-server>
</cassandra>
</rest>
</akka>
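The new akka.storage.cassandra keys above are the ones the rewritten CassandraStorage.scala later in this commit reads through Configgy. A minimal sketch of reading them, assuming Configgy is on the classpath and the file is loaded from config/akka-reference.conf as the kernel does (the object name ConfigExample is made up for illustration):

import net.lag.configgy.Configgy

object ConfigExample {
  def main(args: Array[String]): Unit = {
    Configgy.configure("config/akka-reference.conf")  // load the reference config
    val config = Configgy.config
    // Same keys and fallback defaults that CassandraStorage uses in this commit
    val hostname = config.getString("akka.storage.cassandra.hostname", "localhost")
    val port = config.getInt("akka.storage.cassandra.port", 9160)
    val format = config.getString("akka.storage.cassandra.storage-format", "binary")
    val blocking = config.getBool("akka.storage.cassandra.blocking", true)
    println(hostname + ":" + port + ", format=" + format + ", blocking=" + blocking)
  }
}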

kernel/pom.xml

@@ -125,6 +125,11 @@
<artifactId>cassidy</artifactId>
<version>0.1</version>
</dependency>
+<dependency>
+<groupId>commons-pool</groupId>
+<artifactId>commons-pool</artifactId>
+<version>1.5.1</version>
+</dependency>
<!-- For Jersey -->
<dependency>

kernel/src/main/scala/Kernel.scala

@@ -16,7 +16,7 @@ import net.lag.configgy.{Config, Configgy, RuntimeEnvironment}
import kernel.jersey.AkkaCometServlet
import kernel.nio.RemoteServer
-import kernel.state.CassandraStorage
+import kernel.state.EmbeddedCassandraStorage
import kernel.util.Logging
/**
@@ -178,7 +178,7 @@ object Kernel extends Logging {
println("=================================================")
var start = System.currentTimeMillis
-for (i <- 1 to NR_ENTRIES) CassandraStorage.insertMapStorageEntryFor("test", i.toString, "data")
+for (i <- 1 to NR_ENTRIES) EmbeddedCassandraStorage.insertMapStorageEntryFor("test", i.toString, "data")
var end = System.currentTimeMillis
println("Writes per second: " + NR_ENTRIES / ((end - start).toDouble / 1000))
@@ -186,13 +186,13 @@
start = System.currentTimeMillis
val entries = new scala.collection.mutable.ArrayBuffer[Tuple2[String, String]]
for (i <- 1 to NR_ENTRIES) entries += (i.toString, "data")
-CassandraStorage.insertMapStorageEntriesFor("test", entries.toList)
+EmbeddedCassandraStorage.insertMapStorageEntriesFor("test", entries.toList)
end = System.currentTimeMillis
println("Writes per second - batch: " + NR_ENTRIES / ((end - start).toDouble / 1000))
println("=================================================")
start = System.currentTimeMillis
-for (i <- 1 to NR_ENTRIES) CassandraStorage.getMapStorageEntryFor("test", i.toString)
+for (i <- 1 to NR_ENTRIES) EmbeddedCassandraStorage.getMapStorageEntryFor("test", i.toString)
end = System.currentTimeMillis
println("Reads per second: " + NR_ENTRIES / ((end - start).toDouble / 1000))

kernel/src/main/scala/state/CassandraStorage.scala

@@ -4,7 +4,7 @@
package se.scalablesolutions.akka.kernel.state
-import java.io.File
+import java.io.{File, Flushable, Closeable}
import kernel.util.Logging
import serialization.{Serializer, Serializable, SerializationProtocol}
@@ -12,11 +12,12 @@ import serialization.{Serializer, Serializable, SerializationProtocol}
import org.apache.cassandra.config.DatabaseDescriptor
import org.apache.cassandra.service._
-import org.apache.thrift.server.TThreadPoolServer
-import org.apache.thrift.protocol.TBinaryProtocol
-import org.apache.thrift.transport.TServerSocket
-import org.apache.thrift.transport.TTransportFactory
+//import org.apache.thrift.server.TThreadPoolServer
import org.apache.thrift.TProcessorFactory import org.apache.thrift.TProcessorFactory
import org.apache.thrift.transport._
import org.apache.thrift._
import org.apache.thrift.transport._
import org.apache.thrift.protocol._
/** /**
* NOTE: requires command line options: * NOTE: requires command line options:
@@ -33,6 +34,300 @@ object CassandraStorage extends Logging {
val IS_ASCENDING = true
import kernel.Kernel.config
val CASSANDRA_SERVER_HOSTNAME = config.getString("akka.storage.cassandra.hostname", "localhost")
val CASSANDRA_SERVER_PORT = config.getInt("akka.storage.cassandra.port", 9160)
val BLOCKING_CALL = if (config.getBool("akka.storage.cassandra.blocking", true)) 0
else 1
@volatile private[this] var isRunning = false
private[this] val protocol: Protocol = {
config.getString("akka.storage.cassandra.storage-format", "binary") match {
case "binary" => Protocol.Binary
case "json" => Protocol.JSON
case "simple-json" => Protocol.SimpleJSON
case unknown => throw new UnsupportedOperationException("Unknown storage serialization protocol [" + unknown + "]")
}
}
private[this] var sessions: Option[CassandraSessionPool[_]] = None
def start = synchronized {
if (!isRunning) {
try {
sessions = Some(new CassandraSessionPool(StackPool(SocketProvider(CASSANDRA_SERVER_HOSTNAME, CASSANDRA_SERVER_PORT)), protocol))
log.info("Cassandra persistent storage has started up successfully");
} catch {
case e =>
log.error("Could not start up Cassandra persistent storage")
throw e
}
isRunning
}
}
def stop = synchronized {
if (isRunning && sessions.isDefined) sessions.get.close
}
//implicit def strToBytes(s: String) = s.getBytes("UTF-8")
/*
def insertRefStorageFor(name: String, element: AnyRef) = sessions.withSession { session => {
val user_id = "1"
session ++| ("users", user_id, "base_attributes:name", "Lord Foo Bar", false)
session ++| ("users", user_id, "base_attributes:age", "24", false)
for( i <- session / ("users", user_id, "base_attributes", None, None).toList) println(i)
}}
*/
// ===============================================================
// For Ref
// ===============================================================
def insertRefStorageFor(name: String, element: String) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (
TABLE_NAME,
name,
REF_COLUMN_FAMILY,
element,
System.currentTimeMillis,
BLOCKING_CALL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
def getRefStorageFor(name: String): Option[String] = if (sessions.isDefined) {
try {
val column = sessions.get.withSession { _ | (TABLE_NAME, name, REF_COLUMN_FAMILY) }
Some(column.value)
} catch {
case e =>
e.printStackTrace
None
}
} else throw new IllegalStateException("CassandraStorage is not started")
// ===============================================================
// For Vector
// ===============================================================
def insertVectorStorageEntryFor(name: String, element: String) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (
TABLE_NAME,
name,
VECTOR_COLUMN_FAMILY + ":" + getVectorStorageSizeFor(name),
element,
System.currentTimeMillis,
BLOCKING_CALL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
def getVectorStorageEntryFor(name: String, index: Int): String = if (sessions.isDefined) {
try {
val column = sessions.get.withSession { _ | (TABLE_NAME, name, VECTOR_COLUMN_FAMILY + ":" + index) }
column.value
} catch {
case e =>
e.printStackTrace
throw new NoSuchElementException(e.getMessage)
}
} else throw new IllegalStateException("CassandraStorage is not started")
def getVectorStorageRangeFor(name: String, start: Int, count: Int): List[String] = if (sessions.isDefined) {
sessions.get.withSession { _ / (TABLE_NAME, name, VECTOR_COLUMN_FAMILY, IS_ASCENDING, count) }.map(_.value)
} else throw new IllegalStateException("CassandraStorage is not started")
def getVectorStorageSizeFor(name: String): Int = if (sessions.isDefined) {
sessions.get.withSession { _ |# (TABLE_NAME, name, VECTOR_COLUMN_FAMILY) }
} else throw new IllegalStateException("CassandraStorage is not started")
// ===============================================================
// For Map
// ===============================================================
def insertMapStorageEntryFor(name: String, key: String, value: String) = if (sessions.isDefined) {
sessions.get.withSession {
_ ++| (
TABLE_NAME,
name,
MAP_COLUMN_FAMILY + ":" + key,
value,
System.currentTimeMillis,
BLOCKING_CALL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[String, String]]) = if (sessions.isDefined) {
import java.util.{Map, HashMap, List, ArrayList}
val columns: Map[String, List[column_t]] = new HashMap
for (entry <- entries) {
val cls: List[column_t] = new ArrayList
cls.add(new column_t(entry._1, entry._2, System.currentTimeMillis))
columns.put(MAP_COLUMN_FAMILY, cls)
}
sessions.get.withSession {
_ ++| (
new batch_mutation_t(
TABLE_NAME,
name,
columns),
BLOCKING_CALL)
}
} else throw new IllegalStateException("CassandraStorage is not started")
def getMapStorageEntryFor(name: String, key: String): Option[String] = if (sessions.isDefined) {
try {
val column = sessions.get.withSession { _ | (TABLE_NAME, name, MAP_COLUMN_FAMILY + ":" + key) }
Some(column.value)
} catch {
case e =>
e.printStackTrace
None
}
} else throw new IllegalStateException("CassandraStorage is not started")
/*
def getMapStorageFor(name: String): List[Tuple2[String, String]] = if (sessions.isDefined) {
val columns = server.get_columns_since(TABLE_NAME, name, MAP_COLUMN_FAMILY, -1)
.toArray.toList.asInstanceOf[List[org.apache.cassandra.service.column_t]]
for {
column <- columns
col = (column.columnName, column.value)
} yield col
} else throw new IllegalStateException("CassandraStorage is not started")
*/
def getMapStorageSizeFor(name: String): Int = if (sessions.isDefined) {
sessions.get.withSession { _ |# (TABLE_NAME, name, MAP_COLUMN_FAMILY) }
} else throw new IllegalStateException("CassandraStorage is not started")
def removeMapStorageFor(name: String) = if (sessions.isDefined) {
sessions.get.withSession { _ -- (TABLE_NAME, name, MAP_COLUMN_FAMILY, System.currentTimeMillis, BLOCKING_CALL) }
} else throw new IllegalStateException("CassandraStorage is not started")
def getMapStorageRangeFor(name: String, start: Int, count: Int): List[Tuple2[String, String]] = if (sessions.isDefined) {
sessions.get.withSession { _ / (TABLE_NAME, name, MAP_COLUMN_FAMILY, IS_ASCENDING, count) }.toArray.toList.asInstanceOf[List[Tuple2[String, String]]]
} else throw new IllegalStateException("CassandraStorage is not started")
}
trait CassandraSession extends Closeable with Flushable {
import scala.collection.jcl.Conversions._
import org.scala_tools.javautils.Imports._
private implicit def null2Option[T](t: T): Option[T] = if(t != null) Some(t) else None
protected val client: Cassandra.Client
val obtainedAt: Long
def /(tableName: String, key: String, columnParent: String, start: Option[Int],end: Option[Int]): List[column_t] =
client.get_slice(tableName, key, columnParent, start.getOrElse(-1),end.getOrElse(-1)).toList
def /(tableName: String, key: String, columnParent: String, colNames: List[String]): List[column_t] =
client.get_slice_by_names(tableName, key, columnParent, colNames.asJava ).toList
def |(tableName: String, key: String, colPath: String): Option[column_t] =
client.get_column(tableName, key, colPath)
def |#(tableName: String, key: String, columnParent: String): Int =
client.get_column_count(tableName, key, columnParent)
def ++|(tableName: String, key: String, columnPath: String, cellData: Array[Byte], timestamp: Long, block: Int) =
client.insert(tableName, key, columnPath, cellData,timestamp,block)
def ++|(tableName: String, key: String, columnPath: String, cellData: Array[Byte], block: Int) =
client.insert(tableName,key,columnPath,cellData,obtainedAt,block)
def ++|(batch: batch_mutation_t, block: Int) =
client.batch_insert(batch, block)
def --(tableName: String, key: String, columnPathOrParent: String, timestamp: Long, block: Int) =
client.remove(tableName, key, columnPathOrParent, timestamp, block)
def --(tableName: String, key: String, columnPathOrParent: String, block: Int) =
client.remove(tableName, key, columnPathOrParent, obtainedAt, block)
def /@(tableName: String, key: String, columnParent: String, timestamp: Long): List[column_t] =
client.get_columns_since(tableName, key, columnParent, timestamp).toList
def /^(tableName: String, key: String, columnFamily: String, start: Option[Int], end: Option[Int], count: Int ): List[superColumn_t] =
client.get_slice_super(tableName, key,columnFamily, start.getOrElse(-1), end.getOrElse(-1)).toList //TODO upgrade thrift interface to support count
def /^(tableName: String, key: String, columnFamily: String, superColNames: List[String]): List[superColumn_t] =
client.get_slice_super_by_names(tableName, key, columnFamily, superColNames.asJava).toList
def |^(tableName: String, key: String, superColumnPath: String): Option[superColumn_t] =
client.get_superColumn(tableName,key,superColumnPath)
def ++|^ (batch: batch_mutation_super_t, block: Int) =
client.batch_insert_superColumn(batch, block)
def keys(tableName: String, startsWith: String, stopsAt: String, maxResults: Option[Int]): List[String] =
client.get_key_range(tableName, startsWith, stopsAt, maxResults.getOrElse(-1)).toList
def property(name: String): String = client.getStringProperty(name)
def properties(name: String): List[String] = client.getStringListProperty(name).toList
def describeTable(tableName: String) = client.describeTable(tableName)
def ?(query: String) = client.executeQuery(query)
}
class CassandraSessionPool[T <: TTransport](transportPool: Pool[T], inputProtocol: Protocol, outputProtocol: Protocol) extends Closeable {
def this(transportPool: Pool[T], ioProtocol: Protocol) = this(transportPool,ioProtocol,ioProtocol)
def newSession: CassandraSession = {
val t = transportPool.borrowObject
val c = new Cassandra.Client(inputProtocol(t),outputProtocol(t))
new CassandraSession {
val client = c
val obtainedAt = System.currentTimeMillis
def flush = t.flush
def close = transportPool.returnObject(t)
}
}
def withSession[R](body: CassandraSession => R) = {
val session = newSession
try {
val result = body(session)
session.flush
result
} finally {
session.close
}
}
def close = transportPool.close
}
sealed abstract class Protocol(val factory: TProtocolFactory) {
def apply(transport: TTransport) = factory.getProtocol(transport)
}
object Protocol {
object Binary extends Protocol(new TBinaryProtocol.Factory)
object SimpleJSON extends Protocol(new TSimpleJSONProtocol.Factory)
object JSON extends Protocol(new TJSONProtocol.Factory)
}
/**
* NOTE: requires command line options:
* <br/>
* <code>-Dcassandra -Dstorage-config=config/ -Dpidfile=akka.pid</code>
* <p/>
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*
object EmbeddedCassandraStorage extends Logging {
val TABLE_NAME = "akka"
val MAP_COLUMN_FAMILY = "map"
val VECTOR_COLUMN_FAMILY = "vector"
val REF_COLUMN_FAMILY = "ref:item"
val IS_ASCENDING = true
val RUN_THRIFT_SERVICE = kernel.Kernel.config.getBool("akka.storage.cassandra.thrift-server.service", false)
val BLOCKING_CALL = {
if (kernel.Kernel.config.getBool("akka.storage.cassandra.blocking", true)) 0
@@ -89,7 +384,7 @@ object CassandraStorage extends Logging {
TABLE_NAME,
name,
REF_COLUMN_FAMILY,
-serializer.out(element),
+element,
System.currentTimeMillis,
BLOCKING_CALL)
}
@@ -114,7 +409,7 @@ object CassandraStorage extends Logging {
TABLE_NAME,
name,
VECTOR_COLUMN_FAMILY + ":" + getVectorStorageSizeFor(name),
-serializer.out(element),
+element,
System.currentTimeMillis,
BLOCKING_CALL)
}
@@ -198,6 +493,7 @@ object CassandraStorage extends Logging {
}
}
class CassandraThriftServer(server: CassandraServer) extends Logging {
case object Start
case object Stop
@@ -241,3 +537,4 @@ class CassandraThriftServer(server: CassandraServer) extends Logging {
def start = serverDaemon ! Start
def stop = serverDaemon ! Stop
}
+*/
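The net effect of this file is to swap the embedded, in-process Cassandra (now commented out along with CassandraThriftServer, under the EmbeddedCassandraStorage name) for a Thrift client that talks to an external node through a pooled connection. Note that Kernel.scala above still imports and calls EmbeddedCassandraStorage, so the benchmark code likely would not compile against this revision as-is. A minimal sketch of driving the new client, assuming a Cassandra 0.4.0-dev node is reachable at the configured hostname and port (StorageExample is a made-up name for illustration):

object StorageExample {
  def main(args: Array[String]): Unit = {
    CassandraStorage.start                                          // create the pooled Thrift sessions
    CassandraStorage.insertMapStorageEntryFor("test", "1", "data")  // write one map entry
    println(CassandraStorage.getMapStorageEntryFor("test", "1"))    // Option with the stored value, None on error
    CassandraStorage.stop                                           // shut the session pool down
  }
}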

kernel/src/main/scala/state/Pool.scala (new file)

@@ -0,0 +1,94 @@
/**
* Copyright (C) 2009 Scalable Solutions.
*/
package se.scalablesolutions.akka.kernel.state
import org.apache.commons.pool._
import org.apache.commons.pool.impl._
trait Pool[T] extends java.io.Closeable {
def borrowObject: T
def returnObject(t: T): Unit
def invalidateObject(t: T): Unit
def addObject: Unit
def getNumIdle: Int
def getNumActive: Int
def clear: Unit
def setFactory(factory: PoolItemFactory[T]): Unit
}
trait PoolFactory[T] {
def createPool: Pool[T]
}
trait PoolItemFactory[T] {
def makeObject: T
def destroyObject(t: T): Unit
def validateObject(t: T): Boolean
def activateObject(t: T): Unit
def passivateObject(t: T): Unit
}
trait PoolBridge[T, OP <: ObjectPool] extends Pool[T] {
val impl: OP
override def borrowObject: T = impl.borrowObject.asInstanceOf[T]
override def returnObject(t: T) = impl.returnObject(t)
override def invalidateObject(t: T) = impl.invalidateObject(t)
override def addObject = impl.addObject
override def getNumIdle: Int = impl.getNumIdle
override def getNumActive: Int = impl.getNumActive
override def clear: Unit = impl.clear
override def close: Unit = impl.close
override def setFactory(factory: PoolItemFactory[T]) = impl.setFactory(toPoolableObjectFactory(factory))
def toPoolableObjectFactory[T](pif: PoolItemFactory[T]) = new PoolableObjectFactory {
def makeObject: Object = pif.makeObject.asInstanceOf[Object]
def destroyObject(o: Object): Unit = pif.destroyObject(o.asInstanceOf[T])
def validateObject(o: Object): Boolean = pif.validateObject(o.asInstanceOf[T])
def activateObject(o: Object): Unit = pif.activateObject(o.asInstanceOf[T])
def passivateObject(o: Object): Unit = pif.passivateObject(o.asInstanceOf[T])
}
}
object StackPool {
def apply[T](factory: PoolItemFactory[T]) = new PoolBridge[T,StackObjectPool] {
val impl = new StackObjectPool(toPoolableObjectFactory(factory))
}
def apply[T](factory: PoolItemFactory[T], maxIdle: Int) = new PoolBridge[T,StackObjectPool] {
val impl = new StackObjectPool(toPoolableObjectFactory(factory),maxIdle)
}
def apply[T](factory: PoolItemFactory[T], maxIdle: Int, initIdleCapacity: Int) = new PoolBridge[T,StackObjectPool] {
val impl = new StackObjectPool(toPoolableObjectFactory(factory),maxIdle,initIdleCapacity)
}
}
object SoftRefPool {
def apply[T](factory: PoolItemFactory[T]) = new PoolBridge[T,SoftReferenceObjectPool] {
val impl = new SoftReferenceObjectPool(toPoolableObjectFactory(factory))
}
def apply[T](factory: PoolItemFactory[T], initSize: Int) = new PoolBridge[T,SoftReferenceObjectPool] {
val impl = new SoftReferenceObjectPool(toPoolableObjectFactory(factory),initSize)
}
}
trait TransportFactory[T <: TTransport] extends PoolItemFactory[T] {
def createTransport: T
def makeObject: T = createTransport
def destroyObject(transport: T): Unit = transport.close
def validateObject(transport: T) = transport.isOpen
def activateObject(transport: T): Unit = if( !transport.isOpen ) transport.open else ()
def passivateObject(transport: T): Unit = transport.flush
}
case class SocketProvider(val host: String, val port: Int) extends TransportFactory[TSocket] {
def createTransport = {
val t = new TSocket(host,port)
t.open
t
}
}
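Pool.scala is a thin Scala facade over commons-pool's ObjectPool: any PoolItemFactory can be plugged into StackPool or SoftRefPool, and SocketProvider is the Thrift-backed factory the Cassandra session pool above uses. As committed, the file references TTransport and TSocket without importing org.apache.thrift.transport, so it presumably needs the same Thrift imports as CassandraStorage.scala. A minimal sketch exercising the bridge with a dummy pooled resource (ConnectionLike, ConnectionFactory and PoolExample are made-up names):

object PoolExample {
  class ConnectionLike { var open = true }

  object ConnectionFactory extends PoolItemFactory[ConnectionLike] {
    def makeObject = new ConnectionLike
    def destroyObject(c: ConnectionLike) = c.open = false
    def validateObject(c: ConnectionLike) = c.open
    def activateObject(c: ConnectionLike) = ()
    def passivateObject(c: ConnectionLike) = ()
  }

  def main(args: Array[String]): Unit = {
    val pool = StackPool(ConnectionFactory, 4)  // keep at most 4 idle instances
    val conn = pool.borrowObject                // take an instance from the pool
    try { /* use conn */ } finally pool.returnObject(conn)
    pool.close
  }
}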

Binary file not shown.

kernel/akka-kernel.iml

@@ -32,6 +32,11 @@
<orderEntry type="library" exported="" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" exported="" name="Maven: aopalliance:aopalliance:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache.camel:camel-core:2.0-SNAPSHOT" level="project" />
+<orderEntry type="library" exported="" name="Maven: commons-logging:commons-logging-api:1.1" level="project" />
+<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
+<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
+<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
+<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.6" level="project" />
<orderEntry type="library" exported="" name="Maven: org.jboss.netty:netty:3.1.0.CR1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.apache:zookeeper:3.1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-core-asl:1.1.0" level="project" />
@@ -45,6 +50,7 @@
<orderEntry type="library" exported="" name="Maven: high-scale-lib:high-scale-lib:1.0" level="project" />
<orderEntry type="library" exported="" name="Maven: commons-lang:commons-lang:2.4" level="project" />
<orderEntry type="library" exported="" name="Maven: se.foldleft:cassidy:0.1" level="project" />
+<orderEntry type="library" exported="" name="Maven: commons-pool:commons-pool:1.5.1" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-comet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-servlet-webserver:1.8.6.3" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.grizzly:grizzly-http:1.8.6.3" level="project" />
@@ -61,10 +67,6 @@
<orderEntry type="library" exported="" name="Maven: com.sun.jersey:jersey-json:1.1.1-ea" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jettison:jettison:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: stax:stax-api:1.0.1" level="project" />
-<orderEntry type="library" exported="" name="Maven: com.sun.xml.bind:jaxb-impl:2.1.12" level="project" />
-<orderEntry type="library" exported="" name="Maven: javax.xml.bind:jaxb-api:2.1" level="project" />
-<orderEntry type="library" exported="" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" />
-<orderEntry type="library" exported="" name="Maven: javax.activation:activation:1.1" level="project" />
<orderEntry type="library" exported="" name="Maven: org.codehaus.jackson:jackson-asl:0.9.4" level="project" />
<orderEntry type="library" exported="" name="Maven: com.sun.jersey.contribs:jersey-scala:1.1.2-ea-SNAPSHOT" level="project" />
<orderEntry type="library" exported="" name="Maven: org.atmosphere:atmosphere-core:0.3" level="project" />