Added a simple test showing how to use Hbase testing utilities
commit 0721937033 · parent 848e0cbbfe
6 changed files with 195 additions and 0 deletions
@@ -0,0 +1,50 @@
/**
 * Copyright (C) 2009-2010 Scalable Solutions AB <http://scalablesolutions.se>
 */

package se.scalablesolutions.akka.persistence.hbase

import se.scalablesolutions.akka.util.UUID
import se.scalablesolutions.akka.stm._
import se.scalablesolutions.akka.persistence.common._

object HbaseStorage /*extends Storage*/ {
  type ElementType = Array[Byte]

  //def newMap: PersistentMap[ElementType, ElementType] = newMap(UUID.newUuid.toString)
  //def newVector: PersistentVector[ElementType] = newVector(UUID.newUuid.toString)
  //def newRef: PersistentRef[ElementType] = newRef(UUID.newUuid.toString)

  //def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id)
  //def getVector(id: String): PersistentVector[ElementType] = newVector(id)
  //def getRef(id: String): PersistentRef[ElementType] = newRef(id)

  //def newMap(id: String): PersistentMap[ElementType, ElementType] = new HbasePersistentMap(id)
  //def newVector(id: String): PersistentVector[ElementType] = new HbasePersistentVector(id)
  //def newRef(id: String): PersistentRef[ElementType] = new HbasePersistentRef(id)
}

/**
 * Implements a persistent transactional map based on Hbase.
 *
 * @author <a href="http://www.davidgreco.it">David Greco</a>
 */
class HbasePersistentMap(id: String) /*extends PersistentMapBinary*/ {
  val uuid = id
  //val storage = HbaseStorageBackend
}

/**
 * Implements a persistent transactional vector based on Hbase.
 *
 * @author <a href="http://www.davidgreco.it">David Greco</a>
 */
class HbasePersistentVector(id: String) /*extends PersistentVector[Array[Byte]]*/ {
  val uuid = id
  //val storage = HbaseStorageBackend
}

class HbasePersistentRef(id: String) /*extends PersistentRef[Array[Byte]]*/ {
  val uuid = id
  //val storage = HbaseStorageBackend
}
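Everything in this first file is scaffolding: the factory methods and the backend wiring are all commented out. Pieced together from those commented fragments, the intended end state of, for example, the map class would look roughly like the sketch below. This is an assumption about where the module is headed, not code in this commit, and it only compiles once HbaseStorageBackend actually implements the common storage-backend traits.

// Hypothetical end state assembled from the commented-out fragments above
// (not part of this commit): the class mixes in the common binary-map trait
// from akka.persistence.common and points `storage` at the HBase backend.
class HbasePersistentMap(id: String) extends PersistentMapBinary {
  val uuid = id
  val storage = HbaseStorageBackend
}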
@@ -0,0 +1,95 @@
/**
 * Copyright (C) 2009-2010 Scalable Solutions AB <http://scalablesolutions.se>
 */

package se.scalablesolutions.akka.persistence.hbase

import se.scalablesolutions.akka.stm._
import se.scalablesolutions.akka.persistence.common._
import se.scalablesolutions.akka.util.Logging
import se.scalablesolutions.akka.util.Helpers._
import se.scalablesolutions.akka.config.Config.config

/**
 * @author <a href="http://www.davidgreco.it">David Greco</a>
 */
private[akka] object HbaseStorageBackend /* extends
    MapStorageBackend[Array[Byte], Array[Byte]] with
    VectorStorageBackend[Array[Byte]] with
    RefStorageBackend[Array[Byte]] with
    Logging */ {

  type ElementType = Array[Byte]

  val KEYSPACE = "akka"
  val REF_KEY = "item".getBytes("UTF-8")
  val EMPTY_BYTE_ARRAY = new Array[Byte](0)

  val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.storage.hbase.zookeeper.quorum", "127.0.0.1")

  // ===============================================================
  // For Ref
  // ===============================================================

  def insertRefStorageFor(name: String, element: Array[Byte]) = {
  }

  //def getRefStorageFor(name: String): Option[Array[Byte]] = {
  //}

  // ===============================================================
  // For Vector
  // ===============================================================

  def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = {
  }

  def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) =
    elements.foreach(insertVectorStorageEntryFor(name, _))

  def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = {
  }

  //def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = {
  //}

  /**
   * if <tt>start</tt> and <tt>finish</tt> both are defined, ignore <tt>count</tt> and
   * report the range [start, finish)
   * if <tt>start</tt> is not defined, assume <tt>start</tt> = 0
   * if <tt>start</tt> == 0 and <tt>finish</tt> == 0, return an empty collection
   */
  // def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int):
  // }

  // def getVectorStorageSizeFor(name: String): Int = {
  // }

  // ===============================================================
  // For Map
  // ===============================================================

  def insertMapStorageEntryFor(name: String, key: Array[Byte], element: Array[Byte]) = {
  }

  def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[Array[Byte], Array[Byte]]]) = {
  }

  // def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = {
  // }

  // def getMapStorageFor(name: String): List[Tuple2[Array[Byte], Array[Byte]]] = {
  // }

  // def getMapStorageSizeFor(name: String): Int = {
  // }

  def removeMapStorageFor(name: String): Unit = removeMapStorageFor(name, null)

  def removeMapStorageFor(name: String, key: Array[Byte]): Unit = {
  }

  // def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int):
  //   List[Tuple2[Array[Byte], Array[Byte]]] = {
  // }
}
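Every storage method in this backend is still an empty stub. As a rough, hypothetical sketch of how the ref operations could eventually talk to HBase through the 0.20 client API, the write/read path might look like the snippet below. The `akka` table, the `ref` column family, and the `RefStorageSketch` helper object are assumptions made for illustration; none of this is part of the commit.

// Hypothetical sketch only; assumes an "akka" table with a "ref" column
// family already exists on the cluster reached via the ZooKeeper quorum.
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{HTable, Put, Get}
import org.apache.hadoop.hbase.util.Bytes

object RefStorageSketch {
  private val conf = new HBaseConfiguration()
  conf.set("hbase.zookeeper.quorum", "127.0.0.1")

  private val REF_FAMILY = Bytes.toBytes("ref")
  private val REF_KEY = Bytes.toBytes("item")
  private lazy val table = new HTable(conf, "akka")

  // One cell per ref, keyed by the ref's name.
  def insertRefStorageFor(name: String, element: Array[Byte]): Unit = {
    val put = new Put(Bytes.toBytes(name))
    put.add(REF_FAMILY, REF_KEY, element)
    table.put(put)
  }

  // Read the cell back; None when the row does not exist yet.
  def getRefStorageFor(name: String): Option[Array[Byte]] = {
    val result = table.get(new Get(Bytes.toBytes(name)))
    if (result == null || result.isEmpty) None else Some(result.value)
  }
}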
@@ -0,0 +1,25 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


log4j.rootLogger=DEBUG,R

# rolling log file ("system.log")
log4j.appender.R=org.apache.log4j.DailyRollingFileAppender
log4j.appender.R.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n
log4j.appender.R.File=target/logs/system.log
@@ -0,0 +1,25 @@
package se.scalablesolutions.akka.persistence.hbase

import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.BeforeAndAfterAll
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.junit.Test
import org.apache.hadoop.hbase.HBaseClusterTestCase

@Test
class PersistenceTest extends HBaseClusterTestCase with Spec with BeforeAndAfterAll {

  override def beforeAll {
    super.setUp
  }

  @Test
  def testPersistence {}

  override def afterAll {
    super.tearDown
  }
}
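This test class is the point of the commit: HBaseClusterTestCase starts an in-process mini HBase cluster in setUp and shuts it down in tearDown, so wiring those calls into ScalaTest's beforeAll/afterAll gives every test a real region server without any external setup. The body of testPersistence is still empty; a hypothetical example of what it could exercise against the mini cluster is sketched below. The table name, column family, and the use of a `conf` field inherited from the HBase test case are assumptions for illustration, not part of this commit.

// Hypothetical body for testPersistence (not part of this commit).
// Assumes the configuration inherited from HBaseClusterTestCase (here
// referred to as `conf`) points at the mini cluster started in setUp.
import org.apache.hadoop.hbase.{HTableDescriptor, HColumnDescriptor}
import org.apache.hadoop.hbase.client.{HBaseAdmin, HTable, Put, Get}
import org.apache.hadoop.hbase.util.Bytes

@Test
def testPersistence {
  // Create an "akka" table with a "ref" family on the in-process cluster.
  val admin = new HBaseAdmin(conf)
  val desc = new HTableDescriptor("akka")
  desc.addFamily(new HColumnDescriptor("ref"))
  admin.createTable(desc)

  // Write one cell and read it back.
  val table = new HTable(conf, "akka")
  val put = new Put(Bytes.toBytes("some-ref"))
  put.add(Bytes.toBytes("ref"), Bytes.toBytes("item"), Bytes.toBytes("payload"))
  table.put(put)

  val value = table.get(new Get(Bytes.toBytes("some-ref"))).value
  assert(Bytes.toString(value) == "payload")
}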
BIN  embedded-repo/org/apache/hbase/hbase/0.20.6/hbase-0.20.6.jar (new file, binary file not shown)