+cdd #16799 Add Distributed Data module

Previously known as [patriknw/akka-data-replication](https://github.com/patriknw/akka-data-replication),
which was originally inspired by [jboner/akka-crdt](https://github.com/jboner/akka-crdt).

The functionality is very similar to akka-data-replication 0.11.

Here is a list of the most important changes:

* The package name changed to `akka.cluster.ddata`
* The extension was renamed to `DistributedData`
* The keys changed from strings to classes that carry a unique identifier and the type of the data value,
  e.g. `ORSetKey[Int]("set2")`
* The optional read consistency parameter was removed from the `Update` message. If you need to read from
  other replicas before performing the update, first send a `Get` message and then continue with the
  `Update` when the `GetSuccess` is received (see the sketch below this list)
* `BigInt` is used in `GCounter` and `PNCounter` instead of `Long`
* Improvements of the Java API
* Better documentation
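
For illustration, here is a minimal sketch of the typed keys and the Get-then-Update pattern described in
the list above. The actor, key name, and consistency settings are assumptions made up for this example and
are not part of this commit:

```scala
import scala.concurrent.duration._
import akka.actor.Actor
import akka.cluster.Cluster
import akka.cluster.ddata.{ DistributedData, ORSet, ORSetKey }
import akka.cluster.ddata.Replicator._

// Hypothetical example actor: it reads with ReadMajority first and performs the
// Update when the GetSuccess (or NotFound) reply arrives.
class SetUpdater extends Actor {
  implicit val cluster = Cluster(context.system)
  val replicator = DistributedData(context.system).replicator
  // keys are now classes with a unique identifier and the type of the data value
  val SetKey = ORSetKey[Int]("set2")

  def receive = {
    case n: Int ⇒
      // read consistency is no longer a parameter of Update; do an explicit Get first
      replicator ! Get(SetKey, ReadMajority(3.seconds), Some(n))
    case GetSuccess(SetKey, Some(n: Int)) ⇒
      replicator ! Update(SetKey, ORSet.empty[Int], WriteLocal)(_ + n)
    case NotFound(SetKey, Some(n: Int)) ⇒
      replicator ! Update(SetKey, ORSet.empty[Int], WriteLocal)(_ + n)
  }
}
```

The `DataBot` sample further down in this diff shows the simpler variant that skips the read and sends the
`Update` with `WriteLocal` directly.
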
Patrik Nordwall 2015-05-17 12:28:47 +02:00
parent bf28260cd0
commit cbe5dd2cf5
69 changed files with 40036 additions and 3 deletions

@@ -0,0 +1,45 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class FlagSpec extends WordSpec with Matchers {
"A Flag" must {
"be able to switch on once" in {
val f1 = Flag()
val f2 = f1.switchOn
val f3 = f2.switchOn
f1.enabled should be(false)
f2.enabled should be(true)
f3.enabled should be(true)
}
"merge by picking true" in {
val f1 = Flag()
val f2 = f1.switchOn
val m1 = f1 merge f2
m1.enabled should be(true)
val m2 = f2 merge f1
m2.enabled should be(true)
}
"have unapply extractor" in {
val f1 = Flag.empty.switchOn
val Flag(value1) = f1
val value2: Boolean = value1
Changed(FlagKey("key"))(f1) match {
case c @ Changed(FlagKey("key")) ⇒
val Flag(value3) = c.dataValue
val value4: Boolean = value3
value4 should be(true)
}
}
}
}

@@ -0,0 +1,171 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class GCounterSpec extends WordSpec with Matchers {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
val node3 = UniqueAddress(node1.address.copy(port = Some(2553)), 3)
"A GCounter" must {
"be able to increment each node's record by one" in {
val c1 = GCounter()
val c2 = c1 increment node1
val c3 = c2 increment node1
val c4 = c3 increment node2
val c5 = c4 increment node2
val c6 = c5 increment node2
c6.state(node1) should be(2)
c6.state(node2) should be(3)
}
"be able to increment each node's record by arbitrary delta" in {
val c1 = GCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 increment (node1, 4)
val c4 = c3 increment (node2, 2)
val c5 = c4 increment (node2, 7)
val c6 = c5 increment node2
c6.state(node1) should be(7)
c6.state(node2) should be(10)
}
"be able to summarize the history to the correct aggregated value" in {
val c1 = GCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 increment (node1, 4)
val c4 = c3 increment (node2, 2)
val c5 = c4 increment (node2, 7)
val c6 = c5 increment node2
c6.state(node1) should be(7)
c6.state(node2) should be(10)
c6.value should be(17)
}
"be able to have its history correctly merged with another GCounter 1" in {
// counter 1
val c11 = GCounter()
val c12 = c11 increment (node1, 3)
val c13 = c12 increment (node1, 4)
val c14 = c13 increment (node2, 2)
val c15 = c14 increment (node2, 7)
val c16 = c15 increment node2
c16.state(node1) should be(7)
c16.state(node2) should be(10)
c16.value should be(17)
// counter 2
val c21 = GCounter()
val c22 = c21 increment (node1, 2)
val c23 = c22 increment (node1, 2)
val c24 = c23 increment (node2, 3)
val c25 = c24 increment (node2, 2)
val c26 = c25 increment node2
c26.state(node1) should be(4)
c26.state(node2) should be(6)
c26.value should be(10)
// merge both ways
val merged1 = c16 merge c26
merged1.state(node1) should be(7)
merged1.state(node2) should be(10)
merged1.value should be(17)
val merged2 = c26 merge c16
merged2.state(node1) should be(7)
merged2.state(node2) should be(10)
merged2.value should be(17)
}
"be able to have its history correctly merged with another GCounter 2" in {
// counter 1
val c11 = GCounter()
val c12 = c11 increment (node1, 2)
val c13 = c12 increment (node1, 2)
val c14 = c13 increment (node2, 2)
val c15 = c14 increment (node2, 7)
val c16 = c15 increment node2
c16.state(node1) should be(4)
c16.state(node2) should be(10)
c16.value should be(14)
// counter 2
val c21 = GCounter()
val c22 = c21 increment (node1, 3)
val c23 = c22 increment (node1, 4)
val c24 = c23 increment (node2, 3)
val c25 = c24 increment (node2, 2)
val c26 = c25 increment node2
c26.state(node1) should be(7)
c26.state(node2) should be(6)
c26.value should be(13)
// merge both ways
val merged1 = c16 merge c26
merged1.state(node1) should be(7)
merged1.state(node2) should be(10)
merged1.value should be(17)
val merged2 = c26 merge c16
merged2.state(node1) should be(7)
merged2.state(node2) should be(10)
merged2.value should be(17)
}
"have support for pruning" in {
val c1 = GCounter()
val c2 = c1 increment node1
val c3 = c2 increment node2
c2.needPruningFrom(node1) should be(true)
c2.needPruningFrom(node2) should be(false)
c3.needPruningFrom(node1) should be(true)
c3.needPruningFrom(node2) should be(true)
c3.value should be(2)
val c4 = c3.prune(node1, node2)
c4.needPruningFrom(node2) should be(true)
c4.needPruningFrom(node1) should be(false)
c4.value should be(2)
val c5 = (c4 increment node1).pruningCleanup(node1)
c5.needPruningFrom(node1) should be(false)
c4.value should be(2)
}
"have unapply extractor" in {
val c1 = GCounter.empty.increment(node1).increment(node2)
val GCounter(value1) = c1
val value2: BigInt = value1
Changed(GCounterKey("key"))(c1) match {
case c @ Changed(GCounterKey("key")) ⇒
val GCounter(value3) = c.dataValue
val value4: BigInt = value3
value4 should be(2L)
}
}
}
}

@@ -0,0 +1,119 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class GSetSpec extends WordSpec with Matchers {
val user1 = """{"username":"john","password":"coltrane"}"""
val user2 = """{"username":"sonny","password":"rollins"}"""
val user3 = """{"username":"charlie","password":"parker"}"""
val user4 = """{"username":"charles","password":"mingus"}"""
"A GSet" must {
"be able to add user" in {
val c1 = GSet.empty[String]
val c2 = c1 + user1
val c3 = c2 + user2
val c4 = c3 + user4
val c5 = c4 + user3
c5.elements should contain(user1)
c5.elements should contain(user2)
c5.elements should contain(user3)
c5.elements should contain(user4)
}
"be able to have its user set correctly merged with another GSet with unique user sets" in {
// set 1
val c11 = GSet.empty[String]
val c12 = c11 + user1
val c13 = c12 + user2
c13.elements should contain(user1)
c13.elements should contain(user2)
// set 2
val c21 = GSet.empty[String]
val c22 = c21 + user3
val c23 = c22 + user4
c23.elements should contain(user3)
c23.elements should contain(user4)
// merge both ways
val merged1 = c13 merge c23
merged1.elements should contain(user1)
merged1.elements should contain(user2)
merged1.elements should contain(user3)
merged1.elements should contain(user4)
val merged2 = c23 merge c13
merged2.elements should contain(user1)
merged2.elements should contain(user2)
merged2.elements should contain(user3)
merged2.elements should contain(user4)
}
"be able to have its user set correctly merged with another GSet with overlapping user sets" in {
// set 1
val c10 = GSet.empty[String]
val c11 = c10 + user1
val c12 = c11 + user2
val c13 = c12 + user3
c13.elements should contain(user1)
c13.elements should contain(user2)
c13.elements should contain(user3)
// set 2
val c20 = GSet.empty[String]
val c21 = c20 + user2
val c22 = c21 + user3
val c23 = c22 + user4
c23.elements should contain(user2)
c23.elements should contain(user3)
c23.elements should contain(user4)
// merge both ways
val merged1 = c13 merge c23
merged1.elements should contain(user1)
merged1.elements should contain(user2)
merged1.elements should contain(user3)
merged1.elements should contain(user4)
val merged2 = c23 merge c13
merged2.elements should contain(user1)
merged2.elements should contain(user2)
merged2.elements should contain(user3)
merged2.elements should contain(user4)
}
"have unapply extractor" in {
val s1 = GSet.empty + "a" + "b"
val s2: GSet[String] = s1
val GSet(elements1) = s1
val elements2: Set[String] = elements1
Changed(GSetKey[String]("key"))(s1) match {
case c @ Changed(GSetKey("key")) ⇒
val GSet(elements3) = c.dataValue
val elements4: Set[String] = elements3
elements4 should be(Set("a", "b"))
}
}
}
}

@@ -0,0 +1,63 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class LWWMapSpec extends WordSpec with Matchers {
import LWWRegister.defaultClock
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
"A LWWMap" must {
"be able to set entries" in {
val m = LWWMap.empty[Int].put(node1, "a", 1, defaultClock[Int]).put(node2, "b", 2, defaultClock[Int])
m.entries should be(Map("a" -> 1, "b" -> 2))
}
"be able to have its entries correctly merged with another LWWMap with other entries" in {
val m1 = LWWMap.empty.put(node1, "a", 1, defaultClock[Int]).put(node1, "b", 2, defaultClock[Int])
val m2 = LWWMap.empty.put(node2, "c", 3, defaultClock[Int])
// merge both ways
val expected = Map("a" -> 1, "b" -> 2, "c" -> 3)
(m1 merge m2).entries should be(expected)
(m2 merge m1).entries should be(expected)
}
"be able to remove entry" in {
val m1 = LWWMap.empty.put(node1, "a", 1, defaultClock[Int]).put(node1, "b", 2, defaultClock[Int])
val m2 = LWWMap.empty.put(node2, "c", 3, defaultClock[Int])
val merged1 = m1 merge m2
val m3 = merged1.remove(node1, "b")
(merged1 merge m3).entries should be(Map("a" -> 1, "c" -> 3))
// but if there is a conflicting update the entry is not removed
val m4 = merged1.put(node2, "b", 22, defaultClock[Int])
(m3 merge m4).entries should be(Map("a" -> 1, "b" -> 22, "c" -> 3))
}
"have unapply extractor" in {
val m1 = LWWMap.empty.put(node1, "a", 1L, defaultClock[Long])
val LWWMap(entries1) = m1
val entries2: Map[String, Long] = entries1
Changed(LWWMapKey[Long]("key"))(m1) match {
case c @ Changed(LWWMapKey("key")) ⇒
val LWWMap(entries3) = c.dataValue
val entries4: Map[String, Long] = entries3
entries4 should be(Map("a" -> 1L))
}
}
}
}

@@ -0,0 +1,92 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class LWWRegisterSpec extends WordSpec with Matchers {
import LWWRegister.defaultClock
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
"A LWWRegister" must {
"use latest of successive assignments" in {
val r = (1 to 100).foldLeft(LWWRegister(node1, 0, defaultClock[Int])) {
case (r, n) ⇒
r.value should be(n - 1)
r.withValue(node1, n, defaultClock[Int])
}
r.value should be(100)
}
"merge by picking max timestamp" in {
val clock = new LWWRegister.Clock[String] {
val i = Iterator.from(100)
override def apply(current: Long, value: String): Long = i.next()
}
val r1 = LWWRegister(node1, "A", clock)
r1.timestamp should be(100)
val r2 = r1.withValue(node2, "B", clock)
r2.timestamp should be(101)
val m1 = r1 merge r2
m1.value should be("B")
m1.timestamp should be(101)
val m2 = r2 merge r1
m2.value should be("B")
m2.timestamp should be(101)
}
"merge by picking least address when same timestamp" in {
val clock = new LWWRegister.Clock[String] {
override def apply(current: Long, value: String): Long = 100
}
val r1 = LWWRegister(node1, "A", clock)
val r2 = LWWRegister(node2, "B", clock)
val m1 = r1 merge r2
m1.value should be("A")
val m2 = r2 merge r1
m2.value should be("A")
}
"use monotonically increasing defaultClock" in {
(1 to 100).foldLeft(LWWRegister(node1, 0, defaultClock)) {
case (r, n) ⇒
r.value should be(n - 1)
val r2 = r.withValue(node1, n, defaultClock[Int])
r2.timestamp should be > r.timestamp
r2
}
}
"have unapply extractor" in {
val r1 = LWWRegister(node1, "a", defaultClock)
val LWWRegister(value1) = r1
val value2: String = value1
Changed(LWWRegisterKey[String]("key"))(r1) match {
case c @ Changed(LWWRegisterKey("key")) ⇒
val LWWRegister(value3) = c.dataValue
val value4: String = value3
value4 should be("a")
}
}
"can be used as first-write-wins-register" in {
import LWWRegister.reverseClock
val r = (1 to 100).foldLeft(LWWRegister(node1, 0, reverseClock[Int])) {
case (r, n) ⇒
r.value should be(0)
val newRegister = r.merge(r.withValue(node1, n, reverseClock[Int]))
newRegister should be(r)
newRegister
}
r.value should be(0)
}
}
}

@@ -0,0 +1,81 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import scala.concurrent.duration._
import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.Props
import akka.actor.Stash
import akka.cluster.Cluster
import akka.testkit.ImplicitSender
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
object LocalConcurrencySpec {
final case class Add(s: String)
object Updater {
val key = ORSetKey[String]("key")
}
class Updater extends Actor with Stash {
implicit val cluster = Cluster(context.system)
val replicator = DistributedData(context.system).replicator
def receive = {
case s: String ⇒
val update = Replicator.Update(Updater.key, ORSet.empty[String], Replicator.WriteLocal)(_ + s)
replicator ! update
}
}
}
class LocalConcurrencySpec(_system: ActorSystem) extends TestKit(_system)
with WordSpecLike with Matchers with BeforeAndAfterAll with ImplicitSender {
import LocalConcurrencySpec._
def this() {
this(ActorSystem("LocalConcurrencySpec",
ConfigFactory.parseString("""
akka.actor.provider = "akka.cluster.ClusterActorRefProvider"
akka.remote.netty.tcp.port=0
""")))
}
override def afterAll(): Unit = {
shutdown(system)
}
val replicator = DistributedData(system).replicator
"Updates from same node" must {
"be possible to do from two actors" in {
val updater1 = system.actorOf(Props[Updater], "updater1")
val updater2 = system.actorOf(Props[Updater], "updater2")
val numMessages = 100
for (n ← 1 to numMessages) {
updater1 ! s"a$n"
updater2 ! s"b$n"
}
val expected = ((1 to numMessages).map("a" + _) ++ (1 to numMessages).map("b" + _)).toSet
awaitAssert {
replicator ! Replicator.Get(Updater.key, Replicator.ReadLocal)
val ORSet(elements) = expectMsgType[Replicator.GetSuccess[_]].get(Updater.key)
elements should be(expected)
}
}
}
}

@@ -0,0 +1,205 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class ORMapSpec extends WordSpec with Matchers {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
"A ORMap" must {
"be able to add entries" in {
val m = ORMap().put(node1, "a", GSet() + "A").put(node1, "b", GSet() + "B")
val GSet(a) = m.entries("a")
a should be(Set("A"))
val GSet(b) = m.entries("b")
b should be(Set("B"))
val m2 = m.put(node1, "a", GSet() + "C")
val GSet(a2) = m2.entries("a")
a2 should be(Set("C"))
}
"be able to remove entry" in {
val m = ORMap().put(node1, "a", GSet() + "A").put(node1, "b", GSet() + "B").remove(node1, "a")
m.entries.keySet should not contain ("a")
m.entries.keySet should contain("b")
}
"be able to add removed" in {
val m = ORMap().put(node1, "a", GSet() + "A").put(node1, "b", GSet() + "B").remove(node1, "a")
m.entries.keySet should not contain ("a")
m.entries.keySet should contain("b")
val m2 = m.put(node1, "a", GSet() + "C")
m2.entries.keySet should contain("a")
m2.entries.keySet should contain("b")
}
"be able to have its entries correctly merged with another ORMap with other entries" in {
val m1 = ORMap().put(node1, "a", GSet() + "A").put(node1, "b", GSet() + "B")
val m2 = ORMap().put(node2, "c", GSet() + "C")
// merge both ways
val merged1 = m1 merge m2
merged1.entries.keySet should contain("a")
merged1.entries.keySet should contain("b")
merged1.entries.keySet should contain("c")
val merged2 = m2 merge m1
merged2.entries.keySet should contain("a")
merged2.entries.keySet should contain("b")
merged2.entries.keySet should contain("c")
}
"be able to have its entries correctly merged with another ORMap with overlapping entries" in {
val m1 = ORMap().put(node1, "a", GSet() + "A1").put(node1, "b", GSet() + "B1").
remove(node1, "a").put(node1, "d", GSet() + "D1")
val m2 = ORMap().put(node2, "c", GSet() + "C2").put(node2, "a", GSet() + "A2").
put(node2, "b", GSet() + "B2").remove(node2, "b").put(node2, "d", GSet() + "D2")
// merge both ways
val merged1 = m1 merge m2
merged1.entries.keySet should contain("a")
val GSet(a1) = merged1.entries("a")
a1 should be(Set("A2"))
merged1.entries.keySet should contain("b")
val GSet(b1) = merged1.entries("b")
b1 should be(Set("B1"))
merged1.entries.keySet should contain("c")
merged1.entries.keySet should contain("d")
val GSet(d1) = merged1.entries("d")
d1 should be(Set("D1", "D2"))
val merged2 = m2 merge m1
merged2.entries.keySet should contain("a")
val GSet(a2) = merged1.entries("a")
a2 should be(Set("A2"))
merged2.entries.keySet should contain("b")
val GSet(b2) = merged2.entries("b")
b2 should be(Set("B1"))
merged2.entries.keySet should contain("c")
merged2.entries.keySet should contain("d")
val GSet(d2) = merged2.entries("d")
d2 should be(Set("D1", "D2"))
}
"illustrate the danger of using remove+put to replace an entry" in {
val m1 = ORMap.empty.put(node1, "a", GSet.empty + "A").put(node1, "b", GSet.empty + "B")
val m2 = ORMap.empty.put(node2, "c", GSet.empty + "C")
val merged1 = m1 merge m2
val m3 = merged1.remove(node1, "b").put(node1, "b", GSet.empty + "B2")
// same thing if only put is used
// val m3 = merged1.put(node1, "b", GSet() + "B2")
val merged2 = merged1 merge m3
merged2.entries("a").elements should be(Set("A"))
// note that B is included, because GSet("B") is merged with GSet("B2")
merged2.entries("b").elements should be(Set("B", "B2"))
merged2.entries("c").elements should be(Set("C"))
}
"not allow put for ORSet elements type" in {
val m = ORMap().put(node1, "a", ORSet().add(node1, "A"))
intercept[IllegalArgumentException] {
m.put(node1, "a", ORSet().add(node1, "B"))
}
}
"be able to update entry" in {
val m1 = ORMap.empty[ORSet[String]].put(node1, "a", ORSet.empty.add(node1, "A"))
.put(node1, "b", ORSet.empty.add(node1, "B01").add(node1, "B02").add(node1, "B03"))
val m2 = ORMap.empty[ORSet[String]].put(node2, "c", ORSet.empty.add(node2, "C"))
val merged1: ORMap[ORSet[String]] = m1 merge m2
val m3 = merged1.updated(node1, "b", ORSet.empty[String])(_.clear(node1).add(node1, "B2"))
val merged2 = merged1 merge m3
merged2.entries("a").elements should be(Set("A"))
merged2.entries("b").elements should be(Set("B2"))
merged2.entries("c").elements should be(Set("C"))
val m4 = merged1.updated(node2, "b", ORSet.empty[String])(_.add(node2, "B3"))
val merged3 = m3 merge m4
merged3.entries("a").elements should be(Set("A"))
merged3.entries("b").elements should be(Set("B2", "B3"))
merged3.entries("c").elements should be(Set("C"))
}
"be able to update ORSet entry with remove+put" in {
val m1 = ORMap.empty[ORSet[String]].put(node1, "a", ORSet.empty.add(node1, "A01"))
.updated(node1, "a", ORSet.empty[String])(_.add(node1, "A02"))
.updated(node1, "a", ORSet.empty[String])(_.add(node1, "A03"))
.put(node1, "b", ORSet.empty.add(node1, "B01").add(node1, "B02").add(node1, "B03"))
val m2 = ORMap.empty[ORSet[String]].put(node2, "c", ORSet.empty.add(node2, "C"))
val merged1 = m1 merge m2
// note that remove + put work because the new VersionVector version is incremented
// from a global counter
val m3 = merged1.remove(node1, "b").put(node1, "b", ORSet.empty.add(node1, "B2"))
val merged2 = merged1 merge m3
merged2.entries("a").elements should be(Set("A01", "A02", "A03"))
merged2.entries("b").elements should be(Set("B2"))
merged2.entries("c").elements should be(Set("C"))
val m4 = merged1.updated(node2, "b", ORSet.empty[String])(_.add(node2, "B3"))
val merged3 = m3 merge m4
merged3.entries("a").elements should be(Set("A01", "A02", "A03"))
merged3.entries("b").elements should be(Set("B2", "B3"))
merged3.entries("c").elements should be(Set("C"))
}
"be able to update ORSet entry with remove -> merge -> put" in {
val m1 = ORMap.empty.put(node1, "a", ORSet.empty.add(node1, "A"))
.put(node1, "b", ORSet.empty.add(node1, "B01").add(node1, "B02").add(node1, "B03"))
val m2 = ORMap.empty.put(node2, "c", ORSet.empty.add(node2, "C"))
val merged1 = m1 merge m2
val m3 = merged1.remove(node1, "b")
val merged2 = merged1 merge m3
merged2.entries("a").elements should be(Set("A"))
merged2.contains("b") should be(false)
merged2.entries("c").elements should be(Set("C"))
val m4 = merged2.put(node1, "b", ORSet.empty.add(node1, "B2"))
val m5 = merged2.updated(node2, "c", ORSet.empty[String])(_.add(node2, "C2"))
.put(node2, "b", ORSet.empty.add(node2, "B3"))
val merged3 = m5 merge m4
merged3.entries("a").elements should be(Set("A"))
merged3.entries("b").elements should be(Set("B2", "B3"))
merged3.entries("c").elements should be(Set("C", "C2"))
}
"have unapply extractor" in {
val m1 = ORMap.empty.put(node1, "a", Flag(true)).put(node2, "b", Flag(false))
val m2: ORMap[Flag] = m1
val ORMap(entries1) = m1
val entries2: Map[String, Flag] = entries1
Changed(ORMapKey[Flag]("key"))(m1) match {
case c @ Changed(ORMapKey("key")) ⇒
val ORMap(entries3) = c.dataValue
val entries4: Map[String, ReplicatedData] = entries3
entries4 should be(Map("a" -> Flag(true), "b" -> Flag(false)))
}
}
}
}

@@ -0,0 +1,355 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import scala.collection.immutable.TreeMap
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class ORSetSpec extends WordSpec with Matchers {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
val nodeA = UniqueAddress(Address("akka.tcp", "Sys", "a", 2552), 1)
val nodeB = UniqueAddress(nodeA.address.copy(host = Some("b")), 2)
val nodeC = UniqueAddress(nodeA.address.copy(host = Some("c")), 3)
val nodeD = UniqueAddress(nodeA.address.copy(host = Some("d")), 4)
val nodeE = UniqueAddress(nodeA.address.copy(host = Some("e")), 5)
val nodeF = UniqueAddress(nodeA.address.copy(host = Some("f")), 6)
val nodeG = UniqueAddress(nodeA.address.copy(host = Some("g")), 7)
val nodeH = UniqueAddress(nodeA.address.copy(host = Some("h")), 8)
val user1 = """{"username":"john","password":"coltrane"}"""
val user2 = """{"username":"sonny","password":"rollins"}"""
val user3 = """{"username":"charlie","password":"parker"}"""
val user4 = """{"username":"charles","password":"mingus"}"""
"A ORSet" must {
"be able to add user" in {
val c1 = ORSet()
val c2 = c1.add(node1, user1)
val c3 = c2.add(node1, user2)
val c4 = c3.add(node1, user4)
val c5 = c4.add(node1, user3)
c5.elements should contain(user1)
c5.elements should contain(user2)
c5.elements should contain(user3)
c5.elements should contain(user4)
}
"be able to remove added user" in {
val c1 = ORSet()
val c2 = c1.add(node1, user1)
val c3 = c2.add(node1, user2)
val c4 = c3.remove(node1, user2)
val c5 = c4.remove(node1, user1)
c5.elements should not contain (user1)
c5.elements should not contain (user2)
}
"be able to add removed" in {
val c1 = ORSet()
val c2 = c1.remove(node1, user1)
val c3 = c2.add(node1, user1)
c3.elements should contain(user1)
val c4 = c3.remove(node1, user1)
c4.elements should not contain (user1)
val c5 = c4.add(node1, user1)
c5.elements should contain(user1)
}
"be able to remove and add several times" in {
val c1 = ORSet()
val c2 = c1.add(node1, user1)
val c3 = c2.add(node1, user2)
val c4 = c3.remove(node1, user1)
c4.elements should not contain (user1)
c4.elements should contain(user2)
val c5 = c4.add(node1, user1)
val c6 = c5.add(node1, user2)
c6.elements should contain(user1)
c6.elements should contain(user2)
val c7 = c6.remove(node1, user1)
val c8 = c7.add(node1, user2)
val c9 = c8.remove(node1, user1)
c9.elements should not contain (user1)
c9.elements should contain(user2)
}
"be able to have its user set correctly merged with another ORSet with unique user sets" in {
// set 1
val c1 = ORSet().add(node1, user1).add(node1, user2)
c1.elements should contain(user1)
c1.elements should contain(user2)
// set 2
val c2 = ORSet().add(node2, user3).add(node2, user4).remove(node2, user3)
c2.elements should not contain (user3)
c2.elements should contain(user4)
// merge both ways
val merged1 = c1 merge c2
merged1.elements should contain(user1)
merged1.elements should contain(user2)
merged1.elements should not contain (user3)
merged1.elements should contain(user4)
val merged2 = c2 merge c1
merged2.elements should contain(user1)
merged2.elements should contain(user2)
merged2.elements should not contain (user3)
merged2.elements should contain(user4)
}
"be able to have its user set correctly merged with another ORSet with overlapping user sets" in {
// set 1
val c1 = ORSet().add(node1, user1).add(node1, user2).add(node1, user3).remove(node1, user1).remove(node1, user3)
c1.elements should not contain (user1)
c1.elements should contain(user2)
c1.elements should not contain (user3)
// set 2
val c2 = ORSet().add(node2, user1).add(node2, user2).add(node2, user3).add(node2, user4).remove(node2, user3)
c2.elements should contain(user1)
c2.elements should contain(user2)
c2.elements should not contain (user3)
c2.elements should contain(user4)
// merge both ways
val merged1 = c1 merge c2
merged1.elements should contain(user1)
merged1.elements should contain(user2)
merged1.elements should not contain (user3)
merged1.elements should contain(user4)
val merged2 = c2 merge c1
merged2.elements should contain(user1)
merged2.elements should contain(user2)
merged2.elements should not contain (user3)
merged2.elements should contain(user4)
}
"be able to have its user set correctly merged for concurrent updates" in {
val c1 = ORSet().add(node1, user1).add(node1, user2).add(node1, user3)
c1.elements should contain(user1)
c1.elements should contain(user2)
c1.elements should contain(user3)
val c2 = c1.add(node2, user1).remove(node2, user2).remove(node2, user3)
c2.elements should contain(user1)
c2.elements should not contain (user2)
c2.elements should not contain (user3)
// merge both ways
val merged1 = c1 merge c2
merged1.elements should contain(user1)
merged1.elements should not contain (user2)
merged1.elements should not contain (user3)
val merged2 = c2 merge c1
merged2.elements should contain(user1)
merged2.elements should not contain (user2)
merged2.elements should not contain (user3)
val c3 = c1.add(node1, user4).remove(node1, user3).add(node1, user2)
// merge both ways
val merged3 = c2 merge c3
merged3.elements should contain(user1)
merged3.elements should contain(user2)
merged3.elements should not contain (user3)
merged3.elements should contain(user4)
val merged4 = c3 merge c2
merged4.elements should contain(user1)
merged4.elements should contain(user2)
merged4.elements should not contain (user3)
merged4.elements should contain(user4)
}
"be able to have its user set correctly merged after remove" in {
val c1 = ORSet().add(node1, user1).add(node1, user2)
val c2 = c1.remove(node2, user2)
// merge both ways
val merged1 = c1 merge c2
merged1.elements should contain(user1)
merged1.elements should not contain (user2)
val merged2 = c2 merge c1
merged2.elements should contain(user1)
merged2.elements should not contain (user2)
val c3 = c1.add(node1, user3)
// merge both ways
val merged3 = c3 merge c2
merged3.elements should contain(user1)
merged3.elements should not contain (user2)
merged3.elements should contain(user3)
val merged4 = c2 merge c3
merged4.elements should contain(user1)
merged4.elements should not contain (user2)
merged4.elements should contain(user3)
}
}
"ORSet unit test" must {
"verify subtractDots" in {
val dot = new VersionVector(TreeMap(nodeA -> 3, nodeB -> 2, nodeD -> 14, nodeG -> 22))
val vvector = new VersionVector(TreeMap(nodeA -> 4, nodeB -> 1, nodeC -> 1, nodeD -> 14, nodeE -> 5, nodeF -> 2))
val expected = new VersionVector(TreeMap(nodeB -> 2, nodeG -> 22))
ORSet.subtractDots(dot, vvector) should be(expected)
}
"verify mergeCommonKeys" in {
val commonKeys: Set[String] = Set("K1", "K2")
val thisDot1 = new VersionVector(TreeMap(nodeA -> 3, nodeD -> 7))
val thisDot2 = new VersionVector(TreeMap(nodeB -> 5, nodeC -> 2))
val thisVvector = new VersionVector(TreeMap(nodeA -> 3, nodeB -> 5, nodeC -> 2, nodeD -> 7))
val thisSet = new ORSet(
elementsMap = Map("K1" -> thisDot1, "K2" -> thisDot2),
vvector = thisVvector)
val thatDot1 = new VersionVector(TreeMap(nodeA -> 3))
val thatDot2 = new VersionVector(TreeMap(nodeB -> 6))
val thatVvector = new VersionVector(TreeMap(nodeA -> 3, nodeB -> 6, nodeC -> 1, nodeD -> 8))
val thatSet = new ORSet(
elementsMap = Map("K1" -> thatDot1, "K2" -> thatDot2),
vvector = thatVvector)
val expectedDots = Map(
"K1" -> new VersionVector(TreeMap(nodeA -> 3)),
"K2" -> new VersionVector(TreeMap(nodeB -> 6, nodeC -> 2)))
ORSet.mergeCommonKeys(commonKeys, thisSet, thatSet) should be(expectedDots)
}
"verify mergeDisjointKeys" in {
val keys: Set[Any] = Set("K3", "K4", "K5")
val elements: Map[Any, VersionVector] = Map(
"K3" -> new VersionVector(TreeMap(nodeA -> 4)),
"K4" -> new VersionVector(TreeMap(nodeA -> 3, nodeD -> 8)),
"K5" -> new VersionVector(TreeMap(nodeA -> 2)))
val vvector = new VersionVector(TreeMap(nodeA -> 3, nodeD -> 7))
val acc: Map[Any, VersionVector] = Map("K1" -> new VersionVector(TreeMap(nodeA -> 3)))
val expectedDots = acc ++ Map(
"K3" -> new VersionVector(TreeMap(nodeA -> 4)),
"K4" -> new VersionVector(TreeMap(nodeD -> 8))) // "a" -> 3 removed, optimized to include only those unseen
ORSet.mergeDisjointKeys(keys, elements, vvector, acc) should be(expectedDots)
}
"verify disjoint merge" in {
val a1 = ORSet().add(node1, "bar")
val b1 = ORSet().add(node2, "baz")
val c = a1.merge(b1)
val a2 = a1.remove(node1, "bar")
val d = a2.merge(c)
d.elements should be(Set("baz"))
}
"verify removed after merge" in {
// Add Z at node1 replica
val a = ORSet().add(node1, "Z")
// Replicate it to some node3, i.e. it has dot 'Z'->{node1 -> 1}
val c = a
// Remove Z at node1 replica
val a2 = a.remove(node1, "Z")
// Add Z at node2, a new replica
val b = ORSet().add(node2, "Z")
// Replicate b to node1, so now node1 has a Z, the one with a Dot of
// {node2 -> 1} and version vector of [{node1 -> 1}, {node2 -> 1}]
val a3 = b.merge(a2)
a3.elements should be(Set("Z"))
// Remove the 'Z' at node2 replica
val b2 = b.remove(node2, "Z")
// Both node3 (c) and node1 (a3) have a 'Z', but when they merge, there should be
// no 'Z' as node3 (c)'s has been removed by node1 and node1 (a3)'s has been removed by
// node2
c.elements should be(Set("Z"))
a3.elements should be(Set("Z"))
b2.elements should be(Set())
a3.merge(c).merge(b2).elements should be(Set.empty)
a3.merge(b2).merge(c).elements should be(Set.empty)
c.merge(b2).merge(a3).elements should be(Set.empty)
c.merge(a3).merge(b2).elements should be(Set.empty)
b2.merge(c).merge(a3).elements should be(Set.empty)
b2.merge(a3).merge(c).elements should be(Set.empty)
}
"verify removed after merge 2" in {
val a = ORSet().add(node1, "Z")
val b = ORSet().add(node2, "Z")
// replicate node3
val c = a
val a2 = a.remove(node1, "Z")
// replicate b to node1, now node1 has node2's 'Z'
val a3 = a2.merge(b)
a3.elements should be(Set("Z"))
// Remove node2's 'Z'
val b2 = b.remove(node2, "Z")
// Replicate c to node2, now node2 has node1's old 'Z'
val b3 = b2.merge(c)
b3.elements should be(Set("Z"))
// Merge everything
a3.merge(c).merge(b3).elements should be(Set.empty)
a3.merge(b3).merge(c).elements should be(Set.empty)
c.merge(b3).merge(a3).elements should be(Set.empty)
c.merge(a3).merge(b3).elements should be(Set.empty)
b3.merge(c).merge(a3).elements should be(Set.empty)
b3.merge(a3).merge(c).elements should be(Set.empty)
}
"have unapply extractor" in {
val s1 = ORSet.empty.add(node1, "a").add(node2, "b")
val s2: ORSet[String] = s1
val ORSet(elements1) = s1 // `unapply[A](s: ORSet[A])` is used here
val elements2: Set[String] = elements1
Changed(ORSetKey[String]("key"))(s1) match {
case c @ Changed(ORSetKey("key")) ⇒
val x: ORSet[String] = c.dataValue
val ORSet(elements3) = c.dataValue
val elements4: Set[String] = elements3
elements4 should be(Set("a", "b"))
}
val msg: Any = Changed(ORSetKey[String]("key"))(s1)
msg match {
case c @ Changed(ORSetKey("key")) ⇒
val ORSet(elements3) = c.dataValue // `unapply(a: ReplicatedData)` is used here
// if `unapply(a: ReplicatedData)` isn't defined the next line doesn't compile:
// type mismatch; found : scala.collection.immutable.Set[A] where type A required: Set[Any] Note: A <: Any,
// but trait Set is invariant in type A. You may wish to investigate a wildcard type such as _ <: Any. (SLS 3.2.10)
val elements4: Set[Any] = elements3
elements4 should be(Set("a", "b"))
}
}
}
}

@@ -0,0 +1,62 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class PNCounterMapSpec extends WordSpec with Matchers {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
"A PNCounterMap" must {
"be able to increment and decrement entries" in {
val m = PNCounterMap().increment(node1, "a", 2).increment(node1, "b", 3).decrement(node2, "a", 1)
m.entries should be(Map("a" -> 1, "b" -> 3))
}
"be able to have its entries correctly merged with another ORMap with other entries" in {
val m1 = PNCounterMap().increment(node1, "a", 1).increment(node1, "b", 3).increment(node1, "c", 2)
val m2 = PNCounterMap().increment(node2, "c", 5)
// merge both ways
val expected = Map("a" -> 1, "b" -> 3, "c" -> 7)
(m1 merge m2).entries should be(expected)
(m2 merge m1).entries should be(expected)
}
"be able to remove entry" in {
val m1 = PNCounterMap().increment(node1, "a", 1).increment(node1, "b", 3).increment(node1, "c", 2)
val m2 = PNCounterMap().increment(node2, "c", 5)
val merged1 = m1 merge m2
val m3 = merged1.remove(node1, "b")
(merged1 merge m3).entries should be(Map("a" -> 1, "c" -> 7))
// but if there is a conflicting update the entry is not removed
val m4 = merged1.increment(node2, "b", 10)
(m3 merge m4).entries should be(Map("a" -> 1, "b" -> 13, "c" -> 7))
}
"have unapply extractor" in {
val m1 = PNCounterMap.empty.increment(node1, "a", 1).increment(node2, "b", 2)
val PNCounterMap(entries1) = m1
val entries2: Map[String, BigInt] = entries1
Changed(PNCounterMapKey("key"))(m1) match {
case c @ Changed(PNCounterMapKey("key")) ⇒
val PNCounterMap(entries3) = c.dataValue
val entries4: Map[String, BigInt] = entries3
entries4 should be(Map("a" -> 1L, "b" -> 2L))
}
}
}
}

@@ -0,0 +1,172 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec
class PNCounterSpec extends WordSpec with Matchers {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
"A PNCounter" must {
"be able to increment each node's record by one" in {
val c1 = PNCounter()
val c2 = c1 increment node1
val c3 = c2 increment node1
val c4 = c3 increment node2
val c5 = c4 increment node2
val c6 = c5 increment node2
c6.increments.state(node1) should be(2)
c6.increments.state(node2) should be(3)
}
"be able to decrement each node's record by one" in {
val c1 = PNCounter()
val c2 = c1 decrement node1
val c3 = c2 decrement node1
val c4 = c3 decrement node2
val c5 = c4 decrement node2
val c6 = c5 decrement node2
c6.decrements.state(node1) should be(2)
c6.decrements.state(node2) should be(3)
}
"be able to increment each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 increment (node1, 4)
val c4 = c3 increment (node2, 2)
val c5 = c4 increment (node2, 7)
val c6 = c5 increment node2
c6.increments.state(node1) should be(7)
c6.increments.state(node2) should be(10)
}
"be able to decrement each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 decrement (node1, 3)
val c3 = c2 decrement (node1, 4)
val c4 = c3 decrement (node2, 2)
val c5 = c4 decrement (node2, 7)
val c6 = c5 decrement node2
c6.decrements.state(node1) should be(7)
c6.decrements.state(node2) should be(10)
}
"be able to increment and decrement each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 decrement (node1, 2)
val c4 = c3 increment (node2, 5)
val c5 = c4 decrement (node2, 2)
val c6 = c5 increment node2
c6.increments.value should be(9)
c6.decrements.value should be(4)
}
"be able to summarize the history to the correct aggregated value of increments and decrements" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 decrement (node1, 2)
val c4 = c3 increment (node2, 5)
val c5 = c4 decrement (node2, 2)
val c6 = c5 increment node2
c6.increments.value should be(9)
c6.decrements.value should be(4)
c6.value should be(5)
}
"be able to have its history correctly merged with another GCounter" in {
// counter 1
val c11 = PNCounter()
val c12 = c11 increment (node1, 3)
val c13 = c12 decrement (node1, 2)
val c14 = c13 increment (node2, 5)
val c15 = c14 decrement (node2, 2)
val c16 = c15 increment node2
c16.increments.value should be(9)
c16.decrements.value should be(4)
c16.value should be(5)
// counter 2
val c21 = PNCounter()
val c22 = c21 increment (node1, 2)
val c23 = c22 decrement (node1, 3)
val c24 = c23 increment (node2, 3)
val c25 = c24 decrement (node2, 2)
val c26 = c25 increment node2
c26.increments.value should be(6)
c26.decrements.value should be(5)
c26.value should be(1)
// merge both ways
val merged1 = c16 merge c26
merged1.increments.value should be(9)
merged1.decrements.value should be(5)
merged1.value should be(4)
val merged2 = c26 merge c16
merged2.increments.value should be(9)
merged2.decrements.value should be(5)
merged2.value should be(4)
}
"have support for pruning" in {
val c1 = PNCounter()
val c2 = c1 increment node1
val c3 = c2 decrement node2
c2.needPruningFrom(node1) should be(true)
c2.needPruningFrom(node2) should be(false)
c3.needPruningFrom(node1) should be(true)
c3.needPruningFrom(node2) should be(true)
val c4 = c3.prune(node1, node2)
c4.needPruningFrom(node2) should be(true)
c4.needPruningFrom(node1) should be(false)
val c5 = (c4 increment node1).pruningCleanup(node1)
c5.needPruningFrom(node1) should be(false)
}
"have unapply extractor" in {
val c1 = PNCounter.empty.increment(node1).increment(node1).decrement(node2)
val PNCounter(value1) = c1
val value2: BigInt = value1
Changed(PNCounterKey("key"))(c1) match {
case c @ Changed(PNCounterKey("key")) ⇒
val PNCounter(value3) = c.dataValue
val value4: BigInt = value3
value4 should be(1L)
}
}
}
}

@@ -0,0 +1,46 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.Address
import akka.cluster.UniqueAddress
import org.scalatest.Matchers
import org.scalatest.WordSpec
class PruningStateSpec extends WordSpec with Matchers {
import PruningState._
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
val node3 = UniqueAddress(node1.address.copy(port = Some(2553)), 3)
val node4 = UniqueAddress(node1.address.copy(port = Some(2554)), 4)
"Pruning state" must {
"merge phase correctly" in {
val p1 = PruningState(node1, PruningInitialized(Set.empty))
val p2 = PruningState(node1, PruningPerformed)
p1.merge(p2).phase should be(PruningPerformed)
p2.merge(p1).phase should be(PruningPerformed)
}
"merge owner correctly" in {
val p1 = PruningState(node1, PruningInitialized(Set.empty))
val p2 = PruningState(node2, PruningInitialized(Set.empty))
val expected = PruningState(node1, PruningInitialized(Set.empty))
p1.merge(p2) should be(expected)
p2.merge(p1) should be(expected)
}
"merge seen correctly" in {
val p1 = PruningState(node1, PruningInitialized(Set(node2.address)))
val p2 = PruningState(node1, PruningInitialized(Set(node4.address)))
val expected = PruningState(node1, PruningInitialized(Set(node2.address, node4.address)))
p1.merge(p2) should be(expected)
p2.merge(p1) should be(expected)
}
}
}

@@ -0,0 +1,249 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata
import akka.actor.ActorSystem
import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.testkit.TestKit
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
class VersionVectorSpec extends TestKit(ActorSystem("VersionVectorSpec"))
with WordSpecLike with Matchers with BeforeAndAfterAll {
val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)
val node3 = UniqueAddress(node1.address.copy(port = Some(2553)), 3)
val node4 = UniqueAddress(node1.address.copy(port = Some(2554)), 4)
override def afterAll {
shutdown()
}
"A VersionVector" must {
"have zero versions when created" in {
val vv = VersionVector()
vv.versions should be(Map())
}
"not happen before itself" in {
val vv1 = VersionVector()
val vv2 = VersionVector()
vv1 <> vv2 should be(false)
}
"pass misc comparison test 1" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv4_1 = vv3_1 + node1
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node1
val vv3_2 = vv2_2 + node2
val vv4_2 = vv3_2 + node1
vv4_1 <> vv4_2 should be(false)
}
"pass misc comparison test 2" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv4_1 = vv3_1 + node1
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node1
val vv3_2 = vv2_2 + node2
val vv4_2 = vv3_2 + node1
val vv5_2 = vv4_2 + node3
vv4_1 < vv5_2 should be(true)
}
"pass misc comparison test 3" in {
var vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node2
vv2_1 <> vv2_2 should be(true)
}
"pass misc comparison test 4" in {
val vv1_3 = VersionVector()
val vv2_3 = vv1_3 + node1
val vv3_3 = vv2_3 + node2
val vv4_3 = vv3_3 + node1
val vv1_4 = VersionVector()
val vv2_4 = vv1_4 + node1
val vv3_4 = vv2_4 + node1
val vv4_4 = vv3_4 + node3
vv4_3 <> vv4_4 should be(true)
}
"pass misc comparison test 5" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node2
val vv3_1 = vv2_1 + node2
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node1
val vv3_2 = vv2_2 + node2
val vv4_2 = vv3_2 + node2
val vv5_2 = vv4_2 + node3
vv3_1 < vv5_2 should be(true)
vv5_2 > vv3_1 should be(true)
}
"pass misc comparison test 6" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node1
val vv3_2 = vv2_2 + node1
vv3_1 <> vv3_2 should be(true)
vv3_2 <> vv3_1 should be(true)
}
"pass misc comparison test 7" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv4_1 = vv3_1 + node2
val vv5_1 = vv4_1 + node3
val vv1_2 = vv4_1
val vv2_2 = vv1_2 + node2
val vv3_2 = vv2_2 + node2
vv5_1 <> vv3_2 should be(true)
vv3_2 <> vv5_1 should be(true)
}
"pass misc comparison test 8" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node3
val vv1_2 = vv3_1 + node2
val vv4_1 = vv3_1 + node3
vv4_1 <> vv1_2 should be(true)
vv1_2 <> vv4_1 should be(true)
}
"correctly merge two version vectors" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv4_1 = vv3_1 + node2
val vv5_1 = vv4_1 + node3
val vv1_2 = vv4_1
val vv2_2 = vv1_2 + node2
val vv3_2 = vv2_2 + node2
val merged1 = vv3_2 merge vv5_1
merged1.versions.size should be(3)
merged1.versions.contains(node1) should be(true)
merged1.versions.contains(node2) should be(true)
merged1.versions.contains(node3) should be(true)
val merged2 = vv5_1 merge vv3_2
merged2.versions.size should be(3)
merged2.versions.contains(node1) should be(true)
merged2.versions.contains(node2) should be(true)
merged2.versions.contains(node3) should be(true)
vv3_2 < merged1 should be(true)
vv5_1 < merged1 should be(true)
vv3_2 < merged2 should be(true)
vv5_1 < merged2 should be(true)
merged1 == merged2 should be(true)
}
"correctly merge two disjoint version vectors" in {
val vv1_1 = VersionVector()
val vv2_1 = vv1_1 + node1
val vv3_1 = vv2_1 + node2
val vv4_1 = vv3_1 + node2
val vv5_1 = vv4_1 + node3
val vv1_2 = VersionVector()
val vv2_2 = vv1_2 + node4
val vv3_2 = vv2_2 + node4
val merged1 = vv3_2 merge vv5_1
merged1.versions.size should be(4)
merged1.versions.contains(node1) should be(true)
merged1.versions.contains(node2) should be(true)
merged1.versions.contains(node3) should be(true)
merged1.versions.contains(node4) should be(true)
val merged2 = vv5_1 merge vv3_2
merged2.versions.size should be(4)
merged2.versions.contains(node1) should be(true)
merged2.versions.contains(node2) should be(true)
merged2.versions.contains(node3) should be(true)
merged2.versions.contains(node4) should be(true)
vv3_2 < merged1 should be(true)
vv5_1 < merged1 should be(true)
vv3_2 < merged2 should be(true)
vv5_1 < merged2 should be(true)
merged1 == merged2 should be(true)
}
"pass blank version vector incrementing" in {
val v1 = VersionVector()
val v2 = VersionVector()
val vv1 = v1 + node1
val vv2 = v2 + node2
(vv1 > v1) should be(true)
(vv2 > v2) should be(true)
(vv1 > v2) should be(true)
(vv2 > v1) should be(true)
(vv2 > vv1) should be(false)
(vv1 > vv2) should be(false)
}
"pass merging behavior" in {
val a = VersionVector()
val b = VersionVector()
val a1 = a + node1
val b1 = b + node2
var a2 = a1 + node1
var c = a2.merge(b1)
var c1 = c + node3
(c1 > a2) should be(true)
(c1 > b1) should be(true)
}
}
}

@@ -0,0 +1,173 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata.protobuf
import scala.concurrent.duration._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import akka.actor.ActorSystem
import akka.actor.Address
import akka.actor.ExtendedActorSystem
import akka.cluster.ddata.Flag
import akka.cluster.ddata.GCounter
import akka.cluster.ddata.GSet
import akka.cluster.ddata.LWWMap
import akka.cluster.ddata.LWWRegister
import akka.cluster.ddata.ORMap
import akka.cluster.ddata.ORSet
import akka.cluster.ddata.PNCounter
import akka.cluster.ddata.PNCounterMap
import akka.cluster.ddata.Replicator._
import akka.cluster.ddata.Replicator.Internal._
import akka.cluster.ddata.VersionVector
import akka.testkit.TestKit
import akka.cluster.UniqueAddress
import com.typesafe.config.ConfigFactory
class ReplicatedDataSerializerSpec extends TestKit(ActorSystem("ReplicatedDataSerializerSpec",
ConfigFactory.parseString("""
akka.actor.provider=akka.cluster.ClusterActorRefProvider
akka.remote.netty.tcp.port=0
"""))) with WordSpecLike with Matchers with BeforeAndAfterAll {
val serializer = new ReplicatedDataSerializer(system.asInstanceOf[ExtendedActorSystem])
val address1 = UniqueAddress(Address("akka.tcp", system.name, "some.host.org", 4711), 1)
val address2 = UniqueAddress(Address("akka.tcp", system.name, "other.host.org", 4711), 2)
val address3 = UniqueAddress(Address("akka.tcp", system.name, "some.host.org", 4712), 3)
override def afterAll {
shutdown()
}
def checkSerialization(obj: AnyRef): Unit = {
val blob = serializer.toBinary(obj)
val ref = serializer.fromBinary(blob, serializer.manifest(obj))
ref should be(obj)
}
def checkSameContent(a: AnyRef, b: AnyRef): Unit = {
a should be(b)
val blobA = serializer.toBinary(a)
val blobB = serializer.toBinary(b)
blobA.toSeq should be(blobB.toSeq)
}
"ReplicatedDataSerializer" must {
"serialize GSet" in {
checkSerialization(GSet())
checkSerialization(GSet() + "a")
checkSerialization(GSet() + "a" + "b")
checkSerialization(GSet() + 1 + 2 + 3)
checkSerialization(GSet() + address1 + address2)
checkSerialization(GSet() + 1L + "2" + 3 + address1)
checkSameContent(GSet() + "a" + "b", GSet() + "a" + "b")
checkSameContent(GSet() + "a" + "b", GSet() + "b" + "a")
checkSameContent(GSet() + address1 + address2 + address3, GSet() + address2 + address1 + address3)
checkSameContent(GSet() + address1 + address2 + address3, GSet() + address3 + address2 + address1)
}
"serialize ORSet" in {
checkSerialization(ORSet())
checkSerialization(ORSet().add(address1, "a"))
checkSerialization(ORSet().add(address1, "a").add(address2, "a"))
checkSerialization(ORSet().add(address1, "a").remove(address2, "a"))
checkSerialization(ORSet().add(address1, "a").add(address2, "b").remove(address1, "a"))
checkSerialization(ORSet().add(address1, 1).add(address2, 2))
checkSerialization(ORSet().add(address1, 1L).add(address2, 2L))
checkSerialization(ORSet().add(address1, "a").add(address2, 2).add(address3, 3L).add(address3, address3))
val s1 = ORSet().add(address1, "a").add(address2, "b")
val s2 = ORSet().add(address2, "b").add(address1, "a")
checkSameContent(s1.merge(s2), s2.merge(s1))
val s3 = ORSet().add(address1, "a").add(address2, 17).remove(address3, 17)
val s4 = ORSet().add(address2, 17).remove(address3, 17).add(address1, "a")
checkSameContent(s3.merge(s4), s4.merge(s3))
}
"serialize Flag" in {
checkSerialization(Flag())
checkSerialization(Flag().switchOn)
}
"serialize LWWRegister" in {
checkSerialization(LWWRegister(address1, "value1", LWWRegister.defaultClock))
checkSerialization(LWWRegister(address1, "value2", LWWRegister.defaultClock[String])
.withValue(address2, "value3", LWWRegister.defaultClock[String]))
}
"serialize GCounter" in {
checkSerialization(GCounter())
checkSerialization(GCounter().increment(address1, 3))
checkSerialization(GCounter().increment(address1, 2).increment(address2, 5))
checkSameContent(
GCounter().increment(address1, 2).increment(address2, 5),
GCounter().increment(address2, 5).increment(address1, 1).increment(address1, 1))
checkSameContent(
GCounter().increment(address1, 2).increment(address3, 5),
GCounter().increment(address3, 5).increment(address1, 2))
}
"serialize PNCounter" in {
checkSerialization(PNCounter())
checkSerialization(PNCounter().increment(address1, 3))
checkSerialization(PNCounter().increment(address1, 3).decrement(address1, 1))
checkSerialization(PNCounter().increment(address1, 2).increment(address2, 5))
checkSerialization(PNCounter().increment(address1, 2).increment(address2, 5).decrement(address1, 1))
checkSameContent(
PNCounter().increment(address1, 2).increment(address2, 5),
PNCounter().increment(address2, 5).increment(address1, 1).increment(address1, 1))
checkSameContent(
PNCounter().increment(address1, 2).increment(address3, 5),
PNCounter().increment(address3, 5).increment(address1, 2))
checkSameContent(
PNCounter().increment(address1, 2).decrement(address1, 1).increment(address3, 5),
PNCounter().increment(address3, 5).increment(address1, 2).decrement(address1, 1))
}
"serialize ORMap" in {
checkSerialization(ORMap())
checkSerialization(ORMap().put(address1, "a", GSet() + "A"))
checkSerialization(ORMap().put(address1, "a", GSet() + "A").put(address2, "b", GSet() + "B"))
}
"serialize LWWMap" in {
checkSerialization(LWWMap())
checkSerialization(LWWMap().put(address1, "a", "value1", LWWRegister.defaultClock[Any]))
checkSerialization(LWWMap().put(address1, "a", "value1", LWWRegister.defaultClock[Any])
.put(address2, "b", 17, LWWRegister.defaultClock[Any]))
}
"serialize PNCounterMap" in {
checkSerialization(PNCounterMap())
checkSerialization(PNCounterMap().increment(address1, "a", 3))
checkSerialization(PNCounterMap().increment(address1, "a", 3).decrement(address2, "a", 2).
increment(address2, "b", 5))
}
"serialize DeletedData" in {
checkSerialization(DeletedData)
}
"serialize VersionVector" in {
checkSerialization(VersionVector())
checkSerialization(VersionVector().increment(address1))
checkSerialization(VersionVector().increment(address1).increment(address2))
val v1 = VersionVector().increment(address1).increment(address1)
val v2 = VersionVector().increment(address2)
checkSameContent(v1.merge(v2), v2.merge(v1))
}
}
}

@@ -0,0 +1,81 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata.protobuf
import scala.concurrent.duration._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Matchers
import org.scalatest.WordSpecLike
import akka.actor.ActorSystem
import akka.actor.Address
import akka.actor.ExtendedActorSystem
import akka.actor.Props
import akka.cluster.ddata.GSet
import akka.cluster.ddata.GSetKey
import akka.cluster.ddata.PruningState
import akka.cluster.ddata.PruningState.PruningInitialized
import akka.cluster.ddata.PruningState.PruningPerformed
import akka.cluster.ddata.Replicator._
import akka.cluster.ddata.Replicator.Internal._
import akka.testkit.TestKit
import akka.util.ByteString
import akka.cluster.UniqueAddress
import com.typesafe.config.ConfigFactory
class ReplicatorMessageSerializerSpec extends TestKit(ActorSystem("ReplicatorMessageSerializerSpec",
ConfigFactory.parseString("""
akka.actor.provider=akka.cluster.ClusterActorRefProvider
akka.remote.netty.tcp.port=0
"""))) with WordSpecLike with Matchers with BeforeAndAfterAll {
val serializer = new ReplicatorMessageSerializer(system.asInstanceOf[ExtendedActorSystem])
val address1 = UniqueAddress(Address("akka.tcp", system.name, "some.host.org", 4711), 1)
val address2 = UniqueAddress(Address("akka.tcp", system.name, "other.host.org", 4711), 2)
val address3 = UniqueAddress(Address("akka.tcp", system.name, "some.host.org", 4712), 3)
val keyA = GSetKey[String]("A")
override def afterAll {
shutdown()
}
def checkSerialization(obj: AnyRef): Unit = {
val blob = serializer.toBinary(obj)
val ref = serializer.fromBinary(blob, serializer.manifest(obj))
ref should be(obj)
}
"ReplicatorMessageSerializer" must {
"serialize Replicator messages" in {
val ref1 = system.actorOf(Props.empty, "ref1")
val data1 = GSet.empty[String] + "a"
checkSerialization(Get(keyA, ReadLocal))
checkSerialization(Get(keyA, ReadMajority(2.seconds), Some("x")))
checkSerialization(GetSuccess(keyA, None)(data1))
checkSerialization(GetSuccess(keyA, Some("x"))(data1))
checkSerialization(NotFound(keyA, Some("x")))
checkSerialization(GetFailure(keyA, Some("x")))
checkSerialization(Subscribe(keyA, ref1))
checkSerialization(Unsubscribe(keyA, ref1))
checkSerialization(Changed(keyA)(data1))
checkSerialization(DataEnvelope(data1))
checkSerialization(DataEnvelope(data1, pruning = Map(
address1 -> PruningState(address2, PruningPerformed),
address3 -> PruningState(address2, PruningInitialized(Set(address1.address))))))
checkSerialization(Write("A", DataEnvelope(data1)))
checkSerialization(WriteAck)
checkSerialization(Read("A"))
checkSerialization(ReadResult(Some(DataEnvelope(data1))))
checkSerialization(ReadResult(None))
checkSerialization(Status(Map("A" -> ByteString.fromString("a"),
"B" -> ByteString.fromString("b")), chunk = 3, totChunks = 10))
checkSerialization(Gossip(Map("A" -> DataEnvelope(data1),
"B" -> DataEnvelope(GSet() + "b" + "c")), sendBack = true))
}
}
}

@@ -0,0 +1,98 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata.sample
import scala.concurrent.duration._
import scala.concurrent.forkjoin.ThreadLocalRandom
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorSystem
import akka.actor.Props
import akka.cluster.Cluster
import akka.cluster.ddata.DistributedData
import akka.cluster.ddata.ORSet
import com.typesafe.config.ConfigFactory
import akka.cluster.ddata.Replicator
import akka.cluster.ddata.ORSetKey
object DataBot {
def main(args: Array[String]): Unit = {
if (args.isEmpty)
startup(Seq("2551", "2552", "0"))
else
startup(args)
}
def startup(ports: Seq[String]): Unit = {
ports.foreach { port ⇒
// Override the configuration of the port
val config = ConfigFactory.parseString("akka.remote.netty.tcp.port=" + port).
withFallback(ConfigFactory.load(
ConfigFactory.parseString("""
akka.actor.provider = "akka.cluster.ClusterActorRefProvider"
akka.remote {
netty.tcp {
hostname = "127.0.0.1"
port = 0
}
}
akka.cluster {
seed-nodes = [
"akka.tcp://ClusterSystem@127.0.0.1:2551",
"akka.tcp://ClusterSystem@127.0.0.1:2552"]
auto-down-unreachable-after = 10s
}
""")))
// Create an Akka system
val system = ActorSystem("ClusterSystem", config)
// Create an actor that handles cluster domain events
system.actorOf(Props[DataBot], name = "dataBot")
}
}
private case object Tick
}
class DataBot extends Actor with ActorLogging {
import DataBot._
import Replicator._
val replicator = DistributedData(context.system).replicator
implicit val node = Cluster(context.system)
import context.dispatcher
val tickTask = context.system.scheduler.schedule(5.seconds, 5.seconds, self, Tick)
val DataKey = ORSetKey[String]("key")
replicator ! Subscribe(DataKey, self)
def receive = {
case Tick ⇒
val s = ThreadLocalRandom.current().nextInt(97, 123).toChar.toString
if (ThreadLocalRandom.current().nextBoolean()) {
// add
log.info("Adding: {}", s)
replicator ! Update(DataKey, ORSet.empty[String], WriteLocal)(_ + s)
} else {
// remove
log.info("Removing: {}", s)
replicator ! Update(DataKey, ORSet.empty[String], WriteLocal)(_ - s)
}
case _: UpdateResponse[_] ⇒ // ignore
case c @ Changed(DataKey) ⇒
log.info("Current elements: {}", c.get(DataKey).elements)
}
override def postStop(): Unit = tickTask.cancel()
}

@@ -0,0 +1,137 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.cluster.ddata.sample
import scala.concurrent.duration._
import scala.concurrent.forkjoin.ThreadLocalRandom
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorSystem
import akka.actor.Props
import akka.cluster.Cluster
import akka.cluster.ddata.DistributedData
import akka.cluster.ddata.ORSet
import com.typesafe.config.ConfigFactory
import akka.cluster.ddata.Replicator
import akka.cluster.ddata.ORSetKey
object LotsOfDataBot {
def main(args: Array[String]): Unit = {
if (args.isEmpty)
startup(Seq("2551", "2552", "0"))
else
startup(args)
}
def startup(ports: Seq[String]): Unit = {
ports.foreach { port ⇒
// Override the configuration of the port
val config = ConfigFactory.parseString("akka.remote.netty.tcp.port=" + port).
withFallback(ConfigFactory.load(
ConfigFactory.parseString("""
passive = off
max-entries = 100000
akka.actor.provider = "akka.cluster.ClusterActorRefProvider"
akka.remote {
netty.tcp {
hostname = "127.0.0.1"
port = 0
}
}
akka.cluster {
seed-nodes = [
"akka.tcp://ClusterSystem@127.0.0.1:2551",
"akka.tcp://ClusterSystem@127.0.0.1:2552"]
auto-down-unreachable-after = 10s
}
akka.cluster.distributed-data.use-offheap-memory = off
akka.remote.log-frame-size-exceeding = 10000b
""")))
// Create an Akka system
val system = ActorSystem("ClusterSystem", config)
// Create an actor that handles cluster domain events
system.actorOf(Props[LotsOfDataBot], name = "dataBot")
}
}
private case object Tick
}
class LotsOfDataBot extends Actor with ActorLogging {
import LotsOfDataBot._
import Replicator._
val replicator = DistributedData(context.system).replicator
implicit val cluster = Cluster(context.system)
import context.dispatcher
val isPassive = context.system.settings.config.getBoolean("passive")
var tickTask =
if (isPassive)
context.system.scheduler.schedule(1.seconds, 1.seconds, self, Tick)
else
context.system.scheduler.schedule(20.millis, 20.millis, self, Tick)
val startTime = System.nanoTime()
var count = 1L
val maxEntries = context.system.settings.config.getInt("max-entries")
def receive = if (isPassive) passive else active
def active: Receive = {
case Tick ⇒
val loop = if (count >= maxEntries) 1 else 100
for (_ ← 1 to loop) {
count += 1
if (count % 10000 == 0)
log.info("Reached {} entries", count)
if (count == maxEntries) {
log.info("Reached {} entries", count)
tickTask.cancel()
tickTask = context.system.scheduler.schedule(1.seconds, 1.seconds, self, Tick)
}
val key = ORSetKey[String]((count % maxEntries).toString)
if (count <= 100)
replicator ! Subscribe(key, self)
val s = ThreadLocalRandom.current().nextInt(97, 123).toChar.toString
if (count <= maxEntries || ThreadLocalRandom.current().nextBoolean()) {
// add
replicator ! Update(key, ORSet(), WriteLocal)(_ + s)
} else {
// remove
replicator ! Update(key, ORSet(), WriteLocal)(_ - s)
}
}
case _: UpdateResponse[_] ⇒ // ignore
case c @ Changed(ORSetKey(id)) ⇒
val ORSet(elements) = c.dataValue
log.info("Current elements: {} -> {}", id, elements)
}
def passive: Receive = {
case Tick ⇒
if (!tickTask.isCancelled)
replicator ! GetKeyIds
case GetKeyIdsResult(keys) ⇒
if (keys.size >= maxEntries) {
tickTask.cancel()
val duration = (System.nanoTime() - startTime).nanos.toMillis
log.info("It took {} ms to replicate {} entries", duration, keys.size)
}
case c @ Changed(ORSetKey(id)) ⇒
val ORSet(elements) = c.dataValue
log.info("Current elements: {} -> {}", id, elements)
}
override def postStop(): Unit = tickTask.cancel()
}