+cdd #16799 Add Distributed Data module

Previously known as [patriknw/akka-data-replication](https://github.com/patriknw/akka-data-replication),
which was originally inspired by [jboner/akka-crdt](https://github.com/jboner/akka-crdt).

The functionality is very similar to akka-data-replication 0.11.

Here is a list of the most important changes:

* The package name changed to `akka.cluster.ddata`
* The extension was renamed to `DistributedData`
* The keys changed from strings to classes with unique identifiers and type information of the data values,
  e.g. `ORSetKey[Int]("set2")`
* The optional read consistency parameter was removed from the `Update` message. If you need to read from
  other replicas before performing the update, first send a `Get` message and then continue with the
  `Update` when the `GetSuccess` is received (see the sketch after this list).
* `BigInt` is used in `GCounter` and `PNCounter` instead of `Long`
* Improvements of the Java API
* Better documentation
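
For reference, here is a minimal sketch of the read-then-update flow described above, using a typed key. It assumes the `Replicator` message API of this module (`Get`/`GetSuccess`/`NotFound`/`Update` with `ReadMajority`/`WriteLocal` consistency); the actor name, key name, timeout, and modify functions are illustrative only:

```scala
import scala.concurrent.duration._
import akka.actor.Actor
import akka.cluster.Cluster
import akka.cluster.ddata._
import akka.cluster.ddata.Replicator._

class ReadThenUpdate extends Actor {
  // the ORSet modify function needs the local cluster node in implicit scope
  implicit val cluster = Cluster(context.system)
  val replicator = DistributedData(context.system).replicator

  // typed key: unique identifier plus the element type of the data value
  val SetKey = ORSetKey[Int]("set2")

  // read from a majority of replicas first ...
  replicator ! Get(SetKey, ReadMajority(timeout = 3.seconds))

  def receive = {
    case g @ GetSuccess(SetKey, _) =>
      val current = g.get(SetKey) // the value seen by the read
      // ... and continue with the Update once the read has completed
      replicator ! Update(SetKey, ORSet.empty[Int], WriteLocal)(_ + current.elements.size)
    case NotFound(SetKey, _) =>
      replicator ! Update(SetKey, ORSet.empty[Int], WriteLocal)(_ + 0)
    case _: UpdateSuccess[_] => // the update was accepted by the local replica
  }
}
```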
Commit cbe5dd2cf5 (parent bf28260cd0) by Patrik Nordwall, 2015-05-17 12:28:47 +02:00
69 changed files with 40036 additions and 3 deletions


@@ -0,0 +1,172 @@
/**
 * Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
 */
package akka.cluster.ddata

import akka.actor.Address
import akka.cluster.UniqueAddress
import akka.cluster.ddata.Replicator.Changed
import org.scalatest.Matchers
import org.scalatest.WordSpec

class PNCounterSpec extends WordSpec with Matchers {
  val node1 = UniqueAddress(Address("akka.tcp", "Sys", "localhost", 2551), 1)
  val node2 = UniqueAddress(node1.address.copy(port = Some(2552)), 2)

  "A PNCounter" must {
"be able to increment each node's record by one" in {
val c1 = PNCounter()
val c2 = c1 increment node1
val c3 = c2 increment node1
val c4 = c3 increment node2
val c5 = c4 increment node2
val c6 = c5 increment node2
c6.increments.state(node1) should be(2)
c6.increments.state(node2) should be(3)
}
"be able to decrement each node's record by one" in {
val c1 = PNCounter()
val c2 = c1 decrement node1
val c3 = c2 decrement node1
val c4 = c3 decrement node2
val c5 = c4 decrement node2
val c6 = c5 decrement node2
c6.decrements.state(node1) should be(2)
c6.decrements.state(node2) should be(3)
}
"be able to increment each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 increment (node1, 4)
val c4 = c3 increment (node2, 2)
val c5 = c4 increment (node2, 7)
val c6 = c5 increment node2
c6.increments.state(node1) should be(7)
c6.increments.state(node2) should be(10)
}
"be able to decrement each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 decrement (node1, 3)
val c3 = c2 decrement (node1, 4)
val c4 = c3 decrement (node2, 2)
val c5 = c4 decrement (node2, 7)
val c6 = c5 decrement node2
c6.decrements.state(node1) should be(7)
c6.decrements.state(node2) should be(10)
}
"be able to increment and decrement each node's record by arbitrary delta" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 decrement (node1, 2)
val c4 = c3 increment (node2, 5)
val c5 = c4 decrement (node2, 2)
val c6 = c5 increment node2
c6.increments.value should be(9)
c6.decrements.value should be(4)
}
"be able to summarize the history to the correct aggregated value of increments and decrements" in {
val c1 = PNCounter()
val c2 = c1 increment (node1, 3)
val c3 = c2 decrement (node1, 2)
val c4 = c3 increment (node2, 5)
val c5 = c4 decrement (node2, 2)
val c6 = c5 increment node2
c6.increments.value should be(9)
c6.decrements.value should be(4)
c6.value should be(5)
}
"be able to have its history correctly merged with another GCounter" in {
// counter 1
val c11 = PNCounter()
val c12 = c11 increment (node1, 3)
val c13 = c12 decrement (node1, 2)
val c14 = c13 increment (node2, 5)
val c15 = c14 decrement (node2, 2)
val c16 = c15 increment node2
c16.increments.value should be(9)
c16.decrements.value should be(4)
c16.value should be(5)
// counter 1
val c21 = PNCounter()
val c22 = c21 increment (node1, 2)
val c23 = c22 decrement (node1, 3)
val c24 = c23 increment (node2, 3)
val c25 = c24 decrement (node2, 2)
val c26 = c25 increment node2
c26.increments.value should be(6)
c26.decrements.value should be(5)
c26.value should be(1)
// merge both ways
val merged1 = c16 merge c26
merged1.increments.value should be(9)
merged1.decrements.value should be(5)
merged1.value should be(4)
val merged2 = c26 merge c16
merged2.increments.value should be(9)
merged2.decrements.value should be(5)
merged2.value should be(4)
}
"have support for pruning" in {
val c1 = PNCounter()
val c2 = c1 increment node1
val c3 = c2 decrement node2
c2.needPruningFrom(node1) should be(true)
c2.needPruningFrom(node2) should be(false)
c3.needPruningFrom(node1) should be(true)
c3.needPruningFrom(node2) should be(true)
val c4 = c3.prune(node1, node2)
c4.needPruningFrom(node2) should be(true)
c4.needPruningFrom(node1) should be(false)
val c5 = (c4 increment node1).pruningCleanup(node1)
c5.needPruningFrom(node1) should be(false)
}
"have unapply extractor" in {
val c1 = PNCounter.empty.increment(node1).increment(node1).decrement(node2)
val PNCounter(value1) = c1
val value2: BigInt = value1
Changed(PNCounterKey("key"))(c1) match {
case c @ Changed(PNCounterKey("key"))
val PNCounter(value3) = c.dataValue
val value4: BigInt = value3
value4 should be(1L)
}
}
}
}