format source with scalafmt

This commit is contained in:
Auto Format 2019-03-11 10:38:24 +01:00 committed by Patrik Nordwall
parent 0f40491d42
commit ce404e4f53
1669 changed files with 43208 additions and 35404 deletions

View file

@@ -108,27 +108,27 @@ import akka.util.ccompat._
val cacheKey = (key, fromSeqNr, toSeqNr)
val deltaGroup = cache.get(cacheKey) match {
case None =>
val group = deltaEntriesAfterJ.valuesIterator.reduceLeft {
(d1, d2) =>
val merged = d2 match {
case NoDeltaPlaceholder => NoDeltaPlaceholder
case _ =>
// this is fine also if d1 is a NoDeltaPlaceholder
d1.merge(d2.asInstanceOf[d1.T])
}
merged match {
case s: ReplicatedDeltaSize if s.deltaSize >= maxDeltaSize =>
// discard too large deltas
NoDeltaPlaceholder
case _ => merged
}
val group = deltaEntriesAfterJ.valuesIterator.reduceLeft { (d1, d2) =>
val merged = d2 match {
case NoDeltaPlaceholder => NoDeltaPlaceholder
case _ =>
// this is fine also if d1 is a NoDeltaPlaceholder
d1.merge(d2.asInstanceOf[d1.T])
}
merged match {
case s: ReplicatedDeltaSize if s.deltaSize >= maxDeltaSize =>
// discard too large deltas
NoDeltaPlaceholder
case _ => merged
}
}
cache = cache.updated(cacheKey, group)
group
case Some(group) => group
}
deltas = deltas.updated(key, (deltaGroup, fromSeqNr, toSeqNr))
deltaSentToNode = deltaSentToNode.updated(key, deltaSentToNodeForKey.updated(node, deltaEntriesAfterJ.lastKey))
deltaSentToNode =
deltaSentToNode.updated(key, deltaSentToNodeForKey.updated(node, deltaEntriesAfterJ.lastKey))
}
}

View file

@@ -37,7 +37,8 @@ class DistributedData(system: ExtendedActorSystem) extends Extension {
*/
val replicator: ActorRef =
if (isTerminated) {
system.log.warning("Replicator points to dead letters: Make sure the cluster node is not terminated and has the proper role!")
system.log.warning(
"Replicator points to dead letters: Make sure the cluster node is not terminated and has the proper role!")
system.deadLetters
} else {
system.systemActorOf(Replicator.props(settings), ReplicatorSettings.name(system, None))

View file

@@ -81,8 +81,8 @@ object DurableStore {
* the wrapped `ReplicatedData` including its serializerId and
* manifest.
*/
final class DurableDataEnvelope private[akka] (
private[akka] val dataEnvelope: DataEnvelope) extends ReplicatorMessage {
final class DurableDataEnvelope private[akka] (private[akka] val dataEnvelope: DataEnvelope)
extends ReplicatorMessage {
def this(data: ReplicatedData) = this(DataEnvelope(data))
@@ -103,11 +103,10 @@ object LmdbDurableStore {
private case object WriteBehind extends DeadLetterSuppression
private final case class Lmdb(
env: Env[ByteBuffer],
db: Dbi[ByteBuffer],
keyBuffer: ByteBuffer,
valueBuffer: ByteBuffer)
private final case class Lmdb(env: Env[ByteBuffer],
db: Dbi[ByteBuffer],
keyBuffer: ByteBuffer,
valueBuffer: ByteBuffer)
}
final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
@@ -142,10 +141,7 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
val env = {
val mapSize = config.getBytes("lmdb.map-size")
dir.mkdirs()
Env.create()
.setMapSize(mapSize)
.setMaxDbs(1)
.open(dir, EnvFlags.MDB_NOLOCK)
Env.create().setMapSize(mapSize).setMaxDbs(1).open(dir, EnvFlags.MDB_NOLOCK)
}
val db = env.openDbi("ddata", DbiFlags.MDB_CREATE)
@@ -154,8 +150,9 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
val valueBuffer = ByteBuffer.allocateDirect(100 * 1024) // will grow when needed
if (log.isDebugEnabled)
log.debug("Init of LMDB in directory [{}] took [{} ms]", dir.getCanonicalPath,
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
log.debug("Init of LMDB in directory [{}] took [{} ms]",
dir.getCanonicalPath,
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
val l = Lmdb(env, db, keyBuffer, valueBuffer)
_lmdb = OptionVal.Some(l)
l
@@ -218,8 +215,7 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
sender() ! loadData
sender() ! LoadAllCompleted
if (log.isDebugEnabled)
log.debug("load all of [{}] entries took [{} ms]", n,
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
log.debug("load all of [{}] entries took [{} ms]", n, TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
context.become(active)
} finally {
Try(iter.close())
@@ -297,8 +293,9 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
}
tx.commit()
if (log.isDebugEnabled)
log.debug("store and commit of [{}] entries took [{} ms]", pending.size,
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
log.debug("store and commit of [{}] entries took [{} ms]",
pending.size,
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
} catch {
case NonFatal(e) =>
import scala.collection.JavaConverters._
@@ -311,4 +308,3 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
}
}

View file

@@ -5,6 +5,7 @@
package akka.cluster.ddata
object Flag {
/**
* `Flag` that is initialized to `false`.
*/

View file

@@ -12,6 +12,7 @@ import akka.annotation.InternalApi
object GCounter {
val empty: GCounter = new GCounter
def apply(): GCounter = empty
/**
* Java API
*/
@@ -40,11 +41,13 @@ object GCounter {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class GCounter private[akka] (
private[akka] val state: Map[UniqueAddress, BigInt] = Map.empty,
override val delta: Option[GCounter] = None)
extends DeltaReplicatedData with ReplicatedDelta
with ReplicatedDataSerialization with RemovedNodePruning with FastMerge {
final class GCounter private[akka] (private[akka] val state: Map[UniqueAddress, BigInt] = Map.empty,
override val delta: Option[GCounter] = None)
extends DeltaReplicatedData
with ReplicatedDelta
with ReplicatedDataSerialization
with RemovedNodePruning
with FastMerge {
import GCounter.Zero
@@ -54,7 +57,9 @@ final class GCounter private[akka] (
/**
* Scala API: Current total value of the counter.
*/
def value: BigInt = state.values.foldLeft(Zero) { (acc, v) => acc + v }
def value: BigInt = state.values.foldLeft(Zero) { (acc, v) =>
acc + v
}
/**
* Java API: Current total value of the counter.

View file

@@ -31,8 +31,10 @@ object GSet {
*/
@SerialVersionUID(1L)
final case class GSet[A] private (elements: Set[A])(override val delta: Option[GSet[A]])
extends DeltaReplicatedData with ReplicatedDelta
with ReplicatedDataSerialization with FastMerge {
extends DeltaReplicatedData
with ReplicatedDelta
with ReplicatedDataSerialization
with FastMerge {
type T = GSet[A]
type D = GSet[A]
@@ -72,7 +74,7 @@ final case class GSet[A] private (elements: Set[A])(override val delta: Option[G
else if (this.isAncestorOf(that)) that.clearAncestor()
else {
clearAncestor()
new GSet[A](elements union that.elements)(None)
new GSet[A](elements.union(that.elements))(None)
}
override def mergeDelta(thatDelta: GSet[A]): GSet[A] = merge(thatDelta)

View file

@@ -5,6 +5,7 @@
package akka.cluster.ddata
object Key {
/**
* Extract the [[Key#id]].
*/
@@ -35,4 +36,3 @@ abstract class Key[+T <: ReplicatedData](val id: Key.KeyId) extends Serializable
override def toString(): String = id
}

View file

@@ -10,6 +10,7 @@ import akka.cluster.UniqueAddress
import akka.cluster.ddata.ORMap.ZeroTag
object LWWMap {
/**
* INTERNAL API
*/
@@ -21,6 +22,7 @@ object LWWMap {
private val _empty: LWWMap[Any, Any] = new LWWMap(new ORMap(ORSet.empty, Map.empty, zeroTag = LWWMapTag))
def empty[A, B]: LWWMap[A, B] = _empty.asInstanceOf[LWWMap[A, B]]
def apply(): LWWMap[Any, Any] = _empty
/**
* Java API
*/
@@ -55,10 +57,11 @@ object LWWMap {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class LWWMap[A, B] private[akka] (
private[akka] val underlying: ORMap[A, LWWRegister[B]])
extends DeltaReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
import LWWRegister.{ Clock, defaultClock }
final class LWWMap[A, B] private[akka] (private[akka] val underlying: ORMap[A, LWWRegister[B]])
extends DeltaReplicatedData
with ReplicatedDataSerialization
with RemovedNodePruning {
import LWWRegister.{ defaultClock, Clock }
type T = LWWMap[A, B]
type D = ORMap.DeltaOp

View file

@@ -12,6 +12,7 @@ import akka.util.HashCode
object LWWRegister {
trait Clock[A] {
/**
* @param currentTimestamp the current `timestamp` value of the `LWWRegister`
* @param value the register value to set and associate with the returned timestamp
@@ -128,12 +129,10 @@ object LWWRegister {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class LWWRegister[A] private[akka] (
private[akka] val node: UniqueAddress,
val value: A,
val timestamp: Long)
extends ReplicatedData with ReplicatedDataSerialization {
import LWWRegister.{ Clock, defaultClock }
final class LWWRegister[A] private[akka] (private[akka] val node: UniqueAddress, val value: A, val timestamp: Long)
extends ReplicatedData
with ReplicatedDataSerialization {
import LWWRegister.{ defaultClock, Clock }
type T = LWWRegister[A]

View file

@@ -16,6 +16,7 @@ object ORMap {
private val _empty: ORMap[Any, ReplicatedData] = new ORMap(ORSet.empty, Map.empty, VanillaORMapTag)
def empty[A, B <: ReplicatedData]: ORMap[A, B] = _empty.asInstanceOf[ORMap[A, B]]
def apply(): ORMap[Any, ReplicatedData] = _empty
/**
* Java API
*/
@@ -52,7 +53,9 @@ object ORMap {
/**
* INTERNAL API
*/
@InternalApi private[akka] sealed abstract class AtomicDeltaOp[A, B <: ReplicatedData] extends DeltaOp with ReplicatedDeltaSize {
@InternalApi private[akka] sealed abstract class AtomicDeltaOp[A, B <: ReplicatedData]
extends DeltaOp
with ReplicatedDeltaSize {
def underlying: ORSet.DeltaOp
def zeroTag: ZeroTag
override def zero: DeltaReplicatedData = zeroTag.zero
@@ -65,7 +68,10 @@ object ORMap {
// PutDeltaOp contains ORSet delta and full value
/** INTERNAL API */
@InternalApi private[akka] final case class PutDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp, value: (A, B), zeroTag: ZeroTag) extends AtomicDeltaOp[A, B] {
@InternalApi private[akka] final case class PutDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp,
value: (A, B),
zeroTag: ZeroTag)
extends AtomicDeltaOp[A, B] {
override def merge(that: DeltaOp): DeltaOp = that match {
case put: PutDeltaOp[A, B] if this.value._1 == put.value._1 =>
new PutDeltaOp[A, B](this.underlying.merge(put.underlying), put.value, zeroTag)
@@ -87,21 +93,21 @@ object ORMap {
// UpdateDeltaOp contains ORSet delta and either delta of value (in case where underlying type supports deltas) or full value
/** INTERNAL API */
@InternalApi private[akka] final case class UpdateDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp, values: Map[A, B], zeroTag: ZeroTag) extends AtomicDeltaOp[A, B] {
@InternalApi private[akka] final case class UpdateDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp,
values: Map[A, B],
zeroTag: ZeroTag)
extends AtomicDeltaOp[A, B] {
override def merge(that: DeltaOp): DeltaOp = that match {
case update: UpdateDeltaOp[A, B] =>
new UpdateDeltaOp[A, B](
this.underlying.merge(update.underlying),
update.values.foldLeft(this.values) {
(map, pair) =>
val (key, value) = pair
if (this.values.contains(key)) {
val elem1 = this.values(key)
val elem2 = value.asInstanceOf[elem1.T]
map + (key -> elem1.merge(elem2).asInstanceOf[B])
} else map + pair
},
zeroTag)
new UpdateDeltaOp[A, B](this.underlying.merge(update.underlying), update.values.foldLeft(this.values) {
(map, pair) =>
val (key, value) = pair
if (this.values.contains(key)) {
val elem1 = this.values(key)
val elem2 = value.asInstanceOf[elem1.T]
map + (key -> elem1.merge(elem2).asInstanceOf[B])
} else map + pair
}, zeroTag)
case put: PutDeltaOp[A, B] if this.values.size == 1 && this.values.contains(put.value._1) =>
new PutDeltaOp[A, B](this.underlying.merge(put.underlying), put.value, zeroTag)
case other: AtomicDeltaOp[A, B] => DeltaGroup(Vector(this, other))
@@ -111,16 +117,22 @@ object ORMap {
// RemoveDeltaOp does not contain any value at all - the propagated 'value' map would be empty
/** INTERNAL API */
@InternalApi private[akka] final case class RemoveDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp, zeroTag: ZeroTag) extends AtomicDeltaOp[A, B]
@InternalApi private[akka] final case class RemoveDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp,
zeroTag: ZeroTag)
extends AtomicDeltaOp[A, B]
// RemoveKeyDeltaOp contains a single value - to provide the recipient with the removed key for value map
/** INTERNAL API */
@InternalApi private[akka] final case class RemoveKeyDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp, removedKey: A, zeroTag: ZeroTag) extends AtomicDeltaOp[A, B]
@InternalApi private[akka] final case class RemoveKeyDeltaOp[A, B <: ReplicatedData](underlying: ORSet.DeltaOp,
removedKey: A,
zeroTag: ZeroTag)
extends AtomicDeltaOp[A, B]
// DeltaGroup is effectively a causally ordered list of individual deltas
/** INTERNAL API */
@InternalApi private[akka] final case class DeltaGroup[A, B <: ReplicatedData](ops: immutable.IndexedSeq[DeltaOp])
extends DeltaOp with ReplicatedDeltaSize {
extends DeltaOp
with ReplicatedDeltaSize {
override def merge(that: DeltaOp): DeltaOp = that match {
case that: AtomicDeltaOp[A, B] =>
ops.last match {
@@ -141,7 +153,8 @@ object ORMap {
case DeltaGroup(thatOps) => DeltaGroup(ops ++ thatOps)
}
override def zero: DeltaReplicatedData = ops.headOption.fold(ORMap.empty[A, B].asInstanceOf[DeltaReplicatedData])(_.zero)
override def zero: DeltaReplicatedData =
ops.headOption.fold(ORMap.empty[A, B].asInstanceOf[DeltaReplicatedData])(_.zero)
override def deltaSize: Int = ops.size
}
@@ -156,14 +169,15 @@ object ORMap {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class ORMap[A, B <: ReplicatedData] private[akka] (
private[akka] val keys: ORSet[A],
private[akka] val values: Map[A, B],
private[akka] val zeroTag: ZeroTag,
override val delta: Option[ORMap.DeltaOp] = None)
extends DeltaReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
final class ORMap[A, B <: ReplicatedData] private[akka] (private[akka] val keys: ORSet[A],
private[akka] val values: Map[A, B],
private[akka] val zeroTag: ZeroTag,
override val delta: Option[ORMap.DeltaOp] = None)
extends DeltaReplicatedData
with ReplicatedDataSerialization
with RemovedNodePruning {
import ORMap.{ PutDeltaOp, UpdateDeltaOp, RemoveDeltaOp, RemoveKeyDeltaOp }
import ORMap.{ PutDeltaOp, RemoveDeltaOp, RemoveKeyDeltaOp, UpdateDeltaOp }
type T = ORMap[A, B]
type D = ORMap.DeltaOp
@@ -235,8 +249,8 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
if (value.isInstanceOf[ORSet[_]] && values.contains(key))
throw new IllegalArgumentException(
"`ORMap.put` must not be used to replace an existing `ORSet` " +
"value, because important history can be lost when replacing the `ORSet` and " +
"undesired effects of merging will occur. Use `ORMultiMap` or `ORMap.updated` instead.")
"value, because important history can be lost when replacing the `ORSet` and " +
"undesired effects of merging will occur. Use `ORMultiMap` or `ORMap.updated` instead.")
else {
val newKeys = keys.resetDelta.add(node, key)
val putDeltaOp = PutDeltaOp(newKeys.delta.get, key -> value, zeroTag)
@@ -285,7 +299,8 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
/**
* INTERNAL API
*/
@InternalApi private[akka] def updated(node: UniqueAddress, key: A, initial: B, valueDeltas: Boolean = false)(modify: B => B): ORMap[A, B] = {
@InternalApi private[akka] def updated(node: UniqueAddress, key: A, initial: B, valueDeltas: Boolean = false)(
modify: B => B): ORMap[A, B] = {
val (oldValue, hasOldValue) = values.get(key) match {
case Some(old) => (old, true)
case _ => (initial, false)
@@ -411,7 +426,9 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
}
var mergedKeys: ORSet[A] = this.keys
var (mergedValues, tombstonedVals): (Map[A, B], Map[A, B]) = this.values.partition { case (k, _) => this.keys.contains(k) }
var (mergedValues, tombstonedVals): (Map[A, B], Map[A, B]) = this.values.partition {
case (k, _) => this.keys.contains(k)
}
val processDelta: PartialFunction[ORMap.DeltaOp, Unit] = {
case putOp: PutDeltaOp[A, B] =>
@@ -466,7 +483,7 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
}
}
(processDelta orElse processNestedDelta)(thatDelta)
processDelta.orElse(processNestedDelta)(thatDelta)
if (withValueDeltas)
new ORMap[A, B](mergedKeys, tombstonedVals ++ mergedValues, zeroTag = zeroTag)
@@ -497,10 +514,10 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
}
override def modifiedByNodes: Set[UniqueAddress] = {
keys.modifiedByNodes union values.foldLeft(Set.empty[UniqueAddress]) {
case (acc, (_, data: RemovedNodePruning)) => acc union data.modifiedByNodes
keys.modifiedByNodes.union(values.foldLeft(Set.empty[UniqueAddress]) {
case (acc, (_, data: RemovedNodePruning)) => acc.union(data.modifiedByNodes)
case (acc, _) => acc
}
})
}
override def needPruningFrom(removedNode: UniqueAddress): Boolean = {
@@ -553,4 +570,6 @@ object ORMapKey {
}
@SerialVersionUID(1L)
final case class ORMapKey[A, B <: ReplicatedData](_id: String) extends Key[ORMap[A, B]](_id) with ReplicatedDataSerialization
final case class ORMapKey[A, B <: ReplicatedData](_id: String)
extends Key[ORMap[A, B]](_id)
with ReplicatedDataSerialization

View file

@@ -9,6 +9,7 @@ import akka.cluster.ddata.ORMap._
import akka.cluster.{ Cluster, UniqueAddress }
object ORMultiMap {
/**
* INTERNAL API
*/
@@ -26,7 +27,9 @@ object ORMultiMap {
}
val _empty: ORMultiMap[Any, Any] = new ORMultiMap(new ORMap(ORSet.empty, Map.empty, zeroTag = ORMultiMapTag), false)
val _emptyWithValueDeltas: ORMultiMap[Any, Any] = new ORMultiMap(new ORMap(ORSet.empty, Map.empty, zeroTag = ORMultiMapWithValueDeltasTag), true)
val _emptyWithValueDeltas: ORMultiMap[Any, Any] =
new ORMultiMap(new ORMap(ORSet.empty, Map.empty, zeroTag = ORMultiMapWithValueDeltasTag), true)
/**
* Provides an empty multimap.
*/
@@ -62,10 +65,11 @@ object ORMultiMap {
* Note that on concurrent adds and removals for the same key (on the same set), removals can be lost.
*/
@SerialVersionUID(1L)
final class ORMultiMap[A, B] private[akka] (
private[akka] val underlying: ORMap[A, ORSet[B]],
private[akka] val withValueDeltas: Boolean)
extends DeltaReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
final class ORMultiMap[A, B] private[akka] (private[akka] val underlying: ORMap[A, ORSet[B]],
private[akka] val withValueDeltas: Boolean)
extends DeltaReplicatedData
with ReplicatedDataSerialization
with RemovedNodePruning {
override type T = ORMultiMap[A, B]
override type D = ORMap.DeltaOp
@@ -75,8 +79,11 @@ final class ORMultiMap[A, B] private[akka] (
if (withValueDeltas) {
val newUnderlying = underlying.mergeRetainingDeletedValues(that.underlying)
// Garbage collect the tombstones we no longer need, i.e. those that have Set() as a value.
val newValues = newUnderlying.values.filterNot { case (key, value) => !newUnderlying.keys.contains(key) && value.isEmpty }
new ORMultiMap[A, B](new ORMap(newUnderlying.keys, newValues, newUnderlying.zeroTag, newUnderlying.delta), withValueDeltas)
val newValues = newUnderlying.values.filterNot {
case (key, value) => !newUnderlying.keys.contains(key) && value.isEmpty
}
new ORMultiMap[A, B](new ORMap(newUnderlying.keys, newValues, newUnderlying.zeroTag, newUnderlying.delta),
withValueDeltas)
} else
new ORMultiMap(underlying.merge(that.underlying), withValueDeltas)
} else throw new IllegalArgumentException("Trying to merge two ORMultiMaps of different map sub-type")
@@ -84,10 +91,10 @@ final class ORMultiMap[A, B] private[akka] (
/**
* Scala API: All entries of a multimap where keys are strings and values are sets.
*/
def entries: Map[A, Set[B]] = if (withValueDeltas)
underlying.entries.collect { case (k, v) if underlying.keys.elements.contains(k) => k -> v.elements }
else
underlying.entries.map { case (k, v) => k -> v.elements }
def entries: Map[A, Set[B]] =
if (withValueDeltas)
underlying.entries.collect { case (k, v) if underlying.keys.elements.contains(k) => k -> v.elements } else
underlying.entries.map { case (k, v) => k -> v.elements }
/**
* Java API: All entries of a multimap where keys are strings and values are sets.
@@ -96,8 +103,9 @@ final class ORMultiMap[A, B] private[akka] (
import scala.collection.JavaConverters._
val result = new java.util.HashMap[A, java.util.Set[B]]
if (withValueDeltas)
underlying.entries.foreach { case (k, v) => if (underlying.keys.elements.contains(k)) result.put(k, v.elements.asJava) }
else
underlying.entries.foreach {
case (k, v) => if (underlying.keys.elements.contains(k)) result.put(k, v.elements.asJava)
} else
underlying.entries.foreach { case (k, v) => result.put(k, v.elements.asJava) }
result
}
@@ -171,7 +179,9 @@ final class ORMultiMap[A, B] private[akka] (
*/
@InternalApi private[akka] def put(node: UniqueAddress, key: A, value: Set[B]): ORMultiMap[A, B] = {
val newUnderlying = underlying.updated(node, key, ORSet.empty[B], valueDeltas = withValueDeltas) { existing =>
value.foldLeft(existing.clear(node)) { (s, element) => s.add(node, element) }
value.foldLeft(existing.clear(node)) { (s, element) =>
s.add(node, element)
}
}
new ORMultiMap(newUnderlying, withValueDeltas)
}
@@ -203,7 +213,9 @@ final class ORMultiMap[A, B] private[akka] (
*/
@InternalApi private[akka] def remove(node: UniqueAddress, key: A): ORMultiMap[A, B] = {
if (withValueDeltas) {
val u = underlying.updated(node, key, ORSet.empty[B], valueDeltas = true) { existing => existing.clear(node) }
val u = underlying.updated(node, key, ORSet.empty[B], valueDeltas = true) { existing =>
existing.clear(node)
}
new ORMultiMap(u.removeKey(node, key), withValueDeltas)
} else {
new ORMultiMap(underlying.remove(node, key), withValueDeltas)
@@ -232,7 +244,8 @@ final class ORMultiMap[A, B] private[akka] (
* INTERNAL API
*/
@InternalApi private[akka] def addBinding(node: UniqueAddress, key: A, element: B): ORMultiMap[A, B] = {
val newUnderlying = underlying.updated(node, key, ORSet.empty[B], valueDeltas = withValueDeltas)(_.add(node, element))
val newUnderlying =
underlying.updated(node, key, ORSet.empty[B], valueDeltas = withValueDeltas)(_.add(node, element))
new ORMultiMap(newUnderlying, withValueDeltas)
}
@@ -292,7 +305,10 @@ final class ORMultiMap[A, B] private[akka] (
/**
* INTERNAL API
*/
@InternalApi private[akka] def replaceBinding(node: UniqueAddress, key: A, oldElement: B, newElement: B): ORMultiMap[A, B] =
@InternalApi private[akka] def replaceBinding(node: UniqueAddress,
key: A,
oldElement: B,
newElement: B): ORMultiMap[A, B] =
if (newElement != oldElement)
addBinding(node, key, newElement).removeBinding(node, key, oldElement)
else
@@ -307,8 +323,11 @@ final class ORMultiMap[A, B] private[akka] (
if (withValueDeltas) {
val newUnderlying = underlying.mergeDeltaRetainingDeletedValues(thatDelta)
// Garbage collect the tombstones we no longer need, i.e. those that have Set() as a value.
val newValues = newUnderlying.values.filterNot { case (key, value) => !newUnderlying.keys.contains(key) && value.isEmpty }
new ORMultiMap[A, B](new ORMap(newUnderlying.keys, newValues, newUnderlying.zeroTag, newUnderlying.delta), withValueDeltas)
val newValues = newUnderlying.values.filterNot {
case (key, value) => !newUnderlying.keys.contains(key) && value.isEmpty
}
new ORMultiMap[A, B](new ORMap(newUnderlying.keys, newValues, newUnderlying.zeroTag, newUnderlying.delta),
withValueDeltas)
} else
new ORMultiMap(underlying.mergeDelta(thatDelta), withValueDeltas)

View file

@@ -16,6 +16,7 @@ object ORSet {
private val _empty: ORSet[Any] = new ORSet(Map.empty, VersionVector.empty)
def empty[A]: ORSet[A] = _empty.asInstanceOf[ORSet[A]]
def apply(): ORSet[Any] = _empty
/**
* Java API
*/
@@ -58,9 +59,8 @@ object ORSet {
override def merge(that: DeltaOp): DeltaOp = that match {
case AddDeltaOp(u) =>
// Note that we only merge deltas originating from the same node
AddDeltaOp(new ORSet(
concatElementsMap(u.elementsMap.asInstanceOf[Map[A, Dot]]),
underlying.vvector.merge(u.vvector)))
AddDeltaOp(
new ORSet(concatElementsMap(u.elementsMap.asInstanceOf[Map[A, Dot]]), underlying.vvector.merge(u.vvector)))
case _: AtomicDeltaOp[A] => DeltaGroup(Vector(this, that))
case DeltaGroup(ops) => DeltaGroup(this +: ops)
}
@@ -97,7 +97,8 @@ object ORSet {
* INTERNAL API
*/
@InternalApi private[akka] final case class DeltaGroup[A](ops: immutable.IndexedSeq[DeltaOp])
extends DeltaOp with ReplicatedDeltaSize {
extends DeltaOp
with ReplicatedDeltaSize {
override def merge(that: DeltaOp): DeltaOp = that match {
case thatAdd: AddDeltaOp[A] =>
// merge AddDeltaOp into last AddDeltaOp in the group, if possible
@@ -125,7 +126,8 @@
*/
@InternalApi private[akka] def subtractDots(dot: Dot, vvector: VersionVector): Dot = {
@tailrec def dropDots(remaining: List[(UniqueAddress, Long)], acc: List[(UniqueAddress, Long)]): List[(UniqueAddress, Long)] =
@tailrec def dropDots(remaining: List[(UniqueAddress, Long)],
acc: List[(UniqueAddress, Long)]): List[(UniqueAddress, Long)] =
remaining match {
case Nil => acc
case (d @ (node, v1)) :: rest =>
@@ -158,7 +160,9 @@
* INTERNAL API
* @see [[ORSet#merge]]
*/
@InternalApi private[akka] def mergeCommonKeys[A](commonKeys: Set[A], lhs: ORSet[A], rhs: ORSet[A]): Map[A, ORSet.Dot] =
@InternalApi private[akka] def mergeCommonKeys[A](commonKeys: Set[A],
lhs: ORSet[A],
rhs: ORSet[A]): Map[A, ORSet.Dot] =
mergeCommonKeys(commonKeys.iterator, lhs, rhs)
private def mergeCommonKeys[A](commonKeys: Iterator[A], lhs: ORSet[A], rhs: ORSet[A]): Map[A, ORSet.Dot] = {
@@ -227,12 +231,15 @@
* INTERNAL API
* @see [[ORSet#merge]]
*/
@InternalApi private[akka] def mergeDisjointKeys[A](
keys: Set[A], elementsMap: Map[A, ORSet.Dot], vvector: VersionVector,
accumulator: Map[A, ORSet.Dot]): Map[A, ORSet.Dot] =
@InternalApi private[akka] def mergeDisjointKeys[A](keys: Set[A],
elementsMap: Map[A, ORSet.Dot],
vvector: VersionVector,
accumulator: Map[A, ORSet.Dot]): Map[A, ORSet.Dot] =
mergeDisjointKeys(keys.iterator, elementsMap, vvector, accumulator)
private def mergeDisjointKeys[A](keys: Iterator[A], elementsMap: Map[A, ORSet.Dot], vvector: VersionVector,
private def mergeDisjointKeys[A](keys: Iterator[A],
elementsMap: Map[A, ORSet.Dot],
vvector: VersionVector,
accumulator: Map[A, ORSet.Dot]): Map[A, ORSet.Dot] = {
keys.foldLeft(accumulator) {
case (acc, k) =>
@@ -278,12 +285,13 @@
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class ORSet[A] private[akka] (
private[akka] val elementsMap: Map[A, ORSet.Dot],
private[akka] val vvector: VersionVector,
override val delta: Option[ORSet.DeltaOp] = None)
extends DeltaReplicatedData
with ReplicatedDataSerialization with RemovedNodePruning with FastMerge {
final class ORSet[A] private[akka] (private[akka] val elementsMap: Map[A, ORSet.Dot],
private[akka] val vvector: VersionVector,
override val delta: Option[ORSet.DeltaOp] = None)
extends DeltaReplicatedData
with ReplicatedDataSerialization
with RemovedNodePruning
with FastMerge {
type T = ORSet[A]
type D = ORSet.DeltaOp
@@ -426,8 +434,7 @@ final class ORSet[A] private[akka] (
val entries00 = ORSet.mergeCommonKeys(commonKeys, this, that)
val entries0 =
if (addDeltaOp)
entries00 ++ this.elementsMap.filter { case (elem, _) => !that.elementsMap.contains(elem) }
else {
entries00 ++ this.elementsMap.filter { case (elem, _) => !that.elementsMap.contains(elem) } else {
val thisUniqueKeys = this.elementsMap.keysIterator.filterNot(that.elementsMap.contains)
ORSet.mergeDisjointKeys(thisUniqueKeys, this.elementsMap, that.vvector, entries00)
}
@@ -523,7 +530,8 @@ final class ORSet[A] private[akka] (
new ORSet(updated, vvector.pruningCleanup(removedNode))
}
private def copy(elementsMap: Map[A, ORSet.Dot] = this.elementsMap, vvector: VersionVector = this.vvector,
private def copy(elementsMap: Map[A, ORSet.Dot] = this.elementsMap,
vvector: VersionVector = this.vvector,
delta: Option[ORSet.DeltaOp] = this.delta): ORSet[A] =
new ORSet(elementsMap, vvector, delta)

View file

@@ -14,6 +14,7 @@ import akka.annotation.InternalApi
object PNCounter {
val empty: PNCounter = new PNCounter(GCounter.empty, GCounter.empty)
def apply(): PNCounter = empty
/**
* Java API
*/
@@ -40,10 +41,11 @@ object PNCounter {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class PNCounter private[akka] (
private[akka] val increments: GCounter, private[akka] val decrements: GCounter)
extends DeltaReplicatedData with ReplicatedDelta
with ReplicatedDataSerialization with RemovedNodePruning {
final class PNCounter private[akka] (private[akka] val increments: GCounter, private[akka] val decrements: GCounter)
extends DeltaReplicatedData
with ReplicatedDelta
with ReplicatedDataSerialization
with RemovedNodePruning {
type T = PNCounter
type D = PNCounter
@@ -157,10 +159,13 @@ final class PNCounter private[akka] (
/** Internal API */
@InternalApi private[akka] def increment(key: UniqueAddress, n: BigInt): PNCounter = change(key, n)
/** Internal API */
@InternalApi private[akka] def increment(key: UniqueAddress): PNCounter = increment(key, 1)
/** Internal API */
@InternalApi private[akka] def decrement(key: UniqueAddress, n: BigInt): PNCounter = change(key, -n)
/** Internal API */
@InternalApi private[akka] def decrement(key: UniqueAddress): PNCounter = decrement(key, 1)
@@ -171,9 +176,7 @@ final class PNCounter private[akka] (
else this
override def merge(that: PNCounter): PNCounter =
copy(
increments = that.increments.merge(this.increments),
decrements = that.decrements.merge(this.decrements))
copy(increments = that.increments.merge(this.increments), decrements = that.decrements.merge(this.decrements))
override def delta: Option[PNCounter] = {
val incrementsDelta = increments.delta match {
@@ -196,20 +199,17 @@ final class PNCounter private[akka] (
else new PNCounter(increments.resetDelta, decrements.resetDelta)
override def modifiedByNodes: Set[UniqueAddress] =
increments.modifiedByNodes union decrements.modifiedByNodes
increments.modifiedByNodes.union(decrements.modifiedByNodes)
override def needPruningFrom(removedNode: UniqueAddress): Boolean =
increments.needPruningFrom(removedNode) || decrements.needPruningFrom(removedNode)
override def prune(removedNode: UniqueAddress, collapseInto: UniqueAddress): PNCounter =
copy(
increments = increments.prune(removedNode, collapseInto),
decrements = decrements.prune(removedNode, collapseInto))
copy(increments = increments.prune(removedNode, collapseInto),
decrements = decrements.prune(removedNode, collapseInto))
override def pruningCleanup(removedNode: UniqueAddress): PNCounter =
copy(
increments = increments.pruningCleanup(removedNode),
decrements = decrements.pruningCleanup(removedNode))
copy(increments = increments.pruningCleanup(removedNode), decrements = decrements.pruningCleanup(removedNode))
private def copy(increments: GCounter = this.increments, decrements: GCounter = this.decrements): PNCounter =
new PNCounter(increments, decrements)

View file

@@ -12,6 +12,7 @@ import akka.cluster.UniqueAddress
import akka.cluster.ddata.ORMap._
object PNCounterMap {
/**
* INTERNAL API
*/
@@ -22,6 +23,7 @@ object PNCounterMap {
def empty[A]: PNCounterMap[A] = new PNCounterMap(new ORMap(ORSet.empty, Map.empty, zeroTag = PNCounterMapTag))
def apply[A](): PNCounterMap[A] = empty
/**
* Java API
*/
@@ -39,9 +41,10 @@ object PNCounterMap {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
final class PNCounterMap[A] private[akka] (
private[akka] val underlying: ORMap[A, PNCounter])
extends DeltaReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
final class PNCounterMap[A] private[akka] (private[akka] val underlying: ORMap[A, PNCounter])
extends DeltaReplicatedData
with ReplicatedDataSerialization
with RemovedNodePruning {
type T = PNCounterMap[A]
type D = ORMap.DeltaOp

View file

@ -37,7 +37,7 @@ import akka.annotation.InternalApi
case (_, _: PruningPerformed) => that
case (PruningInitialized(thisOwner, thisSeen), PruningInitialized(thatOwner, thatSeen)) =>
if (thisOwner == thatOwner)
PruningInitialized(thisOwner, thisSeen union thatSeen)
PruningInitialized(thisOwner, thisSeen.union(thatSeen))
else if (Member.addressOrdering.compare(thisOwner.address, thatOwner.address) > 0)
that
else
@ -46,4 +46,3 @@ import akka.annotation.InternalApi
def addSeen(node: Address): PruningState = this
}

View file

@ -30,6 +30,7 @@ import java.util.Optional
* it has support for delta-CRDT replication.
*/
trait ReplicatedData {
/**
* The type of the concrete implementation, e.g. `GSet[A]`.
* To be specified by subclass.
@ -97,6 +98,7 @@ trait DeltaReplicatedData extends ReplicatedData {
* The delta must implement this type.
*/
trait ReplicatedDelta extends ReplicatedData {
/**
* The empty full state. This is used when a delta is received
* and no existing full state exists on the receiving side. Then
@ -159,7 +161,8 @@ abstract class AbstractReplicatedData[A <: AbstractReplicatedData[A]] extends Re
* E.g. `class TwoPhaseSet extends AbstractDeltaReplicatedData&lt;TwoPhaseSet, TwoPhaseSet&gt;`
*/
abstract class AbstractDeltaReplicatedData[A <: AbstractDeltaReplicatedData[A, B], B <: ReplicatedDelta]
extends AbstractReplicatedData[A] with DeltaReplicatedData {
extends AbstractReplicatedData[A]
with DeltaReplicatedData {
override type D = ReplicatedDelta
@ -239,4 +242,3 @@ trait RemovedNodePruning extends ReplicatedData {
* [[akka.cluster.ddata.protobuf.ReplicatedDataSerializer]].
*/
trait ReplicatedDataSerialization extends Serializable

View file

@ -44,6 +44,7 @@ object VersionVector {
case object Before extends Ordering
case object Same extends Ordering
case object Concurrent extends Ordering
/**
* Marker to ensure that we do a full order comparison instead of bailing out early.
*/
@ -96,8 +97,7 @@ object VersionVector {
* This class is immutable, i.e. "modifying" methods return a new instance.
*/
@SerialVersionUID(1L)
sealed abstract class VersionVector
extends ReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
sealed abstract class VersionVector extends ReplicatedData with ReplicatedDataSerialization with RemovedNodePruning {
type T = VersionVector
@ -182,18 +182,23 @@ sealed abstract class VersionVector
private final def compareOnlyTo(that: VersionVector, order: Ordering): Ordering = {
def nextOrElse[A](iter: Iterator[A], default: A): A = if (iter.hasNext) iter.next() else default
def compare(i1: Iterator[(UniqueAddress, Long)], i2: Iterator[(UniqueAddress, Long)], requestedOrder: Ordering): Ordering = {
def compare(i1: Iterator[(UniqueAddress, Long)],
i2: Iterator[(UniqueAddress, Long)],
requestedOrder: Ordering): Ordering = {
@tailrec
def compareNext(nt1: (UniqueAddress, Long), nt2: (UniqueAddress, Long), currentOrder: Ordering): Ordering =
if ((requestedOrder ne FullOrder) && (currentOrder ne Same) && (currentOrder ne requestedOrder)) currentOrder
else if ((nt1 eq cmpEndMarker) && (nt2 eq cmpEndMarker)) currentOrder
// i1 is empty but i2 is not, so i1 can only be Before
else if (nt1 eq cmpEndMarker) { if (currentOrder eq After) Concurrent else Before }
else if (nt1 eq cmpEndMarker) {
if (currentOrder eq After) Concurrent else Before
}
// i2 is empty but i1 is not, so i1 can only be After
else if (nt2 eq cmpEndMarker) { if (currentOrder eq Before) Concurrent else After }
else {
else if (nt2 eq cmpEndMarker) {
if (currentOrder eq Before) Concurrent else After
} else {
// compare the nodes
val nc = nt1._1 compareTo nt2._1
val nc = nt1._1.compareTo(nt2._1)
if (nc == 0) {
// both nodes exist compare the timestamps
// same timestamp so just continue with the next nodes
@ -347,22 +352,23 @@ final case class ManyVersionVector(versions: TreeMap[UniqueAddress, Long]) exten
override def merge(that: VersionVector): VersionVector = {
if (that.isEmpty) this
else if (this.isEmpty) that
else that match {
case ManyVersionVector(vs2) =>
var mergedVersions = vs2
for ((node, time) <- versions) {
val mergedVersionsCurrentTime = mergedVersions.getOrElse(node, Timestamp.Zero)
if (time > mergedVersionsCurrentTime)
mergedVersions = mergedVersions.updated(node, time)
}
VersionVector(mergedVersions)
case OneVersionVector(n2, v2) =>
val v1 = versions.getOrElse(n2, Timestamp.Zero)
val mergedVersions =
if (v1 >= v2) versions
else versions.updated(n2, v2)
VersionVector(mergedVersions)
}
else
that match {
case ManyVersionVector(vs2) =>
var mergedVersions = vs2
for ((node, time) <- versions) {
val mergedVersionsCurrentTime = mergedVersions.getOrElse(node, Timestamp.Zero)
if (time > mergedVersionsCurrentTime)
mergedVersions = mergedVersions.updated(node, time)
}
VersionVector(mergedVersions)
case OneVersionVector(n2, v2) =>
val v1 = versions.getOrElse(n2, Timestamp.Zero)
val mergedVersions =
if (v1 >= v2) versions
else versions.updated(n2, v2)
VersionVector(mergedVersions)
}
}
override def modifiedByNodes: Set[UniqueAddress] =

View file

@ -35,6 +35,7 @@ private object ReplicatedDataSerializer {
* Generic superclass to allow to compare Entry types used in protobuf.
*/
abstract class KeyComparator[A <: GeneratedMessage] extends Comparator[A] {
/**
* Get the key from the entry. The key may be a String, Integer, Long, or Any
* @param entry The protobuf entry used with Map types
@ -57,19 +58,36 @@ private object ReplicatedDataSerializer {
}
implicit object ORMapEntryComparator extends KeyComparator[rd.ORMap.Entry] {
override def getKey(e: rd.ORMap.Entry): Any = if (e.hasStringKey) e.getStringKey else if (e.hasIntKey) e.getIntKey else if (e.hasLongKey) e.getLongKey else e.getOtherKey
override def getKey(e: rd.ORMap.Entry): Any =
if (e.hasStringKey) e.getStringKey
else if (e.hasIntKey) e.getIntKey
else if (e.hasLongKey) e.getLongKey
else e.getOtherKey
}
implicit object LWWMapEntryComparator extends KeyComparator[rd.LWWMap.Entry] {
override def getKey(e: rd.LWWMap.Entry): Any = if (e.hasStringKey) e.getStringKey else if (e.hasIntKey) e.getIntKey else if (e.hasLongKey) e.getLongKey else e.getOtherKey
override def getKey(e: rd.LWWMap.Entry): Any =
if (e.hasStringKey) e.getStringKey
else if (e.hasIntKey) e.getIntKey
else if (e.hasLongKey) e.getLongKey
else e.getOtherKey
}
implicit object PNCounterMapEntryComparator extends KeyComparator[rd.PNCounterMap.Entry] {
override def getKey(e: rd.PNCounterMap.Entry): Any = if (e.hasStringKey) e.getStringKey else if (e.hasIntKey) e.getIntKey else if (e.hasLongKey) e.getLongKey else e.getOtherKey
override def getKey(e: rd.PNCounterMap.Entry): Any =
if (e.hasStringKey) e.getStringKey
else if (e.hasIntKey) e.getIntKey
else if (e.hasLongKey) e.getLongKey
else e.getOtherKey
}
implicit object ORMultiMapEntryComparator extends KeyComparator[rd.ORMultiMap.Entry] {
override def getKey(e: rd.ORMultiMap.Entry): Any = if (e.hasStringKey) e.getStringKey else if (e.hasIntKey) e.getIntKey else if (e.hasLongKey) e.getLongKey else e.getOtherKey
override def getKey(e: rd.ORMultiMap.Entry): Any =
if (e.hasStringKey) e.getStringKey
else if (e.hasIntKey) e.getIntKey
else if (e.hasLongKey) e.getLongKey
else e.getOtherKey
}
sealed trait ProtoMapEntryWriter[Entry <: GeneratedMessage, EntryBuilder <: GeneratedMessage.Builder[EntryBuilder], Value <: GeneratedMessage] {
sealed trait ProtoMapEntryWriter[
Entry <: GeneratedMessage, EntryBuilder <: GeneratedMessage.Builder[EntryBuilder], Value <: GeneratedMessage] {
def setStringKey(builder: EntryBuilder, key: String, value: Value): Entry
def setLongKey(builder: EntryBuilder, key: Long, value: Value): Entry
def setIntKey(builder: EntryBuilder, key: Int, value: Value): Entry
@ -88,11 +106,18 @@ private object ReplicatedDataSerializer {
def getValue(entry: Entry): A
}
implicit object ORMapEntry extends ProtoMapEntryWriter[rd.ORMap.Entry, rd.ORMap.Entry.Builder, dm.OtherMessage] with ProtoMapEntryReader[rd.ORMap.Entry, dm.OtherMessage] {
override def setStringKey(builder: rd.ORMap.Entry.Builder, key: String, value: dm.OtherMessage): rd.ORMap.Entry = builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMap.Entry.Builder, key: Long, value: dm.OtherMessage): rd.ORMap.Entry = builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMap.Entry.Builder, key: Int, value: dm.OtherMessage): rd.ORMap.Entry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMap.Entry.Builder, key: dm.OtherMessage, value: dm.OtherMessage): rd.ORMap.Entry = builder.setOtherKey(key).setValue(value).build()
implicit object ORMapEntry
extends ProtoMapEntryWriter[rd.ORMap.Entry, rd.ORMap.Entry.Builder, dm.OtherMessage]
with ProtoMapEntryReader[rd.ORMap.Entry, dm.OtherMessage] {
override def setStringKey(builder: rd.ORMap.Entry.Builder, key: String, value: dm.OtherMessage): rd.ORMap.Entry =
builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMap.Entry.Builder, key: Long, value: dm.OtherMessage): rd.ORMap.Entry =
builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMap.Entry.Builder, key: Int, value: dm.OtherMessage): rd.ORMap.Entry =
builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMap.Entry.Builder,
key: dm.OtherMessage,
value: dm.OtherMessage): rd.ORMap.Entry = builder.setOtherKey(key).setValue(value).build()
override def hasStringKey(entry: rd.ORMap.Entry): Boolean = entry.hasStringKey
override def getStringKey(entry: rd.ORMap.Entry): String = entry.getStringKey
override def hasIntKey(entry: rd.ORMap.Entry): Boolean = entry.hasIntKey
@ -104,11 +129,18 @@ private object ReplicatedDataSerializer {
override def getValue(entry: rd.ORMap.Entry): dm.OtherMessage = entry.getValue
}
implicit object LWWMapEntry extends ProtoMapEntryWriter[rd.LWWMap.Entry, rd.LWWMap.Entry.Builder, rd.LWWRegister] with ProtoMapEntryReader[rd.LWWMap.Entry, rd.LWWRegister] {
override def setStringKey(builder: rd.LWWMap.Entry.Builder, key: String, value: rd.LWWRegister): rd.LWWMap.Entry = builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.LWWMap.Entry.Builder, key: Long, value: rd.LWWRegister): rd.LWWMap.Entry = builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.LWWMap.Entry.Builder, key: Int, value: rd.LWWRegister): rd.LWWMap.Entry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.LWWMap.Entry.Builder, key: OtherMessage, value: rd.LWWRegister): rd.LWWMap.Entry = builder.setOtherKey(key).setValue(value).build()
implicit object LWWMapEntry
extends ProtoMapEntryWriter[rd.LWWMap.Entry, rd.LWWMap.Entry.Builder, rd.LWWRegister]
with ProtoMapEntryReader[rd.LWWMap.Entry, rd.LWWRegister] {
override def setStringKey(builder: rd.LWWMap.Entry.Builder, key: String, value: rd.LWWRegister): rd.LWWMap.Entry =
builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.LWWMap.Entry.Builder, key: Long, value: rd.LWWRegister): rd.LWWMap.Entry =
builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.LWWMap.Entry.Builder, key: Int, value: rd.LWWRegister): rd.LWWMap.Entry =
builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.LWWMap.Entry.Builder,
key: OtherMessage,
value: rd.LWWRegister): rd.LWWMap.Entry = builder.setOtherKey(key).setValue(value).build()
override def hasStringKey(entry: rd.LWWMap.Entry): Boolean = entry.hasStringKey
override def getStringKey(entry: rd.LWWMap.Entry): String = entry.getStringKey
override def hasIntKey(entry: rd.LWWMap.Entry): Boolean = entry.hasIntKey
@ -120,11 +152,24 @@ private object ReplicatedDataSerializer {
override def getValue(entry: rd.LWWMap.Entry): rd.LWWRegister = entry.getValue
}
implicit object PNCounterMapEntry extends ProtoMapEntryWriter[rd.PNCounterMap.Entry, rd.PNCounterMap.Entry.Builder, rd.PNCounter] with ProtoMapEntryReader[rd.PNCounterMap.Entry, rd.PNCounter] {
override def setStringKey(builder: rd.PNCounterMap.Entry.Builder, key: String, value: rd.PNCounter): rd.PNCounterMap.Entry = builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.PNCounterMap.Entry.Builder, key: Long, value: rd.PNCounter): rd.PNCounterMap.Entry = builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.PNCounterMap.Entry.Builder, key: Int, value: rd.PNCounter): rd.PNCounterMap.Entry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.PNCounterMap.Entry.Builder, key: OtherMessage, value: rd.PNCounter): rd.PNCounterMap.Entry = builder.setOtherKey(key).setValue(value).build()
implicit object PNCounterMapEntry
extends ProtoMapEntryWriter[rd.PNCounterMap.Entry, rd.PNCounterMap.Entry.Builder, rd.PNCounter]
with ProtoMapEntryReader[rd.PNCounterMap.Entry, rd.PNCounter] {
override def setStringKey(builder: rd.PNCounterMap.Entry.Builder,
key: String,
value: rd.PNCounter): rd.PNCounterMap.Entry =
builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.PNCounterMap.Entry.Builder,
key: Long,
value: rd.PNCounter): rd.PNCounterMap.Entry =
builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.PNCounterMap.Entry.Builder,
key: Int,
value: rd.PNCounter): rd.PNCounterMap.Entry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.PNCounterMap.Entry.Builder,
key: OtherMessage,
value: rd.PNCounter): rd.PNCounterMap.Entry =
builder.setOtherKey(key).setValue(value).build()
override def hasStringKey(entry: rd.PNCounterMap.Entry): Boolean = entry.hasStringKey
override def getStringKey(entry: rd.PNCounterMap.Entry): String = entry.getStringKey
override def hasIntKey(entry: rd.PNCounterMap.Entry): Boolean = entry.hasIntKey
@ -136,11 +181,18 @@ private object ReplicatedDataSerializer {
override def getValue(entry: rd.PNCounterMap.Entry): rd.PNCounter = entry.getValue
}
implicit object ORMultiMapEntry extends ProtoMapEntryWriter[rd.ORMultiMap.Entry, rd.ORMultiMap.Entry.Builder, rd.ORSet] with ProtoMapEntryReader[rd.ORMultiMap.Entry, rd.ORSet] {
override def setStringKey(builder: rd.ORMultiMap.Entry.Builder, key: String, value: rd.ORSet): rd.ORMultiMap.Entry = builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMultiMap.Entry.Builder, key: Long, value: rd.ORSet): rd.ORMultiMap.Entry = builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMultiMap.Entry.Builder, key: Int, value: rd.ORSet): rd.ORMultiMap.Entry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMultiMap.Entry.Builder, key: dm.OtherMessage, value: rd.ORSet): rd.ORMultiMap.Entry = builder.setOtherKey(key).setValue(value).build()
implicit object ORMultiMapEntry
extends ProtoMapEntryWriter[rd.ORMultiMap.Entry, rd.ORMultiMap.Entry.Builder, rd.ORSet]
with ProtoMapEntryReader[rd.ORMultiMap.Entry, rd.ORSet] {
override def setStringKey(builder: rd.ORMultiMap.Entry.Builder, key: String, value: rd.ORSet): rd.ORMultiMap.Entry =
builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMultiMap.Entry.Builder, key: Long, value: rd.ORSet): rd.ORMultiMap.Entry =
builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMultiMap.Entry.Builder, key: Int, value: rd.ORSet): rd.ORMultiMap.Entry =
builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMultiMap.Entry.Builder,
key: dm.OtherMessage,
value: rd.ORSet): rd.ORMultiMap.Entry = builder.setOtherKey(key).setValue(value).build()
override def hasStringKey(entry: rd.ORMultiMap.Entry): Boolean = entry.hasStringKey
override def getStringKey(entry: rd.ORMultiMap.Entry): String = entry.getStringKey
override def hasIntKey(entry: rd.ORMultiMap.Entry): Boolean = entry.hasIntKey
@ -152,11 +204,25 @@ private object ReplicatedDataSerializer {
override def getValue(entry: rd.ORMultiMap.Entry): rd.ORSet = entry.getValue
}
implicit object ORMapDeltaGroupEntry extends ProtoMapEntryWriter[rd.ORMapDeltaGroup.MapEntry, rd.ORMapDeltaGroup.MapEntry.Builder, dm.OtherMessage] with ProtoMapEntryReader[rd.ORMapDeltaGroup.MapEntry, dm.OtherMessage] {
override def setStringKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder, key: String, value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry = builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder, key: Long, value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry = builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder, key: Int, value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry = builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder, key: dm.OtherMessage, value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry = builder.setOtherKey(key).setValue(value).build()
implicit object ORMapDeltaGroupEntry
extends ProtoMapEntryWriter[rd.ORMapDeltaGroup.MapEntry, rd.ORMapDeltaGroup.MapEntry.Builder, dm.OtherMessage]
with ProtoMapEntryReader[rd.ORMapDeltaGroup.MapEntry, dm.OtherMessage] {
override def setStringKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder,
key: String,
value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry =
builder.setStringKey(key).setValue(value).build()
override def setLongKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder,
key: Long,
value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry =
builder.setLongKey(key).setValue(value).build()
override def setIntKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder,
key: Int,
value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry =
builder.setIntKey(key).setValue(value).build()
override def setOtherKey(builder: rd.ORMapDeltaGroup.MapEntry.Builder,
key: dm.OtherMessage,
value: dm.OtherMessage): rd.ORMapDeltaGroup.MapEntry =
builder.setOtherKey(key).setValue(value).build()
override def hasStringKey(entry: rd.ORMapDeltaGroup.MapEntry): Boolean = entry.hasStringKey
override def getStringKey(entry: rd.ORMapDeltaGroup.MapEntry): String = entry.getStringKey
override def hasIntKey(entry: rd.ORMapDeltaGroup.MapEntry): Boolean = entry.hasIntKey
@ -174,7 +240,9 @@ private object ReplicatedDataSerializer {
* Protobuf serializer of ReplicatedData.
*/
class ReplicatedDataSerializer(val system: ExtendedActorSystem)
extends SerializerWithStringManifest with SerializationSupport with BaseSerializer {
extends SerializerWithStringManifest
with SerializationSupport
with BaseSerializer {
import ReplicatedDataSerializer._
@ -232,7 +300,6 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
ORMultiMapManifest -> multimapFromBinary,
DeletedDataManifest -> (_ => DeletedData),
VersionVectorManifest -> versionVectorFromBinary,
GSetKeyManifest -> (bytes => GSetKey(keyIdFromBinary(bytes))),
ORSetKeyManifest -> (bytes => ORSetKey(keyIdFromBinary(bytes))),
FlagKeyManifest -> (bytes => FlagKey(keyIdFromBinary(bytes))),
@ -264,20 +331,20 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
case DeletedData => DeletedDataManifest
case _: VersionVector => VersionVectorManifest
case _: ORSetKey[_] => ORSetKeyManifest
case _: GSetKey[_] => GSetKeyManifest
case _: GCounterKey => GCounterKeyManifest
case _: PNCounterKey => PNCounterKeyManifest
case _: FlagKey => FlagKeyManifest
case _: LWWRegisterKey[_] => LWWRegisterKeyManifest
case _: ORMapKey[_, _] => ORMapKeyManifest
case _: LWWMapKey[_, _] => LWWMapKeyManifest
case _: PNCounterMapKey[_] => PNCounterMapKeyManifest
case _: ORMultiMapKey[_, _] => ORMultiMapKeyManifest
case _: ORSetKey[_] => ORSetKeyManifest
case _: GSetKey[_] => GSetKeyManifest
case _: GCounterKey => GCounterKeyManifest
case _: PNCounterKey => PNCounterKeyManifest
case _: FlagKey => FlagKeyManifest
case _: LWWRegisterKey[_] => LWWRegisterKeyManifest
case _: ORMapKey[_, _] => ORMapKeyManifest
case _: LWWMapKey[_, _] => LWWMapKeyManifest
case _: PNCounterMapKey[_] => PNCounterMapKeyManifest
case _: ORMultiMapKey[_, _] => ORMultiMapKeyManifest
case _: ORSet.DeltaGroup[_] => ORSetDeltaGroupManifest
case _: ORMap.DeltaGroup[_, _] => ORMapDeltaGroupManifest
case _: ORSet.FullStateDeltaOp[_] => ORSetFullManifest
case _: ORSet.DeltaGroup[_] => ORSetDeltaGroupManifest
case _: ORMap.DeltaGroup[_, _] => ORMapDeltaGroupManifest
case _: ORSet.FullStateDeltaOp[_] => ORSetFullManifest
case _ =>
throw new IllegalArgumentException(s"Can't serialize object of type ${obj.getClass} in [${getClass.getName}]")
@ -313,8 +380,9 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
fromBinaryMap.get(manifest) match {
case Some(f) => f(bytes)
case None => throw new NotSerializableException(
s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]")
case None =>
throw new NotSerializableException(
s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]")
}
def gsetToProto(gset: GSet[_]): rd.GSet = {
@ -361,10 +429,10 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
def gsetFromProto(gset: rd.GSet): GSet[Any] = {
val elements: Iterator[Any] = {
gset.getStringElementsList.iterator.asScala ++
gset.getIntElementsList.iterator.asScala ++
gset.getLongElementsList.iterator.asScala ++
gset.getOtherElementsList.iterator.asScala.map(otherMessageFromProto) ++
gset.getActorRefElementsList.iterator.asScala.map(resolveActorRef)
gset.getIntElementsList.iterator.asScala ++
gset.getLongElementsList.iterator.asScala ++
gset.getOtherElementsList.iterator.asScala.map(otherMessageFromProto) ++
gset.getActorRefElementsList.iterator.asScala.map(resolveActorRef)
}
GSet(elements.toSet)
}
@ -451,9 +519,7 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
private def orsetDeltaGroupToProto(deltaGroup: ORSet.DeltaGroup[_]): rd.ORSetDeltaGroup = {
def createEntry(opType: rd.ORSetDeltaOp, u: ORSet[_]) = {
rd.ORSetDeltaGroup.Entry.newBuilder()
.setOperation(opType)
.setUnderlying(orsetToProto(u))
rd.ORSetDeltaGroup.Entry.newBuilder().setOperation(opType).setUnderlying(orsetToProto(u))
}
val b = rd.ORSetDeltaGroup.newBuilder()
@ -473,26 +539,28 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
private def orsetDeltaGroupFromBinary(bytes: Array[Byte]): ORSet.DeltaGroup[Any] = {
val deltaGroup = rd.ORSetDeltaGroup.parseFrom(bytes)
val ops: Vector[ORSet.DeltaOp] =
deltaGroup.getEntriesList.asScala.iterator.map { entry =>
if (entry.getOperation == rd.ORSetDeltaOp.Add)
ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying))
else if (entry.getOperation == rd.ORSetDeltaOp.Remove)
ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying))
else if (entry.getOperation == rd.ORSetDeltaOp.Full)
ORSet.FullStateDeltaOp(orsetFromProto(entry.getUnderlying))
else
throw new NotSerializableException(s"Unknow ORSet delta operation ${entry.getOperation}")
}.to(immutable.Vector)
deltaGroup.getEntriesList.asScala.iterator
.map { entry =>
if (entry.getOperation == rd.ORSetDeltaOp.Add)
ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying))
else if (entry.getOperation == rd.ORSetDeltaOp.Remove)
ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying))
else if (entry.getOperation == rd.ORSetDeltaOp.Full)
ORSet.FullStateDeltaOp(orsetFromProto(entry.getUnderlying))
else
throw new NotSerializableException(s"Unknow ORSet delta operation ${entry.getOperation}")
}
.to(immutable.Vector)
ORSet.DeltaGroup(ops)
}
def orsetFromProto(orset: rd.ORSet): ORSet[Any] = {
val elements: Iterator[Any] = {
orset.getStringElementsList.iterator.asScala ++
orset.getIntElementsList.iterator.asScala ++
orset.getLongElementsList.iterator.asScala ++
orset.getOtherElementsList.iterator.asScala.map(otherMessageFromProto) ++
orset.getActorRefElementsList.iterator.asScala.map(resolveActorRef)
orset.getIntElementsList.iterator.asScala ++
orset.getLongElementsList.iterator.asScala ++
orset.getOtherElementsList.iterator.asScala.map(otherMessageFromProto) ++
orset.getActorRefElementsList.iterator.asScala.map(resolveActorRef)
}
val dots = orset.getDotsList.asScala.map(versionVectorFromProto).iterator
@ -511,26 +579,30 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
if (flag.getEnabled) Flag.Enabled else Flag.Disabled
def lwwRegisterToProto(lwwRegister: LWWRegister[_]): rd.LWWRegister =
rd.LWWRegister.newBuilder().
setTimestamp(lwwRegister.timestamp).
setNode(uniqueAddressToProto(lwwRegister.node)).
setState(otherMessageToProto(lwwRegister.value)).
build()
rd.LWWRegister
.newBuilder()
.setTimestamp(lwwRegister.timestamp)
.setNode(uniqueAddressToProto(lwwRegister.node))
.setState(otherMessageToProto(lwwRegister.value))
.build()
def lwwRegisterFromBinary(bytes: Array[Byte]): LWWRegister[Any] =
lwwRegisterFromProto(rd.LWWRegister.parseFrom(bytes))
def lwwRegisterFromProto(lwwRegister: rd.LWWRegister): LWWRegister[Any] =
new LWWRegister(
uniqueAddressFromProto(lwwRegister.getNode),
otherMessageFromProto(lwwRegister.getState),
lwwRegister.getTimestamp)
new LWWRegister(uniqueAddressFromProto(lwwRegister.getNode),
otherMessageFromProto(lwwRegister.getState),
lwwRegister.getTimestamp)
def gcounterToProto(gcounter: GCounter): rd.GCounter = {
val b = rd.GCounter.newBuilder()
gcounter.state.toVector.sortBy { case (address, _) => address }.foreach {
case (address, value) => b.addEntries(rd.GCounter.Entry.newBuilder().
setNode(uniqueAddressToProto(address)).setValue(ByteString.copyFrom(value.toByteArray)))
case (address, value) =>
b.addEntries(
rd.GCounter.Entry
.newBuilder()
.setNode(uniqueAddressToProto(address))
.setValue(ByteString.copyFrom(value.toByteArray)))
}
b.build()
}
@ -539,82 +611,101 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
gcounterFromProto(rd.GCounter.parseFrom(bytes))
def gcounterFromProto(gcounter: rd.GCounter): GCounter = {
new GCounter(state = gcounter.getEntriesList.asScala.iterator.map(entry =>
uniqueAddressFromProto(entry.getNode) -> BigInt(entry.getValue.toByteArray)).toMap)
new GCounter(
state = gcounter.getEntriesList.asScala.iterator
.map(entry => uniqueAddressFromProto(entry.getNode) -> BigInt(entry.getValue.toByteArray))
.toMap)
}
def pncounterToProto(pncounter: PNCounter): rd.PNCounter =
rd.PNCounter.newBuilder().
setIncrements(gcounterToProto(pncounter.increments)).
setDecrements(gcounterToProto(pncounter.decrements)).
build()
rd.PNCounter
.newBuilder()
.setIncrements(gcounterToProto(pncounter.increments))
.setDecrements(gcounterToProto(pncounter.decrements))
.build()
def pncounterFromBinary(bytes: Array[Byte]): PNCounter =
pncounterFromProto(rd.PNCounter.parseFrom(bytes))
def pncounterFromProto(pncounter: rd.PNCounter): PNCounter = {
new PNCounter(
increments = gcounterFromProto(pncounter.getIncrements),
decrements = gcounterFromProto(pncounter.getDecrements))
new PNCounter(increments = gcounterFromProto(pncounter.getIncrements),
decrements = gcounterFromProto(pncounter.getDecrements))
}
/*
* Convert a Map[A, B] to an Iterable[Entry] where Entry is the protobuf map entry.
*/
private def getEntries[IKey, IValue, EntryBuilder <: GeneratedMessage.Builder[EntryBuilder], PEntry <: GeneratedMessage, PValue <: GeneratedMessage](input: Map[IKey, IValue], createBuilder: () => EntryBuilder, valueConverter: IValue => PValue)(implicit comparator: Comparator[PEntry], eh: ProtoMapEntryWriter[PEntry, EntryBuilder, PValue]): java.lang.Iterable[PEntry] = {
private def getEntries[IKey,
IValue,
EntryBuilder <: GeneratedMessage.Builder[EntryBuilder],
PEntry <: GeneratedMessage,
PValue <: GeneratedMessage](input: Map[IKey, IValue],
createBuilder: () => EntryBuilder,
valueConverter: IValue => PValue)(
implicit comparator: Comparator[PEntry],
eh: ProtoMapEntryWriter[PEntry, EntryBuilder, PValue]): java.lang.Iterable[PEntry] = {
// The resulting Iterable needs to be ordered deterministically in order to create same signature upon serializing same data
val protoEntries = new TreeSet[PEntry](comparator)
input.foreach {
case (key: String, value) => protoEntries.add(eh.setStringKey(createBuilder(), key, valueConverter(value)))
case (key: Int, value) => protoEntries.add(eh.setIntKey(createBuilder(), key, valueConverter(value)))
case (key: Long, value) => protoEntries.add(eh.setLongKey(createBuilder(), key, valueConverter(value)))
case (key, value) => protoEntries.add(eh.setOtherKey(createBuilder(), otherMessageToProto(key), valueConverter(value)))
case (key, value) =>
protoEntries.add(eh.setOtherKey(createBuilder(), otherMessageToProto(key), valueConverter(value)))
}
protoEntries
}
def ormapToProto(ormap: ORMap[_, _]): rd.ORMap = {
val ormapBuilder = rd.ORMap.newBuilder()
val entries: jl.Iterable[rd.ORMap.Entry] = getEntries(ormap.values, rd.ORMap.Entry.newBuilder _, otherMessageToProto)
val entries: jl.Iterable[rd.ORMap.Entry] =
getEntries(ormap.values, rd.ORMap.Entry.newBuilder _, otherMessageToProto)
ormapBuilder.setKeys(orsetToProto(ormap.keys)).addAllEntries(entries).build()
}
def ormapFromBinary(bytes: Array[Byte]): ORMap[Any, ReplicatedData] =
ormapFromProto(rd.ORMap.parseFrom(decompress(bytes)))
def mapTypeFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage, B <: ReplicatedData](input: util.List[PEntry], valueCreator: A => B)(implicit eh: ProtoMapEntryReader[PEntry, A]): Map[Any, B] = {
def mapTypeFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage, B <: ReplicatedData](
input: util.List[PEntry],
valueCreator: A => B)(implicit eh: ProtoMapEntryReader[PEntry, A]): Map[Any, B] = {
input.asScala.map { entry =>
if (eh.hasStringKey(entry)) eh.getStringKey(entry) -> valueCreator(eh.getValue(entry))
else if (eh.hasIntKey(entry)) eh.getIntKey(entry) -> valueCreator(eh.getValue(entry))
else if (eh.hasLongKey(entry)) eh.getLongKey(entry) -> valueCreator(eh.getValue(entry))
else if (eh.hasOtherKey(entry)) otherMessageFromProto(eh.getOtherKey(entry)) -> valueCreator(eh.getValue(entry))
else throw new IllegalArgumentException(s"Can't deserialize ${entry.getClass} because it does not have any key in the serialized message.")
else
throw new IllegalArgumentException(
s"Can't deserialize ${entry.getClass} because it does not have any key in the serialized message.")
}.toMap
}
def ormapFromProto(ormap: rd.ORMap): ORMap[Any, ReplicatedData] = {
val entries = mapTypeFromProto(ormap.getEntriesList, (v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedData])
new ORMap(
keys = orsetFromProto(ormap.getKeys),
entries,
ORMap.VanillaORMapTag)
val entries = mapTypeFromProto(ormap.getEntriesList,
(v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedData])
new ORMap(keys = orsetFromProto(ormap.getKeys), entries, ORMap.VanillaORMapTag)
}
def singleMapEntryFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage, B <: ReplicatedData](input: util.List[PEntry], valueCreator: A => B)(implicit eh: ProtoMapEntryReader[PEntry, A]): Map[Any, B] = {
def singleMapEntryFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage, B <: ReplicatedData](
input: util.List[PEntry],
valueCreator: A => B)(implicit eh: ProtoMapEntryReader[PEntry, A]): Map[Any, B] = {
val map = mapTypeFromProto(input, valueCreator)
if (map.size > 1)
throw new IllegalArgumentException(s"Can't deserialize the key/value pair in the ORMap delta - too many pairs on the wire")
throw new IllegalArgumentException(
s"Can't deserialize the key/value pair in the ORMap delta - too many pairs on the wire")
else
map
}
def singleKeyEntryFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage](entryOption: Option[PEntry])(implicit eh: ProtoMapEntryReader[PEntry, A]): Any =
def singleKeyEntryFromProto[PEntry <: GeneratedMessage, A <: GeneratedMessage](entryOption: Option[PEntry])(
implicit eh: ProtoMapEntryReader[PEntry, A]): Any =
entryOption match {
case Some(entry) => if (eh.hasStringKey(entry)) eh.getStringKey(entry)
else if (eh.hasIntKey(entry)) eh.getIntKey(entry)
else if (eh.hasLongKey(entry)) eh.getLongKey(entry)
else if (eh.hasOtherKey(entry)) otherMessageFromProto(eh.getOtherKey(entry))
else throw new IllegalArgumentException(s"Can't deserialize the key in the ORMap delta")
case Some(entry) =>
if (eh.hasStringKey(entry)) eh.getStringKey(entry)
else if (eh.hasIntKey(entry)) eh.getIntKey(entry)
else if (eh.hasLongKey(entry)) eh.getLongKey(entry)
else if (eh.hasOtherKey(entry)) otherMessageFromProto(eh.getOtherKey(entry))
else throw new IllegalArgumentException(s"Can't deserialize the key in the ORMap delta")
case _ => throw new IllegalArgumentException(s"Can't deserialize the key in the ORMap delta")
}
@ -656,12 +747,12 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
// this can be made client-extendable in the same way as Http codes in Spray are
private def zeroTagFromCode(code: Int) = code match {
case ORMap.VanillaORMapTag.value => ORMap.VanillaORMapTag
case PNCounterMap.PNCounterMapTag.value => PNCounterMap.PNCounterMapTag
case ORMultiMap.ORMultiMapTag.value => ORMultiMap.ORMultiMapTag
case ORMap.VanillaORMapTag.value => ORMap.VanillaORMapTag
case PNCounterMap.PNCounterMapTag.value => PNCounterMap.PNCounterMapTag
case ORMultiMap.ORMultiMapTag.value => ORMultiMap.ORMultiMapTag
case ORMultiMap.ORMultiMapWithValueDeltasTag.value => ORMultiMap.ORMultiMapWithValueDeltasTag
case LWWMap.LWWMapTag.value => LWWMap.LWWMapTag
case _ => throw new IllegalArgumentException("Invalid ZeroTag code")
case LWWMap.LWWMapTag.value => LWWMap.LWWMapTag
case _ => throw new IllegalArgumentException("Invalid ZeroTag code")
}
private def ormapDeltaGroupFromBinary(bytes: Array[Byte]): ORMap.DeltaGroup[Any, ReplicatedData] = {
@ -671,21 +762,33 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
private def ormapDeltaGroupOpsFromBinary(bytes: Array[Byte]): scala.collection.immutable.IndexedSeq[ORMap.DeltaOp] = {
val deltaGroup = rd.ORMapDeltaGroup.parseFrom(bytes)
val ops: Vector[ORMap.DeltaOp] =
deltaGroup.getEntriesList.asScala.iterator.map { entry =>
if (entry.getOperation == rd.ORMapDeltaOp.ORMapPut) {
val map = singleMapEntryFromProto(entry.getEntryDataList, (v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedData])
ORMap.PutDeltaOp(ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying)), map.head, zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapRemove) {
ORMap.RemoveDeltaOp(ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying)), zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapRemoveKey) {
val elem = singleKeyEntryFromProto(entry.getEntryDataList.asScala.headOption)
ORMap.RemoveKeyDeltaOp(ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying)), elem, zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapUpdate) {
val map = mapTypeFromProto(entry.getEntryDataList, (v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedDelta])
ORMap.UpdateDeltaOp(ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying)), map, zeroTagFromCode(entry.getZeroTag))
} else
throw new NotSerializableException(s"Unknown ORMap delta operation ${entry.getOperation}")
}.to(immutable.Vector)
deltaGroup.getEntriesList.asScala.iterator
.map { entry =>
if (entry.getOperation == rd.ORMapDeltaOp.ORMapPut) {
val map =
singleMapEntryFromProto(entry.getEntryDataList,
(v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedData])
ORMap.PutDeltaOp(ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying)),
map.head,
zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapRemove) {
ORMap.RemoveDeltaOp(ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying)),
zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapRemoveKey) {
val elem = singleKeyEntryFromProto(entry.getEntryDataList.asScala.headOption)
ORMap.RemoveKeyDeltaOp(ORSet.RemoveDeltaOp(orsetFromProto(entry.getUnderlying)),
elem,
zeroTagFromCode(entry.getZeroTag))
} else if (entry.getOperation == rd.ORMapDeltaOp.ORMapUpdate) {
val map = mapTypeFromProto(entry.getEntryDataList,
(v: dm.OtherMessage) => otherMessageFromProto(v).asInstanceOf[ReplicatedDelta])
ORMap.UpdateDeltaOp(ORSet.AddDeltaOp(orsetFromProto(entry.getUnderlying)),
map,
zeroTagFromCode(entry.getZeroTag))
} else
throw new NotSerializableException(s"Unknown ORMap delta operation ${entry.getOperation}")
}
.to(immutable.Vector)
ops
}
@ -714,15 +817,25 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
if (m.size > 1 && opType != rd.ORMapDeltaOp.ORMapUpdate)
throw new IllegalArgumentException("Invalid size of ORMap delta map")
else {
val builder = rd.ORMapDeltaGroup.Entry.newBuilder()
.setOperation(opType)
.setUnderlying(orsetToProto(u))
.setZeroTag(zt)
val builder =
rd.ORMapDeltaGroup.Entry.newBuilder().setOperation(opType).setUnderlying(orsetToProto(u)).setZeroTag(zt)
m.foreach {
case (key: String, value) => builder.addEntryData(rd.ORMapDeltaGroup.MapEntry.newBuilder().setStringKey(key).setValue(otherMessageToProto(value)).build())
case (key: Int, value) => builder.addEntryData(rd.ORMapDeltaGroup.MapEntry.newBuilder().setIntKey(key).setValue(otherMessageToProto(value)).build())
case (key: Long, value) => builder.addEntryData(rd.ORMapDeltaGroup.MapEntry.newBuilder().setLongKey(key).setValue(otherMessageToProto(value)).build())
case (key, value) => builder.addEntryData(rd.ORMapDeltaGroup.MapEntry.newBuilder().setOtherKey(otherMessageToProto(key)).setValue(otherMessageToProto(value)).build())
case (key: String, value) =>
builder.addEntryData(
rd.ORMapDeltaGroup.MapEntry.newBuilder().setStringKey(key).setValue(otherMessageToProto(value)).build())
case (key: Int, value) =>
builder.addEntryData(
rd.ORMapDeltaGroup.MapEntry.newBuilder().setIntKey(key).setValue(otherMessageToProto(value)).build())
case (key: Long, value) =>
builder.addEntryData(
rd.ORMapDeltaGroup.MapEntry.newBuilder().setLongKey(key).setValue(otherMessageToProto(value)).build())
case (key, value) =>
builder.addEntryData(
rd.ORMapDeltaGroup.MapEntry
.newBuilder()
.setOtherKey(otherMessageToProto(key))
.setValue(otherMessageToProto(value))
.build())
}
builder
}
@ -736,10 +849,8 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
case key: Long => entryDataBuilder.setLongKey(key)
case key => entryDataBuilder.setOtherKey(otherMessageToProto(key))
}
val builder = rd.ORMapDeltaGroup.Entry.newBuilder()
.setOperation(opType)
.setUnderlying(orsetToProto(u))
.setZeroTag(zt)
val builder =
rd.ORMapDeltaGroup.Entry.newBuilder().setOperation(opType).setUnderlying(orsetToProto(u)).setZeroTag(zt)
builder.addEntryData(entryDataBuilder.build())
builder
}
@ -747,13 +858,23 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
val b = rd.ORMapDeltaGroup.newBuilder()
deltaGroupOps.foreach {
case ORMap.PutDeltaOp(op, pair, zt) =>
b.addEntries(createEntry(rd.ORMapDeltaOp.ORMapPut, op.asInstanceOf[ORSet.AddDeltaOp[_]].underlying, Map(pair), zt.value))
b.addEntries(
createEntry(rd.ORMapDeltaOp.ORMapPut, op.asInstanceOf[ORSet.AddDeltaOp[_]].underlying, Map(pair), zt.value))
case ORMap.RemoveDeltaOp(op, zt) =>
b.addEntries(createEntry(rd.ORMapDeltaOp.ORMapRemove, op.asInstanceOf[ORSet.RemoveDeltaOp[_]].underlying, Map.empty, zt.value))
b.addEntries(
createEntry(rd.ORMapDeltaOp.ORMapRemove,
op.asInstanceOf[ORSet.RemoveDeltaOp[_]].underlying,
Map.empty,
zt.value))
case ORMap.RemoveKeyDeltaOp(op, k, zt) =>
b.addEntries(createEntryWithKey(rd.ORMapDeltaOp.ORMapRemoveKey, op.asInstanceOf[ORSet.RemoveDeltaOp[_]].underlying, k, zt.value))
b.addEntries(
createEntryWithKey(rd.ORMapDeltaOp.ORMapRemoveKey,
op.asInstanceOf[ORSet.RemoveDeltaOp[_]].underlying,
k,
zt.value))
case ORMap.UpdateDeltaOp(op, m, zt) =>
b.addEntries(createEntry(rd.ORMapDeltaOp.ORMapUpdate, op.asInstanceOf[ORSet.AddDeltaOp[_]].underlying, m, zt.value))
b.addEntries(
createEntry(rd.ORMapDeltaOp.ORMapUpdate, op.asInstanceOf[ORSet.AddDeltaOp[_]].underlying, m, zt.value))
case ORMap.DeltaGroup(u) =>
throw new IllegalArgumentException("ORMap.DeltaGroup should not be nested")
}
@ -762,7 +883,8 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
def lwwmapToProto(lwwmap: LWWMap[_, _]): rd.LWWMap = {
val lwwmapBuilder = rd.LWWMap.newBuilder()
val entries: jl.Iterable[rd.LWWMap.Entry] = getEntries(lwwmap.underlying.entries, rd.LWWMap.Entry.newBuilder _, lwwRegisterToProto)
val entries: jl.Iterable[rd.LWWMap.Entry] =
getEntries(lwwmap.underlying.entries, rd.LWWMap.Entry.newBuilder _, lwwRegisterToProto)
lwwmapBuilder.setKeys(orsetToProto(lwwmap.underlying.keys)).addAllEntries(entries).build()
}
@ -771,14 +893,13 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
def lwwmapFromProto(lwwmap: rd.LWWMap): LWWMap[Any, Any] = {
val entries = mapTypeFromProto(lwwmap.getEntriesList, lwwRegisterFromProto)
new LWWMap(new ORMap(
keys = orsetFromProto(lwwmap.getKeys),
entries, LWWMap.LWWMapTag))
new LWWMap(new ORMap(keys = orsetFromProto(lwwmap.getKeys), entries, LWWMap.LWWMapTag))
}
def pncountermapToProto(pncountermap: PNCounterMap[_]): rd.PNCounterMap = {
val pncountermapBuilder = rd.PNCounterMap.newBuilder()
val entries: jl.Iterable[rd.PNCounterMap.Entry] = getEntries(pncountermap.underlying.entries, rd.PNCounterMap.Entry.newBuilder _, pncounterToProto)
val entries: jl.Iterable[rd.PNCounterMap.Entry] =
getEntries(pncountermap.underlying.entries, rd.PNCounterMap.Entry.newBuilder _, pncounterToProto)
pncountermapBuilder.setKeys(orsetToProto(pncountermap.underlying.keys)).addAllEntries(entries).build()
}
@ -787,14 +908,13 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
def pncountermapFromProto(pncountermap: rd.PNCounterMap): PNCounterMap[_] = {
val entries = mapTypeFromProto(pncountermap.getEntriesList, pncounterFromProto)
new PNCounterMap(new ORMap(
keys = orsetFromProto(pncountermap.getKeys),
entries, PNCounterMap.PNCounterMapTag))
new PNCounterMap(new ORMap(keys = orsetFromProto(pncountermap.getKeys), entries, PNCounterMap.PNCounterMapTag))
}
def multimapToProto(multimap: ORMultiMap[_, _]): rd.ORMultiMap = {
val ormultimapBuilder = rd.ORMultiMap.newBuilder()
val entries: jl.Iterable[rd.ORMultiMap.Entry] = getEntries(multimap.underlying.entries, rd.ORMultiMap.Entry.newBuilder _, orsetToProto)
val entries: jl.Iterable[rd.ORMultiMap.Entry] =
getEntries(multimap.underlying.entries, rd.ORMultiMap.Entry.newBuilder _, orsetToProto)
ormultimapBuilder.setKeys(orsetToProto(multimap.underlying.keys)).addAllEntries(entries)
if (multimap.withValueDeltas)
ormultimapBuilder.setWithValueDeltas(true)
@ -806,18 +926,17 @@ class ReplicatedDataSerializer(val system: ExtendedActorSystem)
def multimapFromProto(multimap: rd.ORMultiMap): ORMultiMap[Any, Any] = {
val entries = mapTypeFromProto(multimap.getEntriesList, orsetFromProto)
val withValueDeltas = if (multimap.hasWithValueDeltas)
multimap.getWithValueDeltas
else false
new ORMultiMap(
new ORMap(
keys = orsetFromProto(multimap.getKeys),
entries,
if (withValueDeltas)
ORMultiMap.ORMultiMapWithValueDeltasTag
else
ORMultiMap.ORMultiMapTag),
withValueDeltas)
val withValueDeltas =
if (multimap.hasWithValueDeltas)
multimap.getWithValueDeltas
else false
new ORMultiMap(new ORMap(keys = orsetFromProto(multimap.getKeys),
entries,
if (withValueDeltas)
ORMultiMap.ORMultiMapWithValueDeltasTag
else
ORMultiMap.ORMultiMapTag),
withValueDeltas)
}
def keyIdToBinary(id: String): Array[Byte] =

View file

@ -147,11 +147,14 @@ import akka.util.ccompat._
* Protobuf serializer of ReplicatorMessage messages.
*/
class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
extends SerializerWithStringManifest with SerializationSupport with BaseSerializer {
extends SerializerWithStringManifest
with SerializationSupport
with BaseSerializer {
import ReplicatorMessageSerializer.SmallCache
private val cacheTimeToLive = system.settings.config.getDuration(
"akka.cluster.distributed-data.serializer-cache-time-to-live", TimeUnit.MILLISECONDS).millis
private val cacheTimeToLive = system.settings.config
.getDuration("akka.cluster.distributed-data.serializer-cache-time-to-live", TimeUnit.MILLISECONDS)
.millis
private val readCache = new SmallCache[Read, Array[Byte]](4, cacheTimeToLive, m => readToProto(m).toByteArray)
private val writeCache = new SmallCache[Write, Array[Byte]](4, cacheTimeToLive, m => writeToProto(m).toByteArray)
system.scheduler.schedule(cacheTimeToLive, cacheTimeToLive / 2) {
@ -181,25 +184,27 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
val DeltaPropagationManifest = "Q"
val DeltaNackManifest = "R"
private val fromBinaryMap = collection.immutable.HashMap[String, Array[Byte] => AnyRef](
GetManifest -> getFromBinary,
GetSuccessManifest -> getSuccessFromBinary,
NotFoundManifest -> notFoundFromBinary,
GetFailureManifest -> getFailureFromBinary,
SubscribeManifest -> subscribeFromBinary,
UnsubscribeManifest -> unsubscribeFromBinary,
ChangedManifest -> changedFromBinary,
DataEnvelopeManifest -> dataEnvelopeFromBinary,
WriteManifest -> writeFromBinary,
WriteAckManifest -> (_ => WriteAck),
ReadManifest -> readFromBinary,
ReadResultManifest -> readResultFromBinary,
StatusManifest -> statusFromBinary,
GossipManifest -> gossipFromBinary,
DeltaPropagationManifest -> deltaPropagationFromBinary,
WriteNackManifest -> (_ => WriteNack),
DeltaNackManifest -> (_ => DeltaNack),
DurableDataEnvelopeManifest -> durableDataEnvelopeFromBinary)
private val fromBinaryMap = collection.immutable.HashMap[String, Array[Byte] => AnyRef](GetManifest -> getFromBinary,
GetSuccessManifest -> getSuccessFromBinary,
NotFoundManifest -> notFoundFromBinary,
GetFailureManifest -> getFailureFromBinary,
SubscribeManifest -> subscribeFromBinary,
UnsubscribeManifest -> unsubscribeFromBinary,
ChangedManifest -> changedFromBinary,
DataEnvelopeManifest -> dataEnvelopeFromBinary,
WriteManifest -> writeFromBinary,
WriteAckManifest -> (_ =>
WriteAck),
ReadManifest -> readFromBinary,
ReadResultManifest -> readResultFromBinary,
StatusManifest -> statusFromBinary,
GossipManifest -> gossipFromBinary,
DeltaPropagationManifest -> deltaPropagationFromBinary,
WriteNackManifest -> (_ =>
WriteNack),
DeltaNackManifest -> (_ =>
DeltaNack),
DurableDataEnvelopeManifest -> durableDataEnvelopeFromBinary)
override def manifest(obj: AnyRef): String = obj match {
case _: DataEnvelope => DataEnvelopeManifest
@ -250,8 +255,9 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
fromBinaryMap.get(manifest) match {
case Some(f) => f(bytes)
case None => throw new NotSerializableException(
s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]")
case None =>
throw new NotSerializableException(
s"Unimplemented deserialization of message with manifest [$manifest] in [${getClass.getName}]")
}
private def statusToProto(status: Status): dm.Status = {
@ -259,48 +265,41 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
b.setChunk(status.chunk).setTotChunks(status.totChunks)
val entries = status.digests.foreach {
case (key, digest) =>
b.addEntries(dm.Status.Entry.newBuilder().
setKey(key).
setDigest(ByteString.copyFrom(digest.toArray)))
b.addEntries(dm.Status.Entry.newBuilder().setKey(key).setDigest(ByteString.copyFrom(digest.toArray)))
}
b.build()
}
private def statusFromBinary(bytes: Array[Byte]): Status = {
val status = dm.Status.parseFrom(bytes)
Status(
status.getEntriesList.asScala.iterator.map(e =>
e.getKey -> AkkaByteString(e.getDigest.toByteArray())).toMap,
status.getChunk, status.getTotChunks)
Status(status.getEntriesList.asScala.iterator.map(e => e.getKey -> AkkaByteString(e.getDigest.toByteArray())).toMap,
status.getChunk,
status.getTotChunks)
}
private def gossipToProto(gossip: Gossip): dm.Gossip = {
val b = dm.Gossip.newBuilder().setSendBack(gossip.sendBack)
val entries = gossip.updatedData.foreach {
case (key, data) =>
b.addEntries(dm.Gossip.Entry.newBuilder().
setKey(key).
setEnvelope(dataEnvelopeToProto(data)))
b.addEntries(dm.Gossip.Entry.newBuilder().setKey(key).setEnvelope(dataEnvelopeToProto(data)))
}
b.build()
}
private def gossipFromBinary(bytes: Array[Byte]): Gossip = {
val gossip = dm.Gossip.parseFrom(decompress(bytes))
Gossip(
gossip.getEntriesList.asScala.iterator.map(e =>
e.getKey -> dataEnvelopeFromProto(e.getEnvelope)).toMap,
sendBack = gossip.getSendBack)
Gossip(gossip.getEntriesList.asScala.iterator.map(e => e.getKey -> dataEnvelopeFromProto(e.getEnvelope)).toMap,
sendBack = gossip.getSendBack)
}
private def deltaPropagationToProto(deltaPropagation: DeltaPropagation): dm.DeltaPropagation = {
val b = dm.DeltaPropagation.newBuilder()
.setFromNode(uniqueAddressToProto(deltaPropagation.fromNode))
val b = dm.DeltaPropagation.newBuilder().setFromNode(uniqueAddressToProto(deltaPropagation.fromNode))
if (deltaPropagation.reply)
b.setReply(deltaPropagation.reply)
val entries = deltaPropagation.deltas.foreach {
case (key, Delta(data, fromSeqNr, toSeqNr)) =>
val b2 = dm.DeltaPropagation.Entry.newBuilder()
val b2 = dm.DeltaPropagation.Entry
.newBuilder()
.setKey(key)
.setEnvelope(dataEnvelopeToProto(data))
.setFromSeqNr(fromSeqNr)
@ -314,14 +313,13 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
private def deltaPropagationFromBinary(bytes: Array[Byte]): DeltaPropagation = {
val deltaPropagation = dm.DeltaPropagation.parseFrom(bytes)
val reply = deltaPropagation.hasReply && deltaPropagation.getReply
DeltaPropagation(
uniqueAddressFromProto(deltaPropagation.getFromNode),
reply,
deltaPropagation.getEntriesList.asScala.iterator.map { e =>
val fromSeqNr = e.getFromSeqNr
val toSeqNr = if (e.hasToSeqNr) e.getToSeqNr else fromSeqNr
e.getKey -> Delta(dataEnvelopeFromProto(e.getEnvelope), fromSeqNr, toSeqNr)
}.toMap)
DeltaPropagation(uniqueAddressFromProto(deltaPropagation.getFromNode),
reply,
deltaPropagation.getEntriesList.asScala.iterator.map { e =>
val fromSeqNr = e.getFromSeqNr
val toSeqNr = if (e.hasToSeqNr) e.getToSeqNr else fromSeqNr
e.getKey -> Delta(dataEnvelopeFromProto(e.getEnvelope), fromSeqNr, toSeqNr)
}.toMap)
}
private def getToProto(get: Get[_]): dm.Get = {
@ -332,10 +330,11 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
case _: ReadAll => -1
}
val b = dm.Get.newBuilder().
setKey(otherMessageToProto(get.key)).
setConsistency(consistencyValue).
setTimeout(get.consistency.timeout.toMillis.toInt)
val b = dm.Get
.newBuilder()
.setKey(otherMessageToProto(get.key))
.setConsistency(consistencyValue)
.setTimeout(get.consistency.timeout.toMillis.toInt)
get.request.foreach(o => b.setRequest(otherMessageToProto(o)))
b.build()
@ -356,9 +355,10 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def getSuccessToProto(getSuccess: GetSuccess[_]): dm.GetSuccess = {
val b = dm.GetSuccess.newBuilder().
setKey(otherMessageToProto(getSuccess.key)).
setData(otherMessageToProto(getSuccess.dataValue))
val b = dm.GetSuccess
.newBuilder()
.setKey(otherMessageToProto(getSuccess.key))
.setData(otherMessageToProto(getSuccess.dataValue))
getSuccess.request.foreach(o => b.setRequest(otherMessageToProto(o)))
b.build()
@ -399,10 +399,11 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def subscribeToProto(subscribe: Subscribe[_]): dm.Subscribe =
dm.Subscribe.newBuilder().
setKey(otherMessageToProto(subscribe.key)).
setRef(Serialization.serializedActorPath(subscribe.subscriber)).
build()
dm.Subscribe
.newBuilder()
.setKey(otherMessageToProto(subscribe.key))
.setRef(Serialization.serializedActorPath(subscribe.subscriber))
.build()
private def subscribeFromBinary(bytes: Array[Byte]): Subscribe[_] = {
val subscribe = dm.Subscribe.parseFrom(bytes)
@ -411,10 +412,11 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def unsubscribeToProto(unsubscribe: Unsubscribe[_]): dm.Unsubscribe =
dm.Unsubscribe.newBuilder().
setKey(otherMessageToProto(unsubscribe.key)).
setRef(Serialization.serializedActorPath(unsubscribe.subscriber)).
build()
dm.Unsubscribe
.newBuilder()
.setKey(otherMessageToProto(unsubscribe.key))
.setRef(Serialization.serializedActorPath(unsubscribe.subscriber))
.build()
private def unsubscribeFromBinary(bytes: Array[Byte]): Unsubscribe[_] = {
val unsubscribe = dm.Unsubscribe.parseFrom(bytes)
@ -423,10 +425,11 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def changedToProto(changed: Changed[_]): dm.Changed =
dm.Changed.newBuilder().
setKey(otherMessageToProto(changed.key)).
setData(otherMessageToProto(changed.dataValue)).
build()
dm.Changed
.newBuilder()
.setKey(otherMessageToProto(changed.key))
.setData(otherMessageToProto(changed.dataValue))
.build()
private def changedFromBinary(bytes: Array[Byte]): Changed[_] = {
val changed = dm.Changed.parseFrom(bytes)
@ -438,11 +441,12 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
private def pruningToProto(entries: Map[UniqueAddress, PruningState]): Iterable[dm.DataEnvelope.PruningEntry] = {
entries.map {
case (removedAddress, state) =>
val b = dm.DataEnvelope.PruningEntry.newBuilder().
setRemovedAddress(uniqueAddressToProto(removedAddress))
val b = dm.DataEnvelope.PruningEntry.newBuilder().setRemovedAddress(uniqueAddressToProto(removedAddress))
state match {
case PruningState.PruningInitialized(owner, seen) =>
seen.toVector.sorted(Member.addressOrdering).map(addressToProto).foreach { a => b.addSeen(a) }
seen.toVector.sorted(Member.addressOrdering).map(addressToProto).foreach { a =>
b.addSeen(a)
}
b.setOwnerAddress(uniqueAddressToProto(owner))
b.setPerformed(false)
case PruningState.PruningPerformed(obsoleteTime) =>
@ -456,8 +460,7 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def dataEnvelopeToProto(dataEnvelope: DataEnvelope): dm.DataEnvelope = {
val dataEnvelopeBuilder = dm.DataEnvelope.newBuilder().
setData(otherMessageToProto(dataEnvelope.data))
val dataEnvelopeBuilder = dm.DataEnvelope.newBuilder().setData(otherMessageToProto(dataEnvelope.data))
dataEnvelopeBuilder.addAllPruning(pruningToProto(dataEnvelope.pruning).asJava)
@ -479,7 +482,8 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
DataEnvelope(data, pruning, deltaVersions)
}
private def pruningFromProto(pruningEntries: java.util.List[dm.DataEnvelope.PruningEntry]): Map[UniqueAddress, PruningState] = {
private def pruningFromProto(
pruningEntries: java.util.List[dm.DataEnvelope.PruningEntry]): Map[UniqueAddress, PruningState] = {
if (pruningEntries.isEmpty)
Map.empty
else
@ -499,10 +503,7 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
}
private def writeToProto(write: Write): dm.Write =
dm.Write.newBuilder().
setKey(write.key).
setEnvelope(dataEnvelopeToProto(write.envelope)).
build()
dm.Write.newBuilder().setKey(write.key).setEnvelope(dataEnvelopeToProto(write.envelope)).build()
private def writeFromBinary(bytes: Array[Byte]): Write = {
val write = dm.Write.parseFrom(bytes)
@ -539,8 +540,7 @@ class ReplicatorMessageSerializer(val system: ExtendedActorSystem)
case _ => false
}
val builder = dm.DurableDataEnvelope.newBuilder()
.setData(otherMessageToProto(durableDataEnvelope.data))
val builder = dm.DurableDataEnvelope.newBuilder().setData(otherMessageToProto(durableDataEnvelope.data))
builder.addAllPruning(pruningToProto(pruning).asJava)

View file

@ -90,26 +90,26 @@ trait SerializationSupport {
Address(addressProtocol, system.name, address.getHostname, address.getPort)
def uniqueAddressToProto(uniqueAddress: UniqueAddress): dm.UniqueAddress.Builder =
dm.UniqueAddress.newBuilder().setAddress(addressToProto(uniqueAddress.address))
dm.UniqueAddress
.newBuilder()
.setAddress(addressToProto(uniqueAddress.address))
.setUid(uniqueAddress.longUid.toInt)
.setUid2((uniqueAddress.longUid >> 32).toInt)
def uniqueAddressFromProto(uniqueAddress: dm.UniqueAddress): UniqueAddress =
UniqueAddress(
addressFromProto(uniqueAddress.getAddress),
if (uniqueAddress.hasUid2) {
// new remote node join the two parts of the long uid back
(uniqueAddress.getUid2.toLong << 32) | (uniqueAddress.getUid & 0xFFFFFFFFL)
} else {
// old remote node
uniqueAddress.getUid.toLong
})
UniqueAddress(addressFromProto(uniqueAddress.getAddress), if (uniqueAddress.hasUid2) {
// new remote node join the two parts of the long uid back
(uniqueAddress.getUid2.toLong << 32) | (uniqueAddress.getUid & 0XFFFFFFFFL)
} else {
// old remote node
uniqueAddress.getUid.toLong
})
def versionVectorToProto(versionVector: VersionVector): dm.VersionVector = {
val b = dm.VersionVector.newBuilder()
versionVector.versionsIterator.foreach {
case (node, value) => b.addEntries(dm.VersionVector.Entry.newBuilder().
setNode(uniqueAddressToProto(node)).setVersion(value))
case (node, value) =>
b.addEntries(dm.VersionVector.Entry.newBuilder().setNode(uniqueAddressToProto(node)).setVersion(value))
}
b.build()
}
@ -124,8 +124,9 @@ trait SerializationSupport {
else if (entries.size == 1)
VersionVector(uniqueAddressFromProto(entries.get(0).getNode), entries.get(0).getVersion)
else {
val versions: TreeMap[UniqueAddress, Long] = scala.collection.immutable.TreeMap.from(versionVector.getEntriesList.asScala.iterator.map(entry =>
uniqueAddressFromProto(entry.getNode) -> entry.getVersion))
val versions: TreeMap[UniqueAddress, Long] =
scala.collection.immutable.TreeMap.from(versionVector.getEntriesList.asScala.iterator.map(entry =>
uniqueAddressFromProto(entry.getNode) -> entry.getVersion))
VersionVector(versions)
}
}
@ -137,8 +138,9 @@ trait SerializationSupport {
def buildOther(): dm.OtherMessage = {
val m = msg.asInstanceOf[AnyRef]
val msgSerializer = serialization.findSerializerFor(m)
val builder = dm.OtherMessage.newBuilder().
setEnclosedMessage(ByteString.copyFrom(msgSerializer.toBinary(m)))
val builder = dm.OtherMessage
.newBuilder()
.setEnclosedMessage(ByteString.copyFrom(msgSerializer.toBinary(m)))
.setSerializerId(msgSerializer.identifier)
val ms = Serializers.manifestFor(msgSerializer, m)
@ -164,10 +166,7 @@ trait SerializationSupport {
def otherMessageFromProto(other: dm.OtherMessage): AnyRef = {
val manifest = if (other.hasMessageManifest) other.getMessageManifest.toStringUtf8 else ""
serialization.deserialize(
other.getEnclosedMessage.toByteArray,
other.getSerializerId,
manifest).get
serialization.deserialize(other.getEnclosedMessage.toByteArray, other.getSerializerId, manifest).get
}
}