diff --git a/akka-actor/src/main/scala/akka/util/Convert.scala b/akka-actor/src/main/scala/akka/util/Convert.scala new file mode 100644 index 0000000000..85f274d26f --- /dev/null +++ b/akka-actor/src/main/scala/akka/util/Convert.scala @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2009-2011 Scalable Solutions AB + */ + +package akka.util + +/** + * @author Jonas Bonér + */ +object Convert { + + def intToBytes(value: Int): Array[Byte] = { + val bytes = Array.fill[Byte](4)(0) + bytes(0) = (value >>> 24).asInstanceOf[Byte] + bytes(1) = (value >>> 16).asInstanceOf[Byte] + bytes(2) = (value >>> 8).asInstanceOf[Byte] + bytes(3) = value.asInstanceOf[Byte] + bytes + } + + def bytesToInt(bytes: Array[Byte], offset: Int): Int = { + (0 until 4).foldLeft(0)((value, index) => value + ((bytes(index + offset) & 0x000000FF) << ((4 - 1 - index) * 8))) + } + + def longToBytes(value: Long): Array[Byte] = { + val writeBuffer = Array.fill[Byte](8)(0) + writeBuffer(0) = (value >>> 56).asInstanceOf[Byte] + writeBuffer(1) = (value >>> 48).asInstanceOf[Byte] + writeBuffer(2) = (value >>> 40).asInstanceOf[Byte] + writeBuffer(3) = (value >>> 32).asInstanceOf[Byte] + writeBuffer(4) = (value >>> 24).asInstanceOf[Byte] + writeBuffer(5) = (value >>> 16).asInstanceOf[Byte] + writeBuffer(6) = (value >>> 8).asInstanceOf[Byte] + writeBuffer(7) = (value >>> 0).asInstanceOf[Byte] + writeBuffer + } + + def bytesToLong(buf: Array[Byte]): Long = { + ((buf(0) & 0xFFL) << 56) | + ((buf(1) & 0xFFL) << 48) | + ((buf(2) & 0xFFL) << 40) | + ((buf(3) & 0xFFL) << 32) | + ((buf(4) & 0xFFL) << 24) | + ((buf(5) & 0xFFL) << 16) | + ((buf(6) & 0xFFL) << 8) | + ((buf(7) & 0xFFL) << 0) + } +} diff --git a/akka-actor/src/main/scala/akka/util/Helpers.scala b/akka-actor/src/main/scala/akka/util/Helpers.scala index 8949f73f8b..998df26885 100644 --- a/akka-actor/src/main/scala/akka/util/Helpers.scala +++ b/akka-actor/src/main/scala/akka/util/Helpers.scala @@ -4,6 +4,8 @@ package akka.util +import akka.event.EventHandler + /** * 
@author Jonas Bonér */ @@ -24,6 +26,30 @@ object Helpers { (0 until 4).foldLeft(0)((value, index) => value + ((bytes(index + offset) & 0x000000FF) << ((4 - 1 - index) * 8))) } + def flatten[T: ClassManifest](array: Array[Any]) = array.flatMap { + case arr: Array[T] => arr + case elem: T => Array(elem) + } + + def ignore[E : Manifest](body: => Unit): Unit = { + try { + body + } + catch { + case e if manifest[E].erasure.isAssignableFrom(e.getClass) => () + } + } + + def withPrintStackTraceOnError(body: => Unit) = { + try { + body + } catch { + case e: Throwable => + EventHandler.error(e, this, "") + throw e + } + } + /** * Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException * if the actual type is not assignable from the given one. diff --git a/akka-actor/src/main/scala/akka/util/JMX.scala b/akka-actor/src/main/scala/akka/util/JMX.scala new file mode 100644 index 0000000000..05ba9c9ff1 --- /dev/null +++ b/akka-actor/src/main/scala/akka/util/JMX.scala @@ -0,0 +1,37 @@ +/** + * Copyright (C) 2009-2011 Scalable Solutions AB + */ + +package akka.util + +import akka.event.EventHandler + +import java.lang.management.ManagementFactory +import javax.management.{ObjectInstance, ObjectName, InstanceAlreadyExistsException, InstanceNotFoundException} + +/** + * @author Jonas Bonér + */ +object JMX { + private val mbeanServer = ManagementFactory.getPlatformMBeanServer + + def nameFor(hostname: String, service: String, bean: String): ObjectName = + new ObjectName("akka.%s:type=%s,name=%s".format(hostname, service, bean.replace(":", "_"))) + + def register(name: ObjectName, mbean: AnyRef): Option[ObjectInstance] = try { + Some(mbeanServer.registerMBean(mbean, name)) + } catch { + case e: InstanceAlreadyExistsException => + Some(mbeanServer.getObjectInstance(name)) + case e: Exception => + EventHandler.error(e, this, "Error when registering mbean [%s]".format(mbean)) + None + } + + def unregister(mbean: ObjectName) = try 
{ + mbeanServer.unregisterMBean(mbean) + } catch { + case e: InstanceNotFoundException => {} + case e: Exception => EventHandler.error(e, this, "Error while unregistering mbean [%s]".format(mbean)) + } +} diff --git a/akka-cluster/src/main/java/akka/cloud/cluster/EventProtocol.java b/akka-cluster/src/main/java/akka/cloud/cluster/EventProtocol.java deleted file mode 100644 index 278e5b3a75..0000000000 --- a/akka-cluster/src/main/java/akka/cloud/cluster/EventProtocol.java +++ /dev/null @@ -1,2485 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: EventProtocol.proto - -package akka.cloud.cluster; - -public final class EventProtocol { - private EventProtocol() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public static final class GenericLoggingEvent extends - com.google.protobuf.GeneratedMessage { - // Use GenericLoggingEvent.newBuilder() to construct. - private GenericLoggingEvent() { - initFields(); - } - private GenericLoggingEvent(boolean noInit) {} - - private static final GenericLoggingEvent defaultInstance; - public static GenericLoggingEvent getDefaultInstance() { - return defaultInstance; - } - - public GenericLoggingEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_GenericLoggingEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_GenericLoggingEvent_fieldAccessorTable; - } - - // required string context = 1; - public static final int CONTEXT_FIELD_NUMBER = 1; - private boolean hasContext; - private java.lang.String context_ = ""; - public boolean hasContext() { return hasContext; } - public java.lang.String getContext() { return context_; } - - // required string message = 2; - 
public static final int MESSAGE_FIELD_NUMBER = 2; - private boolean hasMessage; - private java.lang.String message_ = ""; - public boolean hasMessage() { return hasMessage; } - public java.lang.String getMessage() { return message_; } - - // required uint64 time = 3; - public static final int TIME_FIELD_NUMBER = 3; - private boolean hasTime; - private long time_ = 0L; - public boolean hasTime() { return hasTime; } - public long getTime() { return time_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasContext) return false; - if (!hasMessage) return false; - if (!hasTime) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasContext()) { - output.writeString(1, getContext()); - } - if (hasMessage()) { - output.writeString(2, getMessage()); - } - if (hasTime()) { - output.writeUInt64(3, getTime()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasContext()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getContext()); - } - if (hasMessage()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(2, getMessage()); - } - if (hasTime()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, getTime()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - com.google.protobuf.CodedInputStream input) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.GenericLoggingEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.GenericLoggingEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.GenericLoggingEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.GenericLoggingEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.GenericLoggingEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.GenericLoggingEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.GenericLoggingEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.GenericLoggingEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.GenericLoggingEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.GenericLoggingEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - 
public akka.cloud.cluster.EventProtocol.GenericLoggingEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.GenericLoggingEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.GenericLoggingEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.GenericLoggingEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.GenericLoggingEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.GenericLoggingEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.GenericLoggingEvent other) { - if (other == akka.cloud.cluster.EventProtocol.GenericLoggingEvent.getDefaultInstance()) return this; - if (other.hasContext()) { - setContext(other.getContext()); - } - if (other.hasMessage()) { - setMessage(other.getMessage()); - } - if (other.hasTime()) { - setTime(other.getTime()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - 
if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setContext(input.readString()); - break; - } - case 18: { - setMessage(input.readString()); - break; - } - case 24: { - setTime(input.readUInt64()); - break; - } - } - } - } - - - // required string context = 1; - public boolean hasContext() { - return result.hasContext(); - } - public java.lang.String getContext() { - return result.getContext(); - } - public Builder setContext(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasContext = true; - result.context_ = value; - return this; - } - public Builder clearContext() { - result.hasContext = false; - result.context_ = getDefaultInstance().getContext(); - return this; - } - - // required string message = 2; - public boolean hasMessage() { - return result.hasMessage(); - } - public java.lang.String getMessage() { - return result.getMessage(); - } - public Builder setMessage(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasMessage = true; - result.message_ = value; - return this; - } - public Builder clearMessage() { - result.hasMessage = false; - result.message_ = getDefaultInstance().getMessage(); - return this; - } - - // required uint64 time = 3; - public boolean hasTime() { - return result.hasTime(); - } - public long getTime() { - return result.getTime(); - } - public Builder setTime(long value) { - result.hasTime = true; - result.time_ = value; - return this; - } - public Builder clearTime() { - result.hasTime = false; - result.time_ = 0L; - return this; - } - - // @@protoc_insertion_point(builder_scope:GenericLoggingEvent) - } - - static { - defaultInstance = new GenericLoggingEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // 
@@protoc_insertion_point(class_scope:GenericLoggingEvent) - } - - public static final class AuditEvent extends - com.google.protobuf.GeneratedMessage { - // Use AuditEvent.newBuilder() to construct. - private AuditEvent() { - initFields(); - } - private AuditEvent(boolean noInit) {} - - private static final AuditEvent defaultInstance; - public static AuditEvent getDefaultInstance() { - return defaultInstance; - } - - public AuditEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_AuditEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_AuditEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - // required string context = 2; - public static final int CONTEXT_FIELD_NUMBER = 2; - private boolean hasContext; - private java.lang.String context_ = ""; - public boolean hasContext() { return hasContext; } - public java.lang.String getContext() { return context_; } - - // required string message = 3; - public static final int MESSAGE_FIELD_NUMBER = 3; - private boolean hasMessage; - private java.lang.String message_ = ""; - public boolean hasMessage() { return hasMessage; } - public java.lang.String getMessage() { return message_; } - - // required uint64 time = 4; - public static final int TIME_FIELD_NUMBER = 4; - private boolean hasTime; - private long time_ = 0L; - public boolean hasTime() { return hasTime; } - public long getTime() { return time_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - 
if (!hasContext) return false; - if (!hasMessage) return false; - if (!hasTime) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - if (hasContext()) { - output.writeString(2, getContext()); - } - if (hasMessage()) { - output.writeString(3, getMessage()); - } - if (hasTime()) { - output.writeUInt64(4, getTime()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - if (hasContext()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(2, getContext()); - } - if (hasMessage()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(3, getMessage()); - } - if (hasTime()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, getTime()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static 
akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AuditEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return 
newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.AuditEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.AuditEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.AuditEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.AuditEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.AuditEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.AuditEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.AuditEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.AuditEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.AuditEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.AuditEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.AuditEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.AuditEvent buildPartial() { - if (result == null) { - throw new 
IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.AuditEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.AuditEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.AuditEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.AuditEvent other) { - if (other == akka.cloud.cluster.EventProtocol.AuditEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasContext()) { - setContext(other.getContext()); - } - if (other.hasMessage()) { - setMessage(other.getMessage()); - } - if (other.hasTime()) { - setTime(other.getTime()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - case 18: { - setContext(input.readString()); - break; - } - case 26: { - setMessage(input.readString()); - break; - } - case 32: { - setTime(input.readUInt64()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder 
setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // required string context = 2; - public boolean hasContext() { - return result.hasContext(); - } - public java.lang.String getContext() { - return result.getContext(); - } - public Builder setContext(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasContext = true; - result.context_ = value; - return this; - } - public Builder clearContext() { - result.hasContext = false; - result.context_ = getDefaultInstance().getContext(); - return this; - } - - // required string message = 3; - public boolean hasMessage() { - return result.hasMessage(); - } - public java.lang.String getMessage() { - return result.getMessage(); - } - public Builder setMessage(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasMessage = true; - result.message_ = value; - return this; - } - public Builder clearMessage() { - result.hasMessage = false; - result.message_ = getDefaultInstance().getMessage(); - return this; - } - - // required uint64 time = 4; - public boolean hasTime() { - return result.hasTime(); - } - public long getTime() { - return result.getTime(); - } - public Builder setTime(long value) { - result.hasTime = true; - result.time_ = value; - return this; - } - public Builder clearTime() { - result.hasTime = false; - result.time_ = 0L; - return this; - } - - // @@protoc_insertion_point(builder_scope:AuditEvent) - } - - static { - defaultInstance = new AuditEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AuditEvent) - } - - public static final class CounterAddEvent extends - 
com.google.protobuf.GeneratedMessage { - // Use CounterAddEvent.newBuilder() to construct. - private CounterAddEvent() { - initFields(); - } - private CounterAddEvent(boolean noInit) {} - - private static final CounterAddEvent defaultInstance; - public static CounterAddEvent getDefaultInstance() { - return defaultInstance; - } - - public CounterAddEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterAddEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterAddEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - // required uint64 delta = 2; - public static final int DELTA_FIELD_NUMBER = 2; - private boolean hasDelta; - private long delta_ = 0L; - public boolean hasDelta() { return hasDelta; } - public long getDelta() { return delta_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - if (!hasDelta) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - if (hasDelta()) { - output.writeUInt64(2, getDelta()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - if 
(hasDelta()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, getDelta()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - 
return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterAddEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.CounterAddEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.CounterAddEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.CounterAddEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.CounterAddEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.CounterAddEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.CounterAddEvent(); - return this; - } - - 
public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.CounterAddEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.CounterAddEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.CounterAddEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.CounterAddEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.CounterAddEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.CounterAddEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.CounterAddEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.CounterAddEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.CounterAddEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.CounterAddEvent other) { - if (other == akka.cloud.cluster.EventProtocol.CounterAddEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasDelta()) { - setDelta(other.getDelta()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - case 16: { - setDelta(input.readUInt64()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // required uint64 delta = 2; - public boolean hasDelta() { - return result.hasDelta(); - } - public long getDelta() { - return result.getDelta(); - } - public Builder setDelta(long value) { - result.hasDelta = true; - result.delta_ = value; - return this; - } - public Builder clearDelta() { - result.hasDelta = false; - result.delta_ = 0L; - return this; - } - - // @@protoc_insertion_point(builder_scope:CounterAddEvent) - } - - static { - defaultInstance = new CounterAddEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CounterAddEvent) - } - - public static final class CounterSetEvent extends - com.google.protobuf.GeneratedMessage { - // Use CounterSetEvent.newBuilder() to construct. 
- private CounterSetEvent() { - initFields(); - } - private CounterSetEvent(boolean noInit) {} - - private static final CounterSetEvent defaultInstance; - public static CounterSetEvent getDefaultInstance() { - return defaultInstance; - } - - public CounterSetEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterSetEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterSetEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - // required uint32 value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private boolean hasValue; - private int value_ = 0; - public boolean hasValue() { return hasValue; } - public int getValue() { return value_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - if (!hasValue) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - if (hasValue()) { - output.writeUInt32(2, getValue()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - if (hasValue()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, 
getValue()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent 
parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterSetEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.CounterSetEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.CounterSetEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.CounterSetEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.CounterSetEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.CounterSetEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.CounterSetEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public 
com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.CounterSetEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.CounterSetEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.CounterSetEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.CounterSetEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.CounterSetEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.CounterSetEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.CounterSetEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.CounterSetEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.CounterSetEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.CounterSetEvent other) { - if (other == akka.cloud.cluster.EventProtocol.CounterSetEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - case 16: { - setValue(input.readUInt32()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // required uint32 value = 2; - public boolean hasValue() { - return result.hasValue(); - } - public int getValue() { - return result.getValue(); - } - public Builder setValue(int value) { - result.hasValue = true; - result.value_ = value; - return this; - } - public Builder clearValue() { - result.hasValue = false; - result.value_ = 0; - return this; - } - - // @@protoc_insertion_point(builder_scope:CounterSetEvent) - } - - static { - defaultInstance = new CounterSetEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CounterSetEvent) - } - - public static final class CounterResetEvent extends - com.google.protobuf.GeneratedMessage { - // Use CounterResetEvent.newBuilder() to construct. 
- private CounterResetEvent() { - initFields(); - } - private CounterResetEvent(boolean noInit) {} - - private static final CounterResetEvent defaultInstance; - public static CounterResetEvent getDefaultInstance() { - return defaultInstance; - } - - public CounterResetEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterResetEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_CounterResetEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent 
parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.CounterResetEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.CounterResetEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.CounterResetEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.CounterResetEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.CounterResetEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.CounterResetEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.CounterResetEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.CounterResetEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.CounterResetEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.CounterResetEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return 
result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.CounterResetEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.CounterResetEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.CounterResetEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.CounterResetEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.CounterResetEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.CounterResetEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.CounterResetEvent other) { - if (other == akka.cloud.cluster.EventProtocol.CounterResetEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // @@protoc_insertion_point(builder_scope:CounterResetEvent) - } - - static { - defaultInstance = new CounterResetEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CounterResetEvent) - } - - public static final class AverageAddEvent extends - com.google.protobuf.GeneratedMessage { - // Use AverageAddEvent.newBuilder() to construct. 
- private AverageAddEvent() { - initFields(); - } - private AverageAddEvent(boolean noInit) {} - - private static final AverageAddEvent defaultInstance; - public static AverageAddEvent getDefaultInstance() { - return defaultInstance; - } - - public AverageAddEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_AverageAddEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_AverageAddEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - // required uint64 value = 2; - public static final int VALUE_FIELD_NUMBER = 2; - private boolean hasValue; - private long value_ = 0L; - public boolean hasValue() { return hasValue; } - public long getValue() { return value_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - if (!hasValue) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - if (hasValue()) { - output.writeUInt64(2, getValue()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - if (hasValue()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, 
getValue()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent 
parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageAddEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.AverageAddEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.AverageAddEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.AverageAddEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.AverageAddEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.AverageAddEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.AverageAddEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public 
com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.AverageAddEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.AverageAddEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.AverageAddEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.AverageAddEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.AverageAddEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.AverageAddEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.AverageAddEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.AverageAddEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.AverageAddEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.AverageAddEvent other) { - if (other == akka.cloud.cluster.EventProtocol.AverageAddEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasValue()) { - setValue(other.getValue()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - case 16: { - setValue(input.readUInt64()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // required uint64 value = 2; - public boolean hasValue() { - return result.hasValue(); - } - public long getValue() { - return result.getValue(); - } - public Builder setValue(long value) { - result.hasValue = true; - result.value_ = value; - return this; - } - public Builder clearValue() { - result.hasValue = false; - result.value_ = 0L; - return this; - } - - // @@protoc_insertion_point(builder_scope:AverageAddEvent) - } - - static { - defaultInstance = new AverageAddEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AverageAddEvent) - } - - public static final class AverageResetEvent extends - com.google.protobuf.GeneratedMessage { - // Use AverageResetEvent.newBuilder() to construct. 
- private AverageResetEvent() { - initFields(); - } - private AverageResetEvent(boolean noInit) {} - - private static final AverageResetEvent defaultInstance; - public static AverageResetEvent getDefaultInstance() { - return defaultInstance; - } - - public AverageResetEvent getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.cloud.cluster.EventProtocol.internal_static_AverageResetEvent_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.cloud.cluster.EventProtocol.internal_static_AverageResetEvent_fieldAccessorTable; - } - - // required string name = 1; - public static final int NAME_FIELD_NUMBER = 1; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasName) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasName()) { - output.writeString(1, getName()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getName()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent 
parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static akka.cloud.cluster.EventProtocol.AverageResetEvent parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.EventProtocol.AverageResetEvent prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.EventProtocol.AverageResetEvent result; - - // Construct using akka.cloud.cluster.EventProtocol.AverageResetEvent.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.EventProtocol.AverageResetEvent(); - return builder; - } - - protected akka.cloud.cluster.EventProtocol.AverageResetEvent internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new akka.cloud.cluster.EventProtocol.AverageResetEvent(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.cloud.cluster.EventProtocol.AverageResetEvent.getDescriptor(); - } - - public akka.cloud.cluster.EventProtocol.AverageResetEvent getDefaultInstanceForType() { - return akka.cloud.cluster.EventProtocol.AverageResetEvent.getDefaultInstance(); - } - - public boolean isInitialized() { - return 
result.isInitialized(); - } - public akka.cloud.cluster.EventProtocol.AverageResetEvent build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private akka.cloud.cluster.EventProtocol.AverageResetEvent buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public akka.cloud.cluster.EventProtocol.AverageResetEvent buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - akka.cloud.cluster.EventProtocol.AverageResetEvent returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.EventProtocol.AverageResetEvent) { - return mergeFrom((akka.cloud.cluster.EventProtocol.AverageResetEvent)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.cloud.cluster.EventProtocol.AverageResetEvent other) { - if (other == akka.cloud.cluster.EventProtocol.AverageResetEvent.getDefaultInstance()) return this; - if (other.hasName()) { - setName(other.getName()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - 
this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 10: { - setName(input.readString()); - break; - } - } - } - } - - - // required string name = 1; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // @@protoc_insertion_point(builder_scope:AverageResetEvent) - } - - static { - defaultInstance = new AverageResetEvent(true); - akka.cloud.cluster.EventProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AverageResetEvent) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_GenericLoggingEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_GenericLoggingEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AuditEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AuditEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CounterAddEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CounterAddEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CounterSetEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CounterSetEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CounterResetEvent_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CounterResetEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AverageAddEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AverageAddEvent_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_AverageResetEvent_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AverageResetEvent_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\023EventProtocol.proto\"E\n\023GenericLoggingE" + - "vent\022\017\n\007context\030\001 \002(\t\022\017\n\007message\030\002 \002(\t\022\014" + - "\n\004time\030\003 \002(\004\"J\n\nAuditEvent\022\014\n\004name\030\001 \002(\t" + - "\022\017\n\007context\030\002 \002(\t\022\017\n\007message\030\003 \002(\t\022\014\n\004ti" + - "me\030\004 \002(\004\".\n\017CounterAddEvent\022\014\n\004name\030\001 \002(" + - "\t\022\r\n\005delta\030\002 \002(\004\".\n\017CounterSetEvent\022\014\n\004n" + - "ame\030\001 \002(\t\022\r\n\005value\030\002 \002(\r\"!\n\021CounterReset" + - "Event\022\014\n\004name\030\001 \002(\t\".\n\017AverageAddEvent\022\014" + - "\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\004\"!\n\021AverageRe" + - "setEvent\022\014\n\004name\030\001 \002(\tB\026\n\022akka.cloud.clu", - "sterH\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - 
internal_static_GenericLoggingEvent_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_GenericLoggingEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_GenericLoggingEvent_descriptor, - new java.lang.String[] { "Context", "Message", "Time", }, - akka.cloud.cluster.EventProtocol.GenericLoggingEvent.class, - akka.cloud.cluster.EventProtocol.GenericLoggingEvent.Builder.class); - internal_static_AuditEvent_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_AuditEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AuditEvent_descriptor, - new java.lang.String[] { "Name", "Context", "Message", "Time", }, - akka.cloud.cluster.EventProtocol.AuditEvent.class, - akka.cloud.cluster.EventProtocol.AuditEvent.Builder.class); - internal_static_CounterAddEvent_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_CounterAddEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CounterAddEvent_descriptor, - new java.lang.String[] { "Name", "Delta", }, - akka.cloud.cluster.EventProtocol.CounterAddEvent.class, - akka.cloud.cluster.EventProtocol.CounterAddEvent.Builder.class); - internal_static_CounterSetEvent_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_CounterSetEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CounterSetEvent_descriptor, - new java.lang.String[] { "Name", "Value", }, - akka.cloud.cluster.EventProtocol.CounterSetEvent.class, - akka.cloud.cluster.EventProtocol.CounterSetEvent.Builder.class); - internal_static_CounterResetEvent_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_CounterResetEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CounterResetEvent_descriptor, - 
new java.lang.String[] { "Name", }, - akka.cloud.cluster.EventProtocol.CounterResetEvent.class, - akka.cloud.cluster.EventProtocol.CounterResetEvent.Builder.class); - internal_static_AverageAddEvent_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_AverageAddEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AverageAddEvent_descriptor, - new java.lang.String[] { "Name", "Value", }, - akka.cloud.cluster.EventProtocol.AverageAddEvent.class, - akka.cloud.cluster.EventProtocol.AverageAddEvent.Builder.class); - internal_static_AverageResetEvent_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_AverageResetEvent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AverageResetEvent_descriptor, - new java.lang.String[] { "Name", }, - akka.cloud.cluster.EventProtocol.AverageResetEvent.class, - akka.cloud.cluster.EventProtocol.AverageResetEvent.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - public static void internalForceInit() {} - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/akka-cluster/src/main/java/akka/cloud/cluster/ClusterProtocol.java b/akka-cluster/src/main/java/akka/cluster/ClusterProtocol.java similarity index 69% rename from akka-cluster/src/main/java/akka/cloud/cluster/ClusterProtocol.java rename to akka-cluster/src/main/java/akka/cluster/ClusterProtocol.java index 7fdc8510b2..ff4ab448ed 100644 --- a/akka-cluster/src/main/java/akka/cloud/cluster/ClusterProtocol.java +++ b/akka-cluster/src/main/java/akka/cluster/ClusterProtocol.java @@ -1,7 +1,7 @@ // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: ClusterProtocol.proto -package akka.cloud.cluster; +package akka.cluster; public final class ClusterProtocol { private ClusterProtocol() {} @@ -71,7 +71,7 @@ public final class ClusterProtocol { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return akka.cloud.cluster.ClusterProtocol.getDescriptor().getEnumTypes().get(0); + return akka.cluster.ClusterProtocol.getDescriptor().getEnumTypes().get(0); } private static final RemoteDaemonMessageType[] VALUES = { @@ -93,7 +93,7 @@ public final class ClusterProtocol { } static { - akka.cloud.cluster.ClusterProtocol.getDescriptor(); + akka.cluster.ClusterProtocol.getDescriptor(); } // @@protoc_insertion_point(enum_scope:RemoteDaemonMessageType) @@ -118,34 +118,34 @@ public final class ClusterProtocol { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return akka.cloud.cluster.ClusterProtocol.internal_static_RemoteDaemonMessageProtocol_descriptor; + return akka.cluster.ClusterProtocol.internal_static_RemoteDaemonMessageProtocol_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return akka.cloud.cluster.ClusterProtocol.internal_static_RemoteDaemonMessageProtocol_fieldAccessorTable; + return akka.cluster.ClusterProtocol.internal_static_RemoteDaemonMessageProtocol_fieldAccessorTable; } // required .RemoteDaemonMessageType messageType = 1; public static final int MESSAGETYPE_FIELD_NUMBER = 1; private boolean hasMessageType; - private akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType messageType_; + private akka.cluster.ClusterProtocol.RemoteDaemonMessageType messageType_; public boolean hasMessageType() { return hasMessageType; } - public akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType getMessageType() { return messageType_; } + public akka.cluster.ClusterProtocol.RemoteDaemonMessageType getMessageType() { return messageType_; } // optional .UuidProtocol 
actorUuid = 2; public static final int ACTORUUID_FIELD_NUMBER = 2; private boolean hasActorUuid; - private akka.cloud.cluster.ClusterProtocol.UuidProtocol actorUuid_; + private akka.cluster.ClusterProtocol.UuidProtocol actorUuid_; public boolean hasActorUuid() { return hasActorUuid; } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol getActorUuid() { return actorUuid_; } + public akka.cluster.ClusterProtocol.UuidProtocol getActorUuid() { return actorUuid_; } - // optional string actorId = 3; - public static final int ACTORID_FIELD_NUMBER = 3; - private boolean hasActorId; - private java.lang.String actorId_ = ""; - public boolean hasActorId() { return hasActorId; } - public java.lang.String getActorId() { return actorId_; } + // optional string actorAddress = 3; + public static final int ACTORADDRESS_FIELD_NUMBER = 3; + private boolean hasActorAddress; + private java.lang.String actorAddress_ = ""; + public boolean hasActorAddress() { return hasActorAddress; } + public java.lang.String getActorAddress() { return actorAddress_; } // optional string actorClassName = 4; public static final int ACTORCLASSNAME_FIELD_NUMBER = 4; @@ -162,8 +162,8 @@ public final class ClusterProtocol { public com.google.protobuf.ByteString getPayload() { return payload_; } private void initFields() { - messageType_ = akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType.START; - actorUuid_ = akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); + messageType_ = akka.cluster.ClusterProtocol.RemoteDaemonMessageType.START; + actorUuid_ = akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); } public final boolean isInitialized() { if (!hasMessageType) return false; @@ -182,8 +182,8 @@ public final class ClusterProtocol { if (hasActorUuid()) { output.writeMessage(2, getActorUuid()); } - if (hasActorId()) { - output.writeString(3, getActorId()); + if (hasActorAddress()) { + output.writeString(3, getActorAddress()); } if (hasActorClassName()) { 
output.writeString(4, getActorClassName()); @@ -208,9 +208,9 @@ public final class ClusterProtocol { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getActorUuid()); } - if (hasActorId()) { + if (hasActorAddress()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(3, getActorId()); + .computeStringSize(3, getActorAddress()); } if (hasActorClassName()) { size += com.google.protobuf.CodedOutputStream @@ -225,41 +225,41 @@ public final class ClusterProtocol { return size; } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom(byte[] data) + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static 
akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom(java.io.InputStream input) + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseDelimitedFrom(java.io.InputStream input) + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -268,7 +268,7 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseDelimitedFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -279,12 +279,12 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -294,25 +294,25 @@ public final class ClusterProtocol { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol prototype) { + public static Builder newBuilder(akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol result; + private akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol result; - // Construct using akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.newBuilder() + // Construct using akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol(); + builder.result = new akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol(); return builder; } - protected akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol internalGetResult() { + protected akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol internalGetResult() { return result; } @@ -321,7 +321,7 @@ public final class ClusterProtocol { throw new IllegalStateException( "Cannot call clear() after build()."); } - result = new akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol(); + result = new akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol(); return this; } @@ -331,24 +331,24 @@ public final class ClusterProtocol { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return 
akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDescriptor(); + return akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDescriptor(); } - public akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol getDefaultInstanceForType() { - return akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDefaultInstance(); + public akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol getDefaultInstanceForType() { + return akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } - public akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol build() { + public akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } - private akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol buildParsed() + private akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( @@ -357,35 +357,35 @@ public final class ClusterProtocol { return buildPartial(); } - public akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol buildPartial() { + public akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } - akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol returnMe = result; + akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol) { - return mergeFrom((akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol)other); + if (other 
instanceof akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol) { + return mergeFrom((akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol other) { - if (other == akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDefaultInstance()) return this; + public Builder mergeFrom(akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol other) { + if (other == akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.getDefaultInstance()) return this; if (other.hasMessageType()) { setMessageType(other.getMessageType()); } if (other.hasActorUuid()) { mergeActorUuid(other.getActorUuid()); } - if (other.hasActorId()) { - setActorId(other.getActorId()); + if (other.hasActorAddress()) { + setActorAddress(other.getActorAddress()); } if (other.hasActorClassName()) { setActorClassName(other.getActorClassName()); @@ -420,7 +420,7 @@ public final class ClusterProtocol { } case 8: { int rawValue = input.readEnum(); - akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType value = akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType.valueOf(rawValue); + akka.cluster.ClusterProtocol.RemoteDaemonMessageType value = akka.cluster.ClusterProtocol.RemoteDaemonMessageType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { @@ -429,7 +429,7 @@ public final class ClusterProtocol { break; } case 18: { - akka.cloud.cluster.ClusterProtocol.UuidProtocol.Builder subBuilder = akka.cloud.cluster.ClusterProtocol.UuidProtocol.newBuilder(); + akka.cluster.ClusterProtocol.UuidProtocol.Builder subBuilder = akka.cluster.ClusterProtocol.UuidProtocol.newBuilder(); if (hasActorUuid()) { subBuilder.mergeFrom(getActorUuid()); } @@ -438,7 +438,7 @@ public final class ClusterProtocol { break; } case 26: { - setActorId(input.readString()); + setActorAddress(input.readString()); break; } case 34: { 
@@ -458,10 +458,10 @@ public final class ClusterProtocol { public boolean hasMessageType() { return result.hasMessageType(); } - public akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType getMessageType() { + public akka.cluster.ClusterProtocol.RemoteDaemonMessageType getMessageType() { return result.getMessageType(); } - public Builder setMessageType(akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType value) { + public Builder setMessageType(akka.cluster.ClusterProtocol.RemoteDaemonMessageType value) { if (value == null) { throw new NullPointerException(); } @@ -471,7 +471,7 @@ public final class ClusterProtocol { } public Builder clearMessageType() { result.hasMessageType = false; - result.messageType_ = akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageType.START; + result.messageType_ = akka.cluster.ClusterProtocol.RemoteDaemonMessageType.START; return this; } @@ -479,10 +479,10 @@ public final class ClusterProtocol { public boolean hasActorUuid() { return result.hasActorUuid(); } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol getActorUuid() { + public akka.cluster.ClusterProtocol.UuidProtocol getActorUuid() { return result.getActorUuid(); } - public Builder setActorUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol value) { + public Builder setActorUuid(akka.cluster.ClusterProtocol.UuidProtocol value) { if (value == null) { throw new NullPointerException(); } @@ -490,16 +490,16 @@ public final class ClusterProtocol { result.actorUuid_ = value; return this; } - public Builder setActorUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol.Builder builderForValue) { + public Builder setActorUuid(akka.cluster.ClusterProtocol.UuidProtocol.Builder builderForValue) { result.hasActorUuid = true; result.actorUuid_ = builderForValue.build(); return this; } - public Builder mergeActorUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol value) { + public Builder mergeActorUuid(akka.cluster.ClusterProtocol.UuidProtocol value) { if 
(result.hasActorUuid() && - result.actorUuid_ != akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) { + result.actorUuid_ != akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) { result.actorUuid_ = - akka.cloud.cluster.ClusterProtocol.UuidProtocol.newBuilder(result.actorUuid_).mergeFrom(value).buildPartial(); + akka.cluster.ClusterProtocol.UuidProtocol.newBuilder(result.actorUuid_).mergeFrom(value).buildPartial(); } else { result.actorUuid_ = value; } @@ -508,28 +508,28 @@ public final class ClusterProtocol { } public Builder clearActorUuid() { result.hasActorUuid = false; - result.actorUuid_ = akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); + result.actorUuid_ = akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); return this; } - // optional string actorId = 3; - public boolean hasActorId() { - return result.hasActorId(); + // optional string actorAddress = 3; + public boolean hasActorAddress() { + return result.hasActorAddress(); } - public java.lang.String getActorId() { - return result.getActorId(); + public java.lang.String getActorAddress() { + return result.getActorAddress(); } - public Builder setActorId(java.lang.String value) { + public Builder setActorAddress(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - result.hasActorId = true; - result.actorId_ = value; + result.hasActorAddress = true; + result.actorAddress_ = value; return this; } - public Builder clearActorId() { - result.hasActorId = false; - result.actorId_ = getDefaultInstance().getActorId(); + public Builder clearActorAddress() { + result.hasActorAddress = false; + result.actorAddress_ = getDefaultInstance().getActorAddress(); return this; } @@ -580,7 +580,7 @@ public final class ClusterProtocol { static { defaultInstance = new RemoteDaemonMessageProtocol(true); - akka.cloud.cluster.ClusterProtocol.internalForceInit(); + akka.cluster.ClusterProtocol.internalForceInit(); 
defaultInstance.initFields(); } @@ -606,34 +606,34 @@ public final class ClusterProtocol { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return akka.cloud.cluster.ClusterProtocol.internal_static_DurableMailboxMessageProtocol_descriptor; + return akka.cluster.ClusterProtocol.internal_static_DurableMailboxMessageProtocol_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return akka.cloud.cluster.ClusterProtocol.internal_static_DurableMailboxMessageProtocol_fieldAccessorTable; + return akka.cluster.ClusterProtocol.internal_static_DurableMailboxMessageProtocol_fieldAccessorTable; } - // required string ownerActorId = 1; - public static final int OWNERACTORID_FIELD_NUMBER = 1; - private boolean hasOwnerActorId; - private java.lang.String ownerActorId_ = ""; - public boolean hasOwnerActorId() { return hasOwnerActorId; } - public java.lang.String getOwnerActorId() { return ownerActorId_; } + // required string ownerActorAddress = 1; + public static final int OWNERACTORADDRESS_FIELD_NUMBER = 1; + private boolean hasOwnerActorAddress; + private java.lang.String ownerActorAddress_ = ""; + public boolean hasOwnerActorAddress() { return hasOwnerActorAddress; } + public java.lang.String getOwnerActorAddress() { return ownerActorAddress_; } - // optional string senderActorId = 2; - public static final int SENDERACTORID_FIELD_NUMBER = 2; - private boolean hasSenderActorId; - private java.lang.String senderActorId_ = ""; - public boolean hasSenderActorId() { return hasSenderActorId; } - public java.lang.String getSenderActorId() { return senderActorId_; } + // optional string senderActorAddress = 2; + public static final int SENDERACTORADDRESS_FIELD_NUMBER = 2; + private boolean hasSenderActorAddress; + private java.lang.String senderActorAddress_ = ""; + public boolean hasSenderActorAddress() { return hasSenderActorAddress; } + public java.lang.String getSenderActorAddress() { 
return senderActorAddress_; } // optional .UuidProtocol futureUuid = 3; public static final int FUTUREUUID_FIELD_NUMBER = 3; private boolean hasFutureUuid; - private akka.cloud.cluster.ClusterProtocol.UuidProtocol futureUuid_; + private akka.cluster.ClusterProtocol.UuidProtocol futureUuid_; public boolean hasFutureUuid() { return hasFutureUuid; } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol getFutureUuid() { return futureUuid_; } + public akka.cluster.ClusterProtocol.UuidProtocol getFutureUuid() { return futureUuid_; } // required bytes message = 4; public static final int MESSAGE_FIELD_NUMBER = 4; @@ -643,10 +643,10 @@ public final class ClusterProtocol { public com.google.protobuf.ByteString getMessage() { return message_; } private void initFields() { - futureUuid_ = akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); + futureUuid_ = akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); } public final boolean isInitialized() { - if (!hasOwnerActorId) return false; + if (!hasOwnerActorAddress) return false; if (!hasMessage) return false; if (hasFutureUuid()) { if (!getFutureUuid().isInitialized()) return false; @@ -657,11 +657,11 @@ public final class ClusterProtocol { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (hasOwnerActorId()) { - output.writeString(1, getOwnerActorId()); + if (hasOwnerActorAddress()) { + output.writeString(1, getOwnerActorAddress()); } - if (hasSenderActorId()) { - output.writeString(2, getSenderActorId()); + if (hasSenderActorAddress()) { + output.writeString(2, getSenderActorAddress()); } if (hasFutureUuid()) { output.writeMessage(3, getFutureUuid()); @@ -678,13 +678,13 @@ public final class ClusterProtocol { if (size != -1) return size; size = 0; - if (hasOwnerActorId()) { + if (hasOwnerActorAddress()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(1, getOwnerActorId()); + .computeStringSize(1, 
getOwnerActorAddress()); } - if (hasSenderActorId()) { + if (hasSenderActorAddress()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(2, getSenderActorId()); + .computeStringSize(2, getSenderActorAddress()); } if (hasFutureUuid()) { size += com.google.protobuf.CodedOutputStream @@ -699,41 +699,41 @@ public final class ClusterProtocol { return size; } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom(byte[] data) + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom(java.io.InputStream input) + public static 
akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseDelimitedFrom(java.io.InputStream input) + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -742,7 +742,7 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseDelimitedFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -753,12 +753,12 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( + public static akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -768,25 +768,25 @@ 
public final class ClusterProtocol { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol prototype) { + public static Builder newBuilder(akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol result; + private akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol result; - // Construct using akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.newBuilder() + // Construct using akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol(); + builder.result = new akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol(); return builder; } - protected akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol internalGetResult() { + protected akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol internalGetResult() { return result; } @@ -795,7 +795,7 @@ public final class ClusterProtocol { throw new IllegalStateException( "Cannot call clear() after build()."); } - result = new akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol(); + result = new akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol(); return this; } @@ -805,24 +805,24 @@ public final class ClusterProtocol { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDescriptor(); + return 
akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDescriptor(); } - public akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol getDefaultInstanceForType() { - return akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDefaultInstance(); + public akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol getDefaultInstanceForType() { + return akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } - public akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol build() { + public akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } - private akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol buildParsed() + private akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( @@ -831,32 +831,32 @@ public final class ClusterProtocol { return buildPartial(); } - public akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol buildPartial() { + public akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } - akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol returnMe = result; + akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol) { - return mergeFrom((akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol)other); + if (other instanceof 
akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol) { + return mergeFrom((akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol other) { - if (other == akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDefaultInstance()) return this; - if (other.hasOwnerActorId()) { - setOwnerActorId(other.getOwnerActorId()); + public Builder mergeFrom(akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol other) { + if (other == akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.getDefaultInstance()) return this; + if (other.hasOwnerActorAddress()) { + setOwnerActorAddress(other.getOwnerActorAddress()); } - if (other.hasSenderActorId()) { - setSenderActorId(other.getSenderActorId()); + if (other.hasSenderActorAddress()) { + setSenderActorAddress(other.getSenderActorAddress()); } if (other.hasFutureUuid()) { mergeFutureUuid(other.getFutureUuid()); @@ -890,15 +890,15 @@ public final class ClusterProtocol { break; } case 10: { - setOwnerActorId(input.readString()); + setOwnerActorAddress(input.readString()); break; } case 18: { - setSenderActorId(input.readString()); + setSenderActorAddress(input.readString()); break; } case 26: { - akka.cloud.cluster.ClusterProtocol.UuidProtocol.Builder subBuilder = akka.cloud.cluster.ClusterProtocol.UuidProtocol.newBuilder(); + akka.cluster.ClusterProtocol.UuidProtocol.Builder subBuilder = akka.cluster.ClusterProtocol.UuidProtocol.newBuilder(); if (hasFutureUuid()) { subBuilder.mergeFrom(getFutureUuid()); } @@ -915,45 +915,45 @@ public final class ClusterProtocol { } - // required string ownerActorId = 1; - public boolean hasOwnerActorId() { - return result.hasOwnerActorId(); + // required string ownerActorAddress = 1; + public boolean hasOwnerActorAddress() { + return result.hasOwnerActorAddress(); } - public java.lang.String getOwnerActorId() { - return 
result.getOwnerActorId(); + public java.lang.String getOwnerActorAddress() { + return result.getOwnerActorAddress(); } - public Builder setOwnerActorId(java.lang.String value) { + public Builder setOwnerActorAddress(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - result.hasOwnerActorId = true; - result.ownerActorId_ = value; + result.hasOwnerActorAddress = true; + result.ownerActorAddress_ = value; return this; } - public Builder clearOwnerActorId() { - result.hasOwnerActorId = false; - result.ownerActorId_ = getDefaultInstance().getOwnerActorId(); + public Builder clearOwnerActorAddress() { + result.hasOwnerActorAddress = false; + result.ownerActorAddress_ = getDefaultInstance().getOwnerActorAddress(); return this; } - // optional string senderActorId = 2; - public boolean hasSenderActorId() { - return result.hasSenderActorId(); + // optional string senderActorAddress = 2; + public boolean hasSenderActorAddress() { + return result.hasSenderActorAddress(); } - public java.lang.String getSenderActorId() { - return result.getSenderActorId(); + public java.lang.String getSenderActorAddress() { + return result.getSenderActorAddress(); } - public Builder setSenderActorId(java.lang.String value) { + public Builder setSenderActorAddress(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - result.hasSenderActorId = true; - result.senderActorId_ = value; + result.hasSenderActorAddress = true; + result.senderActorAddress_ = value; return this; } - public Builder clearSenderActorId() { - result.hasSenderActorId = false; - result.senderActorId_ = getDefaultInstance().getSenderActorId(); + public Builder clearSenderActorAddress() { + result.hasSenderActorAddress = false; + result.senderActorAddress_ = getDefaultInstance().getSenderActorAddress(); return this; } @@ -961,10 +961,10 @@ public final class ClusterProtocol { public boolean hasFutureUuid() { return result.hasFutureUuid(); } - public 
akka.cloud.cluster.ClusterProtocol.UuidProtocol getFutureUuid() { + public akka.cluster.ClusterProtocol.UuidProtocol getFutureUuid() { return result.getFutureUuid(); } - public Builder setFutureUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol value) { + public Builder setFutureUuid(akka.cluster.ClusterProtocol.UuidProtocol value) { if (value == null) { throw new NullPointerException(); } @@ -972,16 +972,16 @@ public final class ClusterProtocol { result.futureUuid_ = value; return this; } - public Builder setFutureUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol.Builder builderForValue) { + public Builder setFutureUuid(akka.cluster.ClusterProtocol.UuidProtocol.Builder builderForValue) { result.hasFutureUuid = true; result.futureUuid_ = builderForValue.build(); return this; } - public Builder mergeFutureUuid(akka.cloud.cluster.ClusterProtocol.UuidProtocol value) { + public Builder mergeFutureUuid(akka.cluster.ClusterProtocol.UuidProtocol value) { if (result.hasFutureUuid() && - result.futureUuid_ != akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) { + result.futureUuid_ != akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) { result.futureUuid_ = - akka.cloud.cluster.ClusterProtocol.UuidProtocol.newBuilder(result.futureUuid_).mergeFrom(value).buildPartial(); + akka.cluster.ClusterProtocol.UuidProtocol.newBuilder(result.futureUuid_).mergeFrom(value).buildPartial(); } else { result.futureUuid_ = value; } @@ -990,7 +990,7 @@ public final class ClusterProtocol { } public Builder clearFutureUuid() { result.hasFutureUuid = false; - result.futureUuid_ = akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); + result.futureUuid_ = akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); return this; } @@ -1020,7 +1020,7 @@ public final class ClusterProtocol { static { defaultInstance = new DurableMailboxMessageProtocol(true); - akka.cloud.cluster.ClusterProtocol.internalForceInit(); + 
akka.cluster.ClusterProtocol.internalForceInit(); defaultInstance.initFields(); } @@ -1046,12 +1046,12 @@ public final class ClusterProtocol { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return akka.cloud.cluster.ClusterProtocol.internal_static_UuidProtocol_descriptor; + return akka.cluster.ClusterProtocol.internal_static_UuidProtocol_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return akka.cloud.cluster.ClusterProtocol.internal_static_UuidProtocol_fieldAccessorTable; + return akka.cluster.ClusterProtocol.internal_static_UuidProtocol_fieldAccessorTable; } // required uint64 high = 1; @@ -1107,41 +1107,41 @@ public final class ClusterProtocol { return size; } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom(byte[] data) + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom(java.io.InputStream input) + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseDelimitedFrom(java.io.InputStream input) + public static akka.cluster.ClusterProtocol.UuidProtocol parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -1150,7 +1150,7 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseDelimitedFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1161,12 +1161,12 @@ public final class ClusterProtocol { return null; } } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.cloud.cluster.ClusterProtocol.UuidProtocol parseFrom( + public static akka.cluster.ClusterProtocol.UuidProtocol parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { @@ -1176,25 +1176,25 @@ public final class ClusterProtocol { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.cloud.cluster.ClusterProtocol.UuidProtocol prototype) { + public static Builder newBuilder(akka.cluster.ClusterProtocol.UuidProtocol prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { - private akka.cloud.cluster.ClusterProtocol.UuidProtocol result; + private akka.cluster.ClusterProtocol.UuidProtocol result; - // Construct using akka.cloud.cluster.ClusterProtocol.UuidProtocol.newBuilder() + // Construct using akka.cluster.ClusterProtocol.UuidProtocol.newBuilder() private Builder() {} private static Builder create() { Builder builder = new Builder(); - builder.result = new akka.cloud.cluster.ClusterProtocol.UuidProtocol(); + builder.result = new akka.cluster.ClusterProtocol.UuidProtocol(); return builder; } - protected akka.cloud.cluster.ClusterProtocol.UuidProtocol internalGetResult() { + protected akka.cluster.ClusterProtocol.UuidProtocol internalGetResult() { return result; } @@ -1203,7 +1203,7 @@ public final class ClusterProtocol { throw new IllegalStateException( "Cannot call clear() after build()."); } - result = new akka.cloud.cluster.ClusterProtocol.UuidProtocol(); + result = new akka.cluster.ClusterProtocol.UuidProtocol(); return this; } @@ -1213,24 +1213,24 @@ public final class ClusterProtocol { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDescriptor(); + return akka.cluster.ClusterProtocol.UuidProtocol.getDescriptor(); } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol getDefaultInstanceForType() { - return 
akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); + public akka.cluster.ClusterProtocol.UuidProtocol getDefaultInstanceForType() { + return akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance(); } public boolean isInitialized() { return result.isInitialized(); } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol build() { + public akka.cluster.ClusterProtocol.UuidProtocol build() { if (result != null && !isInitialized()) { throw newUninitializedMessageException(result); } return buildPartial(); } - private akka.cloud.cluster.ClusterProtocol.UuidProtocol buildParsed() + private akka.cluster.ClusterProtocol.UuidProtocol buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { throw newUninitializedMessageException( @@ -1239,27 +1239,27 @@ public final class ClusterProtocol { return buildPartial(); } - public akka.cloud.cluster.ClusterProtocol.UuidProtocol buildPartial() { + public akka.cluster.ClusterProtocol.UuidProtocol buildPartial() { if (result == null) { throw new IllegalStateException( "build() has already been called on this Builder."); } - akka.cloud.cluster.ClusterProtocol.UuidProtocol returnMe = result; + akka.cluster.ClusterProtocol.UuidProtocol returnMe = result; result = null; return returnMe; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.cloud.cluster.ClusterProtocol.UuidProtocol) { - return mergeFrom((akka.cloud.cluster.ClusterProtocol.UuidProtocol)other); + if (other instanceof akka.cluster.ClusterProtocol.UuidProtocol) { + return mergeFrom((akka.cluster.ClusterProtocol.UuidProtocol)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(akka.cloud.cluster.ClusterProtocol.UuidProtocol other) { - if (other == akka.cloud.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) return this; + public Builder mergeFrom(akka.cluster.ClusterProtocol.UuidProtocol other) { + if (other == 
akka.cluster.ClusterProtocol.UuidProtocol.getDefaultInstance()) return this; if (other.hasHigh()) { setHigh(other.getHigh()); } @@ -1345,7 +1345,7 @@ public final class ClusterProtocol { static { defaultInstance = new UuidProtocol(true); - akka.cloud.cluster.ClusterProtocol.internalForceInit(); + akka.cluster.ClusterProtocol.internalForceInit(); defaultInstance.initFields(); } @@ -1376,23 +1376,24 @@ public final class ClusterProtocol { descriptor; static { java.lang.String[] descriptorData = { - "\n\025ClusterProtocol.proto\"\250\001\n\033RemoteDaemon" + + "\n\025ClusterProtocol.proto\"\255\001\n\033RemoteDaemon" + "MessageProtocol\022-\n\013messageType\030\001 \002(\0162\030.R" + "emoteDaemonMessageType\022 \n\tactorUuid\030\002 \001(" + - "\0132\r.UuidProtocol\022\017\n\007actorId\030\003 \001(\t\022\026\n\016act" + - "orClassName\030\004 \001(\t\022\017\n\007payload\030\005 \001(\014\"\200\001\n\035D" + - "urableMailboxMessageProtocol\022\024\n\014ownerAct" + - "orId\030\001 \002(\t\022\025\n\rsenderActorId\030\002 \001(\t\022!\n\nfut" + - "ureUuid\030\003 \001(\0132\r.UuidProtocol\022\017\n\007message\030" + - "\004 \002(\014\")\n\014UuidProtocol\022\014\n\004high\030\001 \002(\004\022\013\n\003l" + - "ow\030\002 \002(\004*\232\002\n\027RemoteDaemonMessageType\022\t\n\005", - "START\020\001\022\010\n\004STOP\020\002\022\007\n\003USE\020\003\022\013\n\007RELEASE\020\004\022" + - "\022\n\016MAKE_AVAILABLE\020\005\022\024\n\020MAKE_UNAVAILABLE\020" + - "\006\022\016\n\nDISCONNECT\020\007\022\r\n\tRECONNECT\020\010\022\n\n\006RESI" + - "GN\020\t\022\031\n\025FAIL_OVER_CONNECTIONS\020\n\022\026\n\022FUNCT" + - "ION_FUN0_UNIT\020\013\022\025\n\021FUNCTION_FUN0_ANY\020\014\022\032" + - "\n\026FUNCTION_FUN1_ARG_UNIT\020\r\022\031\n\025FUNCTION_F" + - "UN1_ARG_ANY\020\016B\026\n\022akka.cloud.clusterH\001" + "\0132\r.UuidProtocol\022\024\n\014actorAddress\030\003 \001(\t\022\026" + + "\n\016actorClassName\030\004 \001(\t\022\017\n\007payload\030\005 \001(\014\"" + + 
"\212\001\n\035DurableMailboxMessageProtocol\022\031\n\021own" + + "erActorAddress\030\001 \002(\t\022\032\n\022senderActorAddre" + + "ss\030\002 \001(\t\022!\n\nfutureUuid\030\003 \001(\0132\r.UuidProto" + + "col\022\017\n\007message\030\004 \002(\014\")\n\014UuidProtocol\022\014\n\004" + + "high\030\001 \002(\004\022\013\n\003low\030\002 \002(\004*\232\002\n\027RemoteDaemon", + "MessageType\022\t\n\005START\020\001\022\010\n\004STOP\020\002\022\007\n\003USE\020" + + "\003\022\013\n\007RELEASE\020\004\022\022\n\016MAKE_AVAILABLE\020\005\022\024\n\020MA" + + "KE_UNAVAILABLE\020\006\022\016\n\nDISCONNECT\020\007\022\r\n\tRECO" + + "NNECT\020\010\022\n\n\006RESIGN\020\t\022\031\n\025FAIL_OVER_CONNECT" + + "IONS\020\n\022\026\n\022FUNCTION_FUN0_UNIT\020\013\022\025\n\021FUNCTI" + + "ON_FUN0_ANY\020\014\022\032\n\026FUNCTION_FUN1_ARG_UNIT\020" + + "\r\022\031\n\025FUNCTION_FUN1_ARG_ANY\020\016B\020\n\014akka.clu" + + "sterH\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -1404,25 +1405,25 @@ public final class ClusterProtocol { internal_static_RemoteDaemonMessageProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteDaemonMessageProtocol_descriptor, - new java.lang.String[] { "MessageType", "ActorUuid", "ActorId", "ActorClassName", "Payload", }, - akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.class, - akka.cloud.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.Builder.class); + new java.lang.String[] { "MessageType", "ActorUuid", "ActorAddress", "ActorClassName", "Payload", }, + akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.class, + akka.cluster.ClusterProtocol.RemoteDaemonMessageProtocol.Builder.class); internal_static_DurableMailboxMessageProtocol_descriptor = getDescriptor().getMessageTypes().get(1); 
internal_static_DurableMailboxMessageProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_DurableMailboxMessageProtocol_descriptor, - new java.lang.String[] { "OwnerActorId", "SenderActorId", "FutureUuid", "Message", }, - akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.class, - akka.cloud.cluster.ClusterProtocol.DurableMailboxMessageProtocol.Builder.class); + new java.lang.String[] { "OwnerActorAddress", "SenderActorAddress", "FutureUuid", "Message", }, + akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.class, + akka.cluster.ClusterProtocol.DurableMailboxMessageProtocol.Builder.class); internal_static_UuidProtocol_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_UuidProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_UuidProtocol_descriptor, new java.lang.String[] { "High", "Low", }, - akka.cloud.cluster.ClusterProtocol.UuidProtocol.class, - akka.cloud.cluster.ClusterProtocol.UuidProtocol.Builder.class); + akka.cluster.ClusterProtocol.UuidProtocol.class, + akka.cluster.ClusterProtocol.UuidProtocol.Builder.class); return null; } }; diff --git a/akka-cluster/src/main/java/akka/cloud/cluster/LocalBookKeeper.java b/akka-cluster/src/main/java/akka/cluster/LocalBookKeeper.java similarity index 99% rename from akka-cluster/src/main/java/akka/cloud/cluster/LocalBookKeeper.java rename to akka-cluster/src/main/java/akka/cluster/LocalBookKeeper.java index dbd6ff26c9..e358ff8ae4 100644 --- a/akka-cluster/src/main/java/akka/cloud/cluster/LocalBookKeeper.java +++ b/akka-cluster/src/main/java/akka/cluster/LocalBookKeeper.java @@ -1,4 +1,4 @@ -package akka.cloud.cluster; +package akka.cluster; /** * Licensed to the Apache Software Foundation (ASF) under one diff --git a/akka-zookeeper/src/main/java/akka/cloud/zookeeper/DistributedQueue.java b/akka-cluster/src/main/java/akka/cluster/zookeeper/DistributedQueue.java similarity 
index 99% rename from akka-zookeeper/src/main/java/akka/cloud/zookeeper/DistributedQueue.java rename to akka-cluster/src/main/java/akka/cluster/zookeeper/DistributedQueue.java index 47586436d2..6dc1d05062 100644 --- a/akka-zookeeper/src/main/java/akka/cloud/zookeeper/DistributedQueue.java +++ b/akka-cluster/src/main/java/akka/cluster/zookeeper/DistributedQueue.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package akka.cloud.zookeeper; +package akka.cluster.zookeeper; import java.util.List; import java.util.NoSuchElementException; diff --git a/akka-zookeeper/src/main/java/akka/cloud/zookeeper/ZooKeeperQueue.java b/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java similarity index 99% rename from akka-zookeeper/src/main/java/akka/cloud/zookeeper/ZooKeeperQueue.java rename to akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java index 01a427180b..00524ed91f 100644 --- a/akka-zookeeper/src/main/java/akka/cloud/zookeeper/ZooKeeperQueue.java +++ b/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java @@ -2,7 +2,7 @@ * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.zookeeper; +package akka.cluster.zookeeper; import java.io.Serializable; import java.util.List; diff --git a/akka-cluster/src/main/protocol/ClusterProtocol.proto b/akka-cluster/src/main/protocol/ClusterProtocol.proto index 6661564240..aa626fee3b 100644 --- a/akka-cluster/src/main/protocol/ClusterProtocol.proto +++ b/akka-cluster/src/main/protocol/ClusterProtocol.proto @@ -2,12 +2,12 @@ * Copyright (C) 2009-2011 Scalable Solutions AB */ -option java_package = "akka.cloud.cluster"; +option java_package = "akka.cluster"; option optimize_for = SPEED; /****************************************** Compile with: - cd ./akka-cloud-cluster/src/main/protocol + cd ./akka-cluster/src/main/protocol protoc ClusterProtocol.proto --java_out ../java *******************************************/ @@ -17,7 +17,7 @@ option optimize_for = SPEED; 
message RemoteDaemonMessageProtocol { required RemoteDaemonMessageType messageType = 1; optional UuidProtocol actorUuid = 2; - optional string actorId = 3; + optional string actorAddress = 3; optional string actorClassName = 4; optional bytes payload = 5; } @@ -46,8 +46,8 @@ enum RemoteDaemonMessageType { * Defines the durable mailbox message. */ message DurableMailboxMessageProtocol { - required string ownerActorId = 1; - optional string senderActorId = 2; + required string ownerActorAddress= 1; + optional string senderActorAddress = 2; optional UuidProtocol futureUuid = 3; required bytes message = 4; } diff --git a/akka-cluster/src/main/protocol/EventProtocol.proto b/akka-cluster/src/main/protocol/EventProtocol.proto deleted file mode 100644 index 92e893265f..0000000000 --- a/akka-cluster/src/main/protocol/EventProtocol.proto +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (C) 2009-2011 Scalable Solutions AB - */ - - option java_package = "akka.cloud.cluster"; - option optimize_for = SPEED; - - /****************************************** - Compile with: - cd ./akka-cloud-cluster/src/main/protocol - protoc MonitoringProtocol.proto --java_out ../java -*/ - -message GenericLoggingEvent { - required string context = 1; - required string message = 2; - required uint64 time = 3; -} - -message AuditEvent { - required string name = 1; - required string context = 2; - required string message = 3; - required uint64 time = 4; -} - -message CounterAddEvent { - required string name = 1; - required uint64 delta = 2; -} - -message CounterSetEvent { - required string name = 1; - required uint32 value = 2; -} - -message CounterResetEvent { - required string name = 1; -} - -message AverageAddEvent { - required string name = 1; - required uint64 value = 2; -} - -message AverageResetEvent { - required string name = 1; -} - diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/BookKeeperServer.scala b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala similarity 
index 98% rename from akka-cluster/src/main/scala/akka/cloud/cluster/BookKeeperServer.scala rename to akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala index 4e9a6c4f46..7db2d63a1a 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/BookKeeperServer.scala +++ b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import org.apache.bookkeeper.proto.BookieServer diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala similarity index 88% rename from akka-cluster/src/main/scala/akka/cloud/cluster/Cluster.scala rename to akka-cluster/src/main/scala/akka/cluster/Cluster.scala index 729e9ae498..f6f35f0496 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/Cluster.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import org.apache.zookeeper._ import org.apache.zookeeper.Watcher.Event._ @@ -26,6 +26,7 @@ import ClusterProtocol._ import RemoteDaemonMessageType._ import akka.util._ +import Helpers._ import akka.actor._ import akka.actor.Actor._ import akka.event.EventHandler @@ -36,10 +37,8 @@ import akka.serialization.{Format, Serializer} import akka.serialization.Compression.LZF import akka.AkkaException -import akka.cloud.common.JMX -import akka.cloud.common.Util._ -import akka.cloud.monitoring.Monitoring -import akka.cloud.zookeeper._ +//import akka.cloud.monitoring.Monitoring +import akka.cluster.zookeeper._ import com.eaio.uuid.UUID @@ -113,14 +112,14 @@ final case class NodeAddress( } case class ActorAddress( - actorUuid: UUID = null, - actorId: String = Cluster.EMPTY_STRING, - actorClassName: String = Cluster.EMPTY_STRING) + uuid: UUID = null, + address: String = Cluster.EMPTY_STRING, + className: 
String = Cluster.EMPTY_STRING) object ActorAddress { - def forUuid(actorUuid: UUID) = ActorAddress(actorUuid, Cluster.EMPTY_STRING, Cluster.EMPTY_STRING) - def forId(actorId: String) = ActorAddress(null, actorId, Cluster.EMPTY_STRING) - def forClassName(actorClassName: String) = ActorAddress(null, actorClassName, Cluster.EMPTY_STRING) + def forUuid(uuid: UUID) = ActorAddress(uuid, Cluster.EMPTY_STRING, Cluster.EMPTY_STRING) + def forAddress(address: String) = ActorAddress(null, address, Cluster.EMPTY_STRING) + def forClassName(className: String) = ActorAddress(null, className, Cluster.EMPTY_STRING) } /** @@ -133,13 +132,13 @@ object Cluster { val UUID_PREFIX = "uuid:".intern // config options - val zooKeeperServers = config.getString("akka.cloud.cluster.zookeeper-server-addresses", "localhost:2181") - val remoteServerPort = config.getInt("akka.cloud.cluster.remote-server-port", 2552) - val sessionTimeout = Duration(config.getInt("akka.cloud.cluster.session-timeout", 60), TIME_UNIT).toMillis.toInt - val connectionTimeout = Duration(config.getInt("akka.cloud.cluster.connection-timeout", 60), TIME_UNIT).toMillis.toInt - val maxTimeToWaitUntilConnected = Duration(config.getInt("akka.cloud.cluster.max-time-to-wait-until-connected", 30), TIME_UNIT).toMillis.toInt - val shouldCompressData = config.getBool("akka.cloud.cluster.use-compression", false) - val enableJMX = config.getBool("akka.enable-jmx", true) + val zooKeeperServers = config.getString("akka.cluster.zookeeper-server-addresses", "localhost:2181") + val remoteServerPort = config.getInt("akka.cluster.remote-server-port", 2552) + val sessionTimeout = Duration(config.getInt("akka.cluster.session-timeout", 60), TIME_UNIT).toMillis.toInt + val connectionTimeout = Duration(config.getInt("akka.cluster.connection-timeout", 60), TIME_UNIT).toMillis.toInt + val maxTimeToWaitUntilConnected = Duration(config.getInt("akka.cluster.max-time-to-wait-until-connected", 30), TIME_UNIT).toMillis.toInt + val shouldCompressData = 
config.getBool("akka.cluster.use-compression", false) + val enableJMX = config.getBool("akka.enable-jmx", true) /** * Cluster membership change listener. @@ -409,14 +408,14 @@ class ClusterNode private[akka] ( case RemoteClientDisconnected(client, address) => client.shutdownClientModule case _ => //ignore other } - }).start + }, "akka.cluster.remoteClientLifeCycleListener").start - val remoteDaemon = actorOf(new RemoteClusterDaemon(this)).start + val remoteDaemon = actorOf(new RemoteClusterDaemon(this), RemoteClusterDaemon.ADDRESS).start val remoteService: RemoteSupport = { val remote = new akka.remote.netty.NettyRemoteSupport remote.start(nodeAddress.hostname, nodeAddress.port) - remote.register(RemoteClusterDaemon.ID, remoteDaemon) + remote.register(RemoteClusterDaemon.ADDRESS, remoteDaemon) remote.addListener(remoteClientLifeCycleListener) remote } @@ -530,7 +529,7 @@ class ClusterNode private[akka] ( remoteDaemon.stop // for monitoring remote listener - registry.actors.filter(remoteService.hasListener).foreach(_.stop) + registry.local.actors.filter(remoteService.hasListener).foreach(_.stop) replicaConnections.clear updateNodes(_ - nodeAddress) @@ -689,11 +688,11 @@ class ClusterNode private[akka] ( case e: ZkNodeExistsException => zkClient.writeData(actorRegistryFormatNodePathFor(uuid), format) } - // create UUID -> ID registry + // create UUID -> ADDRESS registry try { - zkClient.createPersistent(actorRegistryActorIdNodePathFor(uuid), actorRef.id) + zkClient.createPersistent(actorRegistryactorAddressNodePathFor(uuid), actorRef.address) } catch { - case e: ZkNodeExistsException => zkClient.writeData(actorRegistryActorIdNodePathFor(uuid), actorRef.id) + case e: ZkNodeExistsException => zkClient.writeData(actorRegistryactorAddressNodePathFor(uuid), actorRef.address) } // create UUID -> class name registry @@ -710,8 +709,8 @@ class ClusterNode private[akka] ( ignore[ZkNodeExistsException]( zkClient.createPersistent(actorLocationsNodePathFor(uuid)) ) // create ID 
-> UUIDs registry - ignore[ZkNodeExistsException]( zkClient.createPersistent(actorIdToUuidsNodePathFor(actorRef.id)) ) - ignore[ZkNodeExistsException]( zkClient.createPersistent("%s/%s".format(actorIdToUuidsNodePathFor(actorRef.id), uuid)) ) + ignore[ZkNodeExistsException]( zkClient.createPersistent(actorAddressToUuidsNodePathFor(actorRef.address)) ) + ignore[ZkNodeExistsException]( zkClient.createPersistent("%s/%s".format(actorAddressToUuidsNodePathFor(actorRef.address), uuid)) ) // create class name -> UUIDs registry ignore[ZkNodeExistsException]( zkClient.createPersistent(actorClassNameToUuidsNodePathFor(actorRef.actorClassName)) ) @@ -735,46 +734,46 @@ class ClusterNode private[akka] ( * clusterNode remove classOf[MyActor] * */ - def remove[T <: Actor](actorClass: Class[T]): ClusterNode = remove(ActorAddress(actorClassName = actorClass.getName)) + def remove[T <: Actor](actorClass: Class[T]): ClusterNode = remove(ActorAddress(className = actorClass.getName)) /** * Removes actor with UUID from the cluster. 
*/ def remove(actorAddress: ActorAddress): ClusterNode = { - def removeByUuid(actorUuid: UUID) = { - releaseActorOnAllNodes(actorUuid) + def removeByUuid(uuid: UUID) = { + releaseActorOnAllNodes(uuid) - locallyCheckedOutActors.remove(actorUuid) + locallyCheckedOutActors.remove(uuid) // warning: ordering matters here - ignore[ZkNoNodeException](zkClient.deleteRecursive(actorIdToUuidsNodePathFor(actorIdForUuid(actorUuid)))) // remove ID to UUID mapping - ignore[ZkNoNodeException](zkClient.deleteRecursive(actorClassNameToUuidsNodePathFor(actorClassNameForUuid(actorUuid)))) // remove class name to UUID mapping - ignore[ZkNoNodeException](zkClient.deleteRecursive(actorAtAddressNodePathFor(nodeAddress.nodeName, actorUuid))) - ignore[ZkNoNodeException](zkClient.deleteRecursive(actorRegistryNodePathFor(actorUuid))) - ignore[ZkNoNodeException](zkClient.deleteRecursive(actorLocationsNodePathFor(actorUuid))) + ignore[ZkNoNodeException](zkClient.deleteRecursive(actorAddressToUuidsNodePathFor(actorAddressForUuid(uuid)))) // remove ID to UUID mapping + ignore[ZkNoNodeException](zkClient.deleteRecursive(actorClassNameToUuidsNodePathFor(actorClassNameForUuid(uuid)))) // remove class name to UUID mapping + ignore[ZkNoNodeException](zkClient.deleteRecursive(actorAtAddressNodePathFor(nodeAddress.nodeName, uuid))) + ignore[ZkNoNodeException](zkClient.deleteRecursive(actorRegistryNodePathFor(uuid))) + ignore[ZkNoNodeException](zkClient.deleteRecursive(actorLocationsNodePathFor(uuid))) } isConnected ifOn { // remove by UUID - if (actorAddress.actorUuid ne null) { + if (actorAddress.uuid ne null) { EventHandler.debug(this, - "Removing actor with UUID [%s] from cluster".format(actorAddress.actorUuid)) - removeByUuid(actorAddress.actorUuid) + "Removing actor with UUID [%s] from cluster".format(actorAddress.uuid)) + removeByUuid(actorAddress.uuid) // remove by ID - } else if (actorAddress.actorId != EMPTY_STRING) { + } else if (actorAddress.address != EMPTY_STRING) { 
EventHandler.debug(this, - "Removing actor(s) with ID [%s] from cluster".format(actorAddress.actorId)) - uuidsForActorId(actorAddress.actorId) foreach (uuid => removeByUuid(uuid)) + "Removing actor(s) with ID [%s] from cluster".format(actorAddress.address)) + uuidsForActorAddress(actorAddress.address) foreach (uuid => removeByUuid(uuid)) // remove by class name - } else if (actorAddress.actorClassName != EMPTY_STRING) { + } else if (actorAddress.className != EMPTY_STRING) { EventHandler.debug(this, - "Removing actor(s) with class name [%s] from cluster".format(actorAddress.actorClassName)) - uuidsForActorClassName(actorAddress.actorClassName) foreach (uuid => removeByUuid(uuid)) + "Removing actor(s) with class name [%s] from cluster".format(actorAddress.className)) + uuidsForActorClassName(actorAddress.className) foreach (uuid => removeByUuid(uuid)) } else throw new IllegalArgumentException( - "You need to pass in at least one of 'actorUuid' or 'actorId' or 'actorClassName' to 'ClusterNode.remove(..)'") + "You need to pass in at least one of 'uuid' or 'actorAddress' or 'className' to 'ClusterNode.remove(..)'") } this } @@ -919,48 +918,48 @@ class ClusterNode private[akka] ( def registerClusterActorRefForAddress(actorRef: ClusterActorRef, addresses: Array[(UUID, InetSocketAddress)]) = addresses foreach { case (_, address) => clusterActorRefs.put(address, actorRef) } - def refByUuid(actorUuid: UUID): ActorRef = { - val actorClassName = actorClassNameForUuid(actorUuid) + def refByUuid(uuid: UUID): ActorRef = { + val className = actorClassNameForUuid(uuid) val actor = Router newRouter ( router, addresses, - uuidToString(actorUuid), actorClassName, + uuidToString(uuid), className, Cluster.lookupLocalhostName, Cluster.remoteServerPort, // set it to local hostname:port Actor.TIMEOUT, actorType) registerClusterActorRefForAddress(actor, addresses) actor } - def refById(actorId: String): ActorRef = { - val uuids = uuidsForActorId(actorId) - val actorClassName = 
uuids.map(uuid => actorClassNameForUuid(uuid)).head - if (actorClassName eq null) throw new IllegalStateException( + def refById(actorAddress: String): ActorRef = { + val uuids = uuidsForActorAddress(actorAddress) + val className = uuids.map(uuid => actorClassNameForUuid(uuid)).head + if (className eq null) throw new IllegalStateException( "Actor class name for actor with UUID [" + uuids.head + "] could not be retrieved") val actor = Router newRouter ( router, addresses, - actorId, actorClassName, + actorAddress, className, Cluster.lookupLocalhostName, Cluster.remoteServerPort, // set it to local hostname:port Actor.TIMEOUT, actorType) registerClusterActorRefForAddress(actor, addresses) actor } - def refByClassName(actorClassName: String): ActorRef = { + def refByClassName(className: String): ActorRef = { val actor = Router newRouter ( router, addresses, - actorClassName, actorClassName, + className, className, Cluster.lookupLocalhostName, Cluster.remoteServerPort, // set it to local hostname:port Actor.TIMEOUT, actorType) registerClusterActorRefForAddress(actor, addresses) actor } - val actorUuid = actorAddress.actorUuid - val actorId = actorAddress.actorId - val actorClassName = actorAddress.actorClassName - if ((actorUuid ne null) && actorId == EMPTY_STRING && actorClassName == EMPTY_STRING) refByUuid(actorUuid) - else if (actorId != EMPTY_STRING && (actorUuid eq null) && actorClassName == EMPTY_STRING) refById(actorId) - else if (actorClassName != EMPTY_STRING && (actorUuid eq null) && actorId == EMPTY_STRING) refByClassName(actorClassName) - else throw new IllegalArgumentException("You need to pass in either 'actorUuid' or 'actorId' or 'actorClassName' and only one of them") + val uuid = actorAddress.uuid + val address = actorAddress.address + val className = actorAddress.className + if ((uuid ne null) && address == EMPTY_STRING && className == EMPTY_STRING) refByUuid(uuid) + else if (address != EMPTY_STRING && (uuid eq null) && className == EMPTY_STRING) 
refById(address) + else if (className != EMPTY_STRING && (uuid eq null) && address == EMPTY_STRING) refByClassName(className) + else throw new IllegalArgumentException("You need to pass in either 'uuid' or 'actorAddress' or 'className' and only one of them") } else throw new ClusterException("Not connected to cluster") /** @@ -990,7 +989,7 @@ class ClusterNode private[akka] ( /** * Returns the IDs of all actors checked out on this node. */ - def idsForActorsInUse: Array[String] = actorIdsForUuids(uuidsForActorsInUse) + def idsForActorsInUse: Array[String] = actorAddresssForUuids(uuidsForActorsInUse) /** * Returns the class names of all actors checked out on this node. @@ -1007,7 +1006,7 @@ class ClusterNode private[akka] ( /** * Returns the IDs of all actors registered in this cluster. */ - def idsForClusteredActors: Array[String] = actorIdsForUuids(uuidsForClusteredActors) + def idsForClusteredActors: Array[String] = actorAddresssForUuids(uuidsForClusteredActors) /** * Returns the class names of all actors registered in this cluster. @@ -1017,15 +1016,15 @@ class ClusterNode private[akka] ( /** * Returns the actor id for the actor with a specific UUID. */ - def actorIdForUuid(uuid: UUID): String = if (isConnected.isOn) { - try { zkClient.readData(actorRegistryActorIdNodePathFor(uuid)).asInstanceOf[String] } + def actorAddressForUuid(uuid: UUID): String = if (isConnected.isOn) { + try { zkClient.readData(actorRegistryactorAddressNodePathFor(uuid)).asInstanceOf[String] } catch { case e: ZkNoNodeException => "" } } else "" /** * Returns the actor ids for all the actors with a specific UUID. */ - def actorIdsForUuids(uuids: Array[UUID]): Array[String] = uuids map (actorIdForUuid(_)) filter (_ != "") + def actorAddresssForUuids(uuids: Array[UUID]): Array[String] = uuids map (actorAddressForUuid(_)) filter (_ != "") /** * Returns the actor class name for the actor with a specific UUID. 
@@ -1043,16 +1042,16 @@ class ClusterNode private[akka] ( /** * Returns the actor UUIDs for actor ID. */ - def uuidsForActorId(actorId: String): Array[UUID] = if (isConnected.isOn) { - try { zkClient.getChildren(actorIdToUuidsNodePathFor(actorId)).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } + def uuidsForActorAddress(actorAddress: String): Array[UUID] = if (isConnected.isOn) { + try { zkClient.getChildren(actorAddressToUuidsNodePathFor(actorAddress)).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } catch { case e: ZkNoNodeException => Array[UUID]() } } else Array.empty[UUID] /** * Returns the actor UUIDs for actor class name. */ - def uuidsForActorClassName(actorClassName: String): Array[UUID] = if (isConnected.isOn) { - try { zkClient.getChildren(actorClassNameToUuidsNodePathFor(actorClassName)).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } + def uuidsForActorClassName(className: String): Array[UUID] = if (isConnected.isOn) { + try { zkClient.getChildren(actorClassNameToUuidsNodePathFor(className)).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } catch { case e: ZkNoNodeException => Array[UUID]() } } else Array.empty[UUID] @@ -1103,7 +1102,7 @@ class ClusterNode private[akka] ( val uuids = try { zkClient.getChildren(actorsAtAddressNodePathFor(nodeName)).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } catch { case e: ZkNoNodeException => Array[UUID]() } - actorIdsForUuids(uuids) + actorAddresssForUuids(uuids) } else Array.empty[String] /** @@ -1270,23 +1269,23 @@ class ClusterNode private[akka] ( private[cluster] def configurationNodePathFor(key: String) = "%s/%s".format(CONFIGURATION_NODE, key) - private[cluster] def actorIdToUuidsNodePathFor(actorId: String) = "%s/%s".format(ACTOR_ID_TO_UUIDS_NODE, actorId.replace('.', '_')) - private[cluster] def actorClassNameToUuidsNodePathFor(actorClassName: String) = "%s/%s".format(ACTOR_CLASS_TO_UUIDS_NODE, actorClassName) + private[cluster] def 
actorAddressToUuidsNodePathFor(actorAddress: String) = "%s/%s".format(ACTOR_ID_TO_UUIDS_NODE, actorAddress.replace('.', '_')) + private[cluster] def actorClassNameToUuidsNodePathFor(className: String) = "%s/%s".format(ACTOR_CLASS_TO_UUIDS_NODE, className) - private[cluster] def actorLocationsNodePathFor(actorUuid: UUID) = "%s/%s".format(ACTOR_LOCATIONS_NODE, actorUuid) - private[cluster] def actorLocationsNodePathFor(actorUuid: UUID, node: NodeAddress) = - "%s/%s/%s".format(ACTOR_LOCATIONS_NODE, actorUuid, node.nodeName) + private[cluster] def actorLocationsNodePathFor(uuid: UUID) = "%s/%s".format(ACTOR_LOCATIONS_NODE, uuid) + private[cluster] def actorLocationsNodePathFor(uuid: UUID, node: NodeAddress) = + "%s/%s/%s".format(ACTOR_LOCATIONS_NODE, uuid, node.nodeName) private[cluster] def actorsAtAddressNodePathFor(node: String) = "%s/%s".format(ACTORS_AT_ADDRESS_NODE, node) private[cluster] def actorAtAddressNodePathFor(node: String, uuid: UUID) = "%s/%s/%s".format(ACTORS_AT_ADDRESS_NODE, node, uuid) - private[cluster] def actorRegistryNodePathFor(actorUuid: UUID) = "%s/%s".format(ACTOR_REGISTRY_NODE, actorUuid) - private[cluster] def actorRegistryFormatNodePathFor(actorUuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(actorUuid), "format") - private[cluster] def actorRegistryActorIdNodePathFor(actorUuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(actorUuid), "id") - private[cluster] def actorRegistryActorClassNameNodePathFor(actorUuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(actorUuid), "class") - private[cluster] def actorRegistryAddressNodePathFor(actorUuid: UUID): String = "%s/%s".format(actorRegistryNodePathFor(actorUuid), "address") - private[cluster] def actorRegistryAddressNodePathFor(actorUuid: UUID, address: InetSocketAddress): String = - "%s/%s:%s".format(actorRegistryAddressNodePathFor(actorUuid), address.getHostName, address.getPort) + private[cluster] def actorRegistryNodePathFor(uuid: UUID) = "%s/%s".format(ACTOR_REGISTRY_NODE, uuid) 
+ private[cluster] def actorRegistryFormatNodePathFor(uuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(uuid), "format") + private[cluster] def actorRegistryactorAddressNodePathFor(uuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(uuid), "id") + private[cluster] def actorRegistryActorClassNameNodePathFor(uuid: UUID) = "%s/%s".format(actorRegistryNodePathFor(uuid), "class") + private[cluster] def actorRegistryAddressNodePathFor(uuid: UUID): String = "%s/%s".format(actorRegistryNodePathFor(uuid), "address") + private[cluster] def actorRegistryAddressNodePathFor(uuid: UUID, address: InetSocketAddress): String = + "%s/%s:%s".format(actorRegistryAddressNodePathFor(uuid), address.getHostName, address.getPort) private[cluster] def initializeNode = { EventHandler.info(this, "Initializing cluster node [%s]".format(nodeAddress)) @@ -1311,13 +1310,13 @@ class ClusterNode private[akka] ( } private def actorUuidsForActorAddress(actorAddress: ActorAddress): Array[UUID] = { - val actorUuid = actorAddress.actorUuid - val actorId = actorAddress.actorId - val actorClassName = actorAddress.actorClassName - if ((actorUuid ne null) && actorId == EMPTY_STRING && actorClassName == EMPTY_STRING) Array(actorUuid) - else if (actorId != EMPTY_STRING && (actorUuid eq null) && actorClassName == EMPTY_STRING) uuidsForActorId(actorId) - else if (actorClassName != EMPTY_STRING && (actorUuid eq null) && actorId == EMPTY_STRING) uuidsForActorClassName(actorClassName) - else throw new IllegalArgumentException("You need to pass in either 'actorUuid' or 'actorId' or 'actorClassName' and only one of them") + val uuid = actorAddress.uuid + val address = actorAddress.address + val className = actorAddress.className + if ((uuid ne null) && address == EMPTY_STRING && className == EMPTY_STRING) Array(uuid) + else if (address != EMPTY_STRING && (uuid eq null) && className == EMPTY_STRING) uuidsForActorAddress(address) + else if (className != EMPTY_STRING && (uuid eq null) && address == EMPTY_STRING) 
uuidsForActorClassName(className) + else throw new IllegalArgumentException("You need to pass in either 'uuid' or 'actorAddress' or 'className' and only one of them") } filter (_ ne null) /** @@ -1355,7 +1354,7 @@ class ClusterNode private[akka] ( membershipNodes foreach { node => if (!replicaConnections.contains(node)) { val address = addressForNode(node) - val clusterDaemon = Actor.remote.actorFor(RemoteClusterDaemon.ID, address.getHostName, address.getPort) + val clusterDaemon = Actor.remote.actorFor(RemoteClusterDaemon.ADDRESS, address.getHostName, address.getPort) replicaConnections.put(node, (address, clusterDaemon)) } } @@ -1404,12 +1403,12 @@ class ClusterNode private[akka] ( "Migrating actors from failed node [%s] to node [%s]: Actor UUIDs [%s]" .format(failedNodeName, nodeAddress.nodeName, actorUuidsForFailedNode)) - actorUuidsForFailedNode.foreach { actorUuid => + actorUuidsForFailedNode.foreach { uuid => EventHandler.debug(this, "Cluster node [%s] has failed, migrating actor with UUID [%s] to [%s]" - .format(failedNodeName, actorUuid, nodeAddress.nodeName)) + .format(failedNodeName, uuid, nodeAddress.nodeName)) - val actorAddress = ActorAddress(actorUuid = stringToUuid(actorUuid)) + val actorAddress = ActorAddress(uuid = stringToUuid(uuid)) migrateWithoutCheckingThatActorResidesOnItsHomeNode( // since the ephemeral node is already gone, so can't check NodeAddress(nodeAddress.clusterName, failedNodeName), nodeAddress, actorAddress) @@ -1421,7 +1420,7 @@ class ClusterNode private[akka] ( homeAddress.setAccessible(true) homeAddress.set(actor, Some(remoteServerAddress)) - remoteService.register(actorUuid, actor) + remoteService.register(uuid, actor) } } @@ -1448,7 +1447,7 @@ class ClusterNode private[akka] ( from: NodeAddress, to: NodeAddress, actorAddress: ActorAddress) { actorUuidsForActorAddress(actorAddress) map { uuid => - val actorAddress = ActorAddress(actorUuid = uuid) + val actorAddress = ActorAddress(uuid = uuid) if (!isInUseOnNode(actorAddress, 
to)) { release(actorAddress) @@ -1560,7 +1559,9 @@ class ClusterNode private[akka] ( } JMX.register(clusterJmxObjectName, clusterMBean) - Monitoring.registerLocalMBean(clusterJmxObjectName, clusterMBean) + + // FIXME need monitoring to lookup the cluster MBean dynamically + // Monitoring.registerLocalMBean(clusterJmxObjectName, clusterMBean) } } @@ -1639,7 +1640,7 @@ trait ErrorHandler { * @author Jonas Bonér */ object RemoteClusterDaemon { - val ID = "akka:cloud:cluster:daemon" + val ADDRESS = "akka:cloud:cluster:daemon" // FIXME configure functionServerDispatcher to what? val functionServerDispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("akka:cloud:cluster:function:server").build @@ -1652,7 +1653,6 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { import RemoteClusterDaemon._ import Cluster._ - self.id = ID self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) def receive: Receive = { @@ -1663,28 +1663,28 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { case USE => if (message.hasActorUuid) { val uuid = uuidProtocolToUuid(message.getActorUuid) - val address = ActorAddress(actorUuid = uuid) + val address = ActorAddress(uuid = uuid) implicit val format: Format[Actor] = cluster formatForActor address val actors = cluster use address - } else if (message.hasActorId) { - val id = message.getActorId - val address = ActorAddress(actorId = id) + } else if (message.hasActorAddress) { + val id = message.getActorAddress + val address = ActorAddress(address = id) implicit val format: Format[Actor] = cluster formatForActor address val actors = cluster use address } else if (message.hasActorClassName) { val actorClassName = message.getActorClassName - val address = ActorAddress(actorClassName = actorClassName) + val address = ActorAddress(className = actorClassName) implicit val format: Format[Actor] = cluster formatForActor address val actors = cluster use address } else EventHandler.warning(this, - "None of 
'actorUuid', 'actorId' or 'actorClassName' is specified, ignoring remote cluster daemon command [%s]".format(message)) + "None of 'uuid', 'actorAddress' or 'className' is specified, ignoring remote cluster daemon command [%s]".format(message)) case RELEASE => - if (message.hasActorUuid) { cluster release ActorAddress(actorUuid = uuidProtocolToUuid(message.getActorUuid)) } - else if (message.hasActorId) { cluster release ActorAddress(actorId = message.getActorId) } - else if (message.hasActorClassName) { cluster release ActorAddress(actorClassName = message.getActorClassName) } + if (message.hasActorUuid) { cluster release ActorAddress(uuid = uuidProtocolToUuid(message.getActorUuid)) } + else if (message.hasActorAddress) { cluster release ActorAddress(address = message.getActorAddress) } + else if (message.hasActorClassName) { cluster release ActorAddress(className = message.getActorClassName) } else EventHandler.warning(this, - "None of 'actorUuid', 'actorId' or 'actorClassName' is specified, ignoring remote cluster daemon command [%s]".format(message)) + "None of 'uuid', 'actorAddress' or 'className' is specified, ignoring remote cluster daemon command [%s]".format(message)) case START => cluster.start diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/ClusterActorRef.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala similarity index 88% rename from akka-cluster/src/main/scala/akka/cloud/cluster/ClusterActorRef.scala rename to akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala index d1e1f69759..78a50925e4 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/ClusterActorRef.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import Cluster._ @@ -22,16 +22,13 @@ class ClusterActorRef private[akka] ( actorAddresses: Array[Tuple2[UUID, InetSocketAddress]], val serviceId: String, 
actorClassName: String, - hostname: String, - port: Int, timeout: Long, actorType: ActorType, val replicationStrategy: ReplicationStrategy) - extends RemoteActorRef(serviceId, actorClassName, hostname, port, timeout, None, actorType) { + extends RemoteActorRef(serviceId, actorClassName, timeout, None, actorType) { this: ClusterActorRef with Router.Router => - EventHandler.debug(this, "Creating a ClusterActorRef [%s] for Actor [%s] on [%s:%s]" - .format(serviceId, actorClassName, hostname, port)) + EventHandler.debug(this, "Creating a ClusterActorRef [%s] for Actor [%s]".format(serviceId, actorClassName)) private[akka] val addresses = new AtomicReference[Map[InetSocketAddress, ActorRef]]( createConnections(actorAddresses, actorClassName)) @@ -72,7 +69,6 @@ class ClusterActorRef private[akka] ( private def createRemoteActorRef(uuid: UUID, address: InetSocketAddress) = { RemoteActorRef( UUID_PREFIX + uuidToString(uuid), actorClassName, // clustered refs are always registered and looked up by UUID - address.getHostName, address.getPort, Actor.TIMEOUT, None, actorType) } } diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/MurmurHash.scala b/akka-cluster/src/main/scala/akka/cluster/MurmurHash.scala similarity index 99% rename from akka-cluster/src/main/scala/akka/cloud/cluster/MurmurHash.scala rename to akka-cluster/src/main/scala/akka/cluster/MurmurHash.scala index b310cbb665..6441a53f76 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/MurmurHash.scala +++ b/akka-cluster/src/main/scala/akka/cluster/MurmurHash.scala @@ -6,7 +6,7 @@ ** |/ ** \* */ -package akka.cloud.cluster +package akka.cluster /** An implementation of Austin Appleby's MurmurHash 3.0 algorithm * (32 bit version); reference: http://code.google.com/p/smhasher diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/Routing.scala b/akka-cluster/src/main/scala/akka/cluster/Routing.scala similarity index 94% rename from akka-cluster/src/main/scala/akka/cloud/cluster/Routing.scala 
rename to akka-cluster/src/main/scala/akka/cluster/Routing.scala index 335d23c6f4..a2e05e7062 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/Routing.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Routing.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import Cluster._ @@ -38,18 +38,15 @@ object Router { routerType match { case Direct => new ClusterActorRef( - addresses, serviceId, actorClassName, - hostname, port, timeout, + addresses, serviceId, actorClassName, timeout, actorType, replicationStrategy) with Direct case Random => new ClusterActorRef( - addresses, serviceId, actorClassName, - hostname, port, timeout, + addresses, serviceId, actorClassName, timeout, actorType, replicationStrategy) with Random case RoundRobin => new ClusterActorRef( - addresses, serviceId, actorClassName, - hostname, port, timeout, + addresses, serviceId, actorClassName, timeout, actorType, replicationStrategy) with RoundRobin } } diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/replication/ReplicatedClusterRef.scala b/akka-cluster/src/main/scala/akka/cluster/replication/ReplicatedClusterRef.scala similarity index 79% rename from akka-cluster/src/main/scala/akka/cloud/cluster/replication/ReplicatedClusterRef.scala rename to akka-cluster/src/main/scala/akka/cluster/replication/ReplicatedClusterRef.scala index 528e1c9d84..af395b8903 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/replication/ReplicatedClusterRef.scala +++ b/akka-cluster/src/main/scala/akka/cluster/replication/ReplicatedClusterRef.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import Cluster._ @@ -34,11 +34,10 @@ object ReplicationStrategy { /** * @author Jonas Bonér */ -class ReplicatedActorRef private[akka] (actorRef: ActorRef) extends ActorRef with ScalaActorRef { +class ReplicatedActorRef private[akka] (actorRef: 
ActorRef, val address: String) extends ActorRef with ScalaActorRef { private lazy val txLog = { - EventHandler.debug(this, "Creating a ReplicatedActorRef for Actor [%s] on [%s]" - .format(actorClassName, homeAddress)) + EventHandler.debug(this, "Creating a ReplicatedActorRef for Actor [%s]".format(actorClassName)) TransactionLog.newLogFor(uuid.toString) } @@ -62,18 +61,13 @@ class ReplicatedActorRef private[akka] (actorRef: ActorRef) extends ActorRef wit override def getFaultHandler(): FaultHandlingStrategy = actorRef.getFaultHandler() override def setLifeCycle(lifeCycle: LifeCycle): Unit = actorRef.setLifeCycle(lifeCycle) override def getLifeCycle(): LifeCycle = actorRef.getLifeCycle - def homeAddress: Option[InetSocketAddress] = actorRef.homeAddress def actorClass: Class[_ <: Actor] = actorRef.actorClass def actorClassName: String = actorRef.actorClassName def dispatcher_=(md: MessageDispatcher): Unit = actorRef.dispatcher_=(md) def dispatcher: MessageDispatcher = actorRef.dispatcher def link(actorRef: ActorRef): Unit = actorRef.link(actorRef) def unlink(actorRef: ActorRef): Unit = actorRef.unlink(actorRef) - def startLink(actorRef: ActorRef): Unit = actorRef.startLink(actorRef) - def spawn(clazz: Class[_ <: Actor]): ActorRef = actorRef.spawn(clazz) - def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = actorRef.spawnRemote(clazz, hostname, port, timeout) - def spawnLink(clazz: Class[_ <: Actor]): ActorRef = actorRef.spawnLink(clazz) - def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int, timeout: Long): ActorRef = actorRef.spawnLinkRemote(clazz, hostname, port, timeout) + def startLink(actorRef: ActorRef): ActorRef = actorRef.startLink(actorRef) def supervisor: Option[ActorRef] = actorRef.supervisor def linkedActors: JMap[Uuid, ActorRef] = actorRef.linkedActors protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = actorRef.postMessageToMailbox(message, 
senderOption) @@ -89,5 +83,4 @@ class ReplicatedActorRef private[akka] (actorRef: ActorRef) extends ActorRef wit protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = actorRef.handleTrapExit(dead, reason) protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = actorRef.restart(reason, maxNrOfRetries, withinTimeRange) protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = actorRef.restartLinkedActors(reason, maxNrOfRetries, withinTimeRange) - protected[akka] def registerSupervisorAsRemoteActor: Option[Uuid] = actorRef.registerSupervisorAsRemoteActor } diff --git a/akka-cluster/src/main/scala/akka/cloud/cluster/replication/TransactionLog.scala b/akka-cluster/src/main/scala/akka/cluster/replication/TransactionLog.scala similarity index 99% rename from akka-cluster/src/main/scala/akka/cloud/cluster/replication/TransactionLog.scala rename to akka-cluster/src/main/scala/akka/cluster/replication/TransactionLog.scala index e48c6f61a3..a83abd461e 100644 --- a/akka-cluster/src/main/scala/akka/cloud/cluster/replication/TransactionLog.scala +++ b/akka-cluster/src/main/scala/akka/cluster/replication/TransactionLog.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import org.apache.bookkeeper.client.{BookKeeper, LedgerHandle, LedgerEntry, BKException, AsyncCallback} import org.apache.zookeeper.CreateMode @@ -15,7 +15,7 @@ import akka.event.EventHandler import akka.dispatch.{DefaultCompletableFuture, CompletableFuture} import akka.AkkaException -import akka.cloud.zookeeper._ +import akka.cluster.zookeeper._ import java.util.Enumeration diff --git a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZkClient.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala similarity index 96% rename from 
akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZkClient.scala rename to akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala index ea3e1e4676..42037707ac 100644 --- a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZkClient.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.zookeeper +package akka.cluster.zookeeper import org.I0Itec.zkclient._ import org.I0Itec.zkclient.serialize._ diff --git a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZooKeeper.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala similarity index 96% rename from akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZooKeeper.scala rename to akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala index ba35be78cc..6095f72ec6 100644 --- a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/AkkaZooKeeper.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.zookeeper +package akka.cluster.zookeeper import org.I0Itec.zkclient._ import org.apache.commons.io.FileUtils diff --git a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/ZooKeeperBarrier.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala similarity index 98% rename from akka-zookeeper/src/main/scala/akka/cloud/zookeeper/ZooKeeperBarrier.scala rename to akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala index 134a7a1be4..4f83cdd7b2 100644 --- a/akka-zookeeper/src/main/scala/akka/cloud/zookeeper/ZooKeeperBarrier.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.zookeeper +package akka.cluster.zookeeper import akka.util.Duration import 
akka.util.duration._ diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusterMultiJvmSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterMultiJvmSpec.scala similarity index 98% rename from akka-cluster/src/test/scala/akka/cloud/cluster/ClusterMultiJvmSpec.scala rename to akka-cluster/src/test/scala/akka/cluster/ClusterMultiJvmSpec.scala index 9d59cde803..405d65d016 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusterMultiJvmSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterMultiJvmSpec.scala @@ -2,13 +2,13 @@ * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import akka.cloud.zookeeper._ +import akka.cluster.zookeeper._ import org.I0Itec.zkclient._ object MultiNodeTest { diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusterSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala similarity index 99% rename from akka-cluster/src/test/scala/akka/cloud/cluster/ClusterSpec.scala rename to akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala index 642fc51f01..930444ea8a 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusterSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala @@ -1,4 +1,4 @@ -package akka.cloud.cluster +package akka.cluster import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers @@ -9,8 +9,7 @@ import org.I0Itec.zkclient._ import akka.actor._ import akka.actor.Actor._ import akka.serialization.{Serializer, SerializerBasedActorFormat} - -import akka.cloud.common.Util._ +import akka.util.Helpers._ import java.util.concurrent.{ CyclicBarrier, TimeUnit } diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredFunctions.scala b/akka-cluster/src/test/scala/akka/cluster/ClusteredFunctions.scala similarity index 99% rename from
akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredFunctions.scala rename to akka-cluster/src/test/scala/akka/cluster/ClusteredFunctions.scala index 3e19d1e47e..50e875c600 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredFunctions.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusteredFunctions.scala @@ -4,7 +4,7 @@ package sample.cluster -import akka.cloud.cluster._ +import akka.cluster._ import akka.dispatch.Futures /** diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredPingPongSample.scala b/akka-cluster/src/test/scala/akka/cluster/ClusteredPingPongSample.scala similarity index 99% rename from akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredPingPongSample.scala rename to akka-cluster/src/test/scala/akka/cluster/ClusteredPingPongSample.scala index 2e81cc0bcd..77aff89707 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/ClusteredPingPongSample.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusteredPingPongSample.scala @@ -4,7 +4,7 @@ package sample.cluster -import akka.cloud.cluster._ +import akka.cluster._ import akka.actor._ import akka.actor.Actor._ diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/PingPongMultiJvmExample.scala b/akka-cluster/src/test/scala/akka/cluster/PingPongMultiJvmExample.scala similarity index 96% rename from akka-cluster/src/test/scala/akka/cloud/cluster/PingPongMultiJvmExample.scala rename to akka-cluster/src/test/scala/akka/cluster/PingPongMultiJvmExample.scala index 42bfc130f1..3ea2fa221b 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/PingPongMultiJvmExample.scala +++ b/akka-cluster/src/test/scala/akka/cluster/PingPongMultiJvmExample.scala @@ -4,8 +4,7 @@ package example.cluster -import akka.cloud.cluster._ -import akka.cloud.monitoring._ +import akka.cluster._ import akka.actor._ import akka.serialization.{Serializer, SerializerBasedActorFormat} @@ -183,11 +182,11 @@ object PingPongMultiJvmNode1 { pause("shutdown", "Ready to shutdown") 
println("Stopping everything ...") - Monitoring.stopLocalDaemons - MonitoringServer.stop + //Monitoring.stopLocalDaemons + //MonitoringServer.stop Actor.remote.shutdown - Actor.registry.shutdownAll + Actor.registry.local.shutdownAll node.stop @@ -231,7 +230,7 @@ class PongNode(number: Int) { // clean up and stop Actor.remote.shutdown - Actor.registry.shutdownAll + Actor.registry.local.shutdownAll node.stop } diff --git a/akka-cluster/src/test/scala/akka/cloud/cluster/ReplicationSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ReplicationSpec.scala similarity index 99% rename from akka-cluster/src/test/scala/akka/cloud/cluster/ReplicationSpec.scala rename to akka-cluster/src/test/scala/akka/cluster/ReplicationSpec.scala index d21875a72f..84b48f8fdc 100644 --- a/akka-cluster/src/test/scala/akka/cloud/cluster/ReplicationSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ReplicationSpec.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2011 Scalable Solutions AB */ -package akka.cloud.cluster +package akka.cluster import org.apache.bookkeeper.client.{BookKeeper, BKException} import BKException._ diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index b991571289..96b020d13c 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -197,9 +197,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { lazy val akka_stm = project("akka-stm", "akka-stm", new AkkaStmProject(_), akka_actor) lazy val akka_typed_actor = project("akka-typed-actor", "akka-typed-actor", new AkkaTypedActorProject(_), akka_stm, akka_actor_tests) -// lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) -// lazy val akka_zookeeper = project("akka-zookeeper", "akka-zookeeper", new AkkaZookeeperProject(_), akka_remote) -// lazy val akka_cluster = project("akka-cluster", "akka-cluster", new AkkaClusterProject(_), akka_zookeeper) + lazy val akka_remote = 
project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor) + lazy val akka_cluster = project("akka-cluster", "akka-cluster", new AkkaClusterProject(_), akka_remote) lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_actor) lazy val akka_slf4j = project("akka-slf4j", "akka-slf4j", new AkkaSlf4jProject(_), akka_actor) @@ -348,24 +347,17 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { } } - // ------------------------------------------------------------------------------------------------------------------- - // akka-zookeeper sub project - // ------------------------------------------------------------------------------------------------------------------- - - class AkkaZookeeperProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val log4j = Dependencies.log4j - val zookeeper = Dependencies.zookeeper - val zookeeperLock = Dependencies.zookeeperLock - val zkClient = Dependencies.zkClient - val commons_io = Dependencies.commons_io - } - // ------------------------------------------------------------------------------------------------------------------- // akka-cluster sub project // ------------------------------------------------------------------------------------------------------------------- class AkkaClusterProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with MultiJvmTests { val bookkeeper = Dependencies.bookkeeper + val zookeeper = Dependencies.zookeeper + val zookeeperLock = Dependencies.zookeeperLock + val zkClient = Dependencies.zkClient + val commons_io = Dependencies.commons_io + val log4j = Dependencies.log4j // test dependencies @@ -437,10 +429,10 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { new AkkaSampleAntsProject(_), akka_stm) lazy val akka_sample_fsm = project("akka-sample-fsm", "akka-sample-fsm", new AkkaSampleFSMProject(_), akka_actor) -// lazy val akka_sample_remote = 
project("akka-sample-remote", "akka-sample-remote", -// new AkkaSampleRemoteProject(_), akka_remote) -// lazy val akka_sample_chat = project("akka-sample-chat", "akka-sample-chat", -// new AkkaSampleChatProject(_), akka_remote) + lazy val akka_sample_remote = project("akka-sample-remote", "akka-sample-remote", + new AkkaSampleRemoteProject(_), akka_remote) + lazy val akka_sample_chat = project("akka-sample-chat", "akka-sample-chat", + new AkkaSampleChatProject(_), akka_remote) lazy val akka_sample_osgi = project("akka-sample-osgi", "akka-sample-osgi", new AkkaSampleOsgiProject(_), akka_actor)