format with new Scalariform version

* and fix mima issue

parent 839ec5f167
commit 3465a221f0

24 changed files with 114 additions and 91 deletions
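Nearly every hunk below is one of two mechanical Scalariform rewrites: ASCII arrows (`->`, `=>`) become their Unicode forms (`→`, `⇒`), and long parameter or argument lists are broken so the opening parenthesis ends the line and the arguments move onto their own lines. A minimal before/after sketch of both patterns, using made-up names that are not from this commit:

// Before the reformat: ASCII arrows and a single long call.
object BeforeSketch {
  val roles = Map("first" -> 1, "second" -> 2)
  def describe(name: String, role: Int, active: Boolean): String = s"$name/$role/$active"
  val line = describe("first", roles("first"), active = true)
}

// After the reformat: Unicode arrows, the call split with one argument per line.
object AfterSketch {
  val roles = Map("first" → 1, "second" → 2)
  def describe(
    name: String,
    role: Int,
    active: Boolean): String = s"$name/$role/$active"
  val line = describe(
    "first",
    roles("first"),
    active = true)
}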
@@ -139,8 +139,8 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
       else {
         val cache = manifestCache.get
         cache.get(manifest) match {
-          case Some(cachedClassManifest) => s1.fromBinary(bytes, cachedClassManifest)
-          case None =>
+          case Some(cachedClassManifest) ⇒ s1.fromBinary(bytes, cachedClassManifest)
+          case None ⇒
             system.dynamicAccess.getClassFor[AnyRef](manifest) match {
               case Success(classManifest) ⇒
                 val classManifestOption: Option[Class[_]] = Some(classManifest)

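For context, the code being reformatted above sits on the manifest-based deserialization path of Akka's serialization extension. A usage sketch of the public entry point that ends up consulting that manifest-to-Class cache (illustrative only, not part of the commit; an empty manifest is enough for the default Java serializer):

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension

object SerializationSketch extends App {
  val system = ActorSystem("sketch")
  val serialization = SerializationExtension(system)

  val original = "hello"
  val serializer = serialization.findSerializerFor(original)
  val bytes = serializer.toBinary(original)

  // deserialize(bytes, serializerId, manifest) is the code path touched by the hunk above.
  val restored = serialization.deserialize(bytes, serializer.identifier, "").get
  println(restored == original)
  system.terminate()
}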
@@ -167,7 +167,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
           "akka.actor.serializers is not in synch between the two systems.")
     }
     serializer match {
-      case ser: ByteBufferSerializer =>
+      case ser: ByteBufferSerializer ⇒
         ser.fromBinary(buf, manifest)
       case _ ⇒
         val bytes = Array.ofDim[Byte](buf.remaining())

@@ -46,7 +46,8 @@ class CodecBenchmark {
       remote.artery.hostname = localhost
       remote.artery.port = 0
     }
-    """)
+    """
+  )

   implicit val system = ActorSystem("CodecBenchmark", config)
   val systemB = ActorSystem("systemB", system.settings.config)

@@ -56,8 +57,10 @@ class CodecBenchmark {
   val headerIn = HeaderBuilder(compression)
   val envelopeTemplateBuffer = ByteBuffer.allocate(ArteryTransport.MaximumFrameSize).order(ByteOrder.LITTLE_ENDIAN)

-  val uniqueLocalAddress = UniqueAddress(system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress,
-    AddressUidExtension(system).addressUid)
+  val uniqueLocalAddress = UniqueAddress(
+    system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress,
+    AddressUidExtension(system).addressUid
+  )
   val payload = Array.ofDim[Byte](1000)

   private var materializer: ActorMaterializer = _

@@ -65,11 +65,13 @@ abstract class QuickRestartSpec
       runOn(second) {
         restartingSystem =
           if (restartingSystem == null)
-            ActorSystem(system.name,
+            ActorSystem(
+              system.name,
               ConfigFactory.parseString(s"akka.cluster.roles = [round-$n]")
                 .withFallback(system.settings.config))
           else
-            ActorSystem(system.name,
+            ActorSystem(
+              system.name,
               ConfigFactory.parseString(s"""
                 akka.cluster.roles = [round-$n]
                 akka.remote.netty.tcp.port = ${Cluster(restartingSystem).selfAddress.port.get}""") // same port

@@ -195,11 +195,11 @@ object MultiNodeSpec {
   require(selfIndex >= 0 && selfIndex < maxNodes, "multinode.index is out of bounds: " + selfIndex)

   private[testkit] val nodeConfig = mapToConfig(Map(
-    "akka.actor.provider" -> "akka.remote.RemoteActorRefProvider",
-    "akka.remote.artery.hostname" -> selfName,
-    "akka.remote.netty.tcp.hostname" -> selfName,
-    "akka.remote.netty.tcp.port" -> selfPort,
-    "akka.remote.artery.port" -> selfPort))
+    "akka.actor.provider" → "akka.remote.RemoteActorRefProvider",
+    "akka.remote.artery.hostname" → selfName,
+    "akka.remote.netty.tcp.hostname" → selfName,
+    "akka.remote.netty.tcp.port" → selfPort,
+    "akka.remote.artery.port" → selfPort))

   private[testkit] val baseConfig: Config = ConfigFactory.parseString("""
     akka {

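The `mapToConfig` helper seen above is private to `MultiNodeSpec`; its likely shape is a thin wrapper around Typesafe Config's `parseMap`. A sketch under that assumption (the keys and placeholder values below are illustrative, not the commit's code):

import com.typesafe.config.{ Config, ConfigFactory }
import scala.collection.JavaConverters._

object ConfigSketch {
  // Assumption: the helper simply hands a Scala Map to ConfigFactory.parseMap.
  def mapToConfig(map: Map[String, AnyRef]): Config =
    ConfigFactory.parseMap(map.asJava)

  // Mirrors the node-config entries in the hunk above, with placeholder values.
  val nodeConfig: Config = mapToConfig(Map(
    "akka.remote.netty.tcp.hostname" → "127.0.0.1",
    "akka.remote.netty.tcp.port" → "2552"))
}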
@@ -34,7 +34,7 @@ object HandshakeRestartReceiverSpec extends MultiNodeConfig {
   class Subject extends Actor {
     def receive = {
       case "shutdown" ⇒ context.system.terminate()
-      case "identify" ⇒ sender() ! (AddressUidExtension(context.system).addressUid -> self)
+      case "identify" ⇒ sender() ! (AddressUidExtension(context.system).addressUid → self)
     }
   }

@@ -240,7 +240,8 @@ abstract class MaxThroughputSpec
       val senders = for (n ← 1 to senderReceiverPairs) yield {
         val receiver = identifyReceiver(receiverName + n)
         val plotProbe = TestProbe()
-        val snd = system.actorOf(senderProps(receiver, testSettings, plotProbe.ref),
+        val snd = system.actorOf(
+          senderProps(receiver, testSettings, plotProbe.ref),
           testName + "-snd" + n)
         val terminationProbe = TestProbe()
         terminationProbe.watch(snd)

@@ -6,7 +6,7 @@ package akka.remote.artery
 final case class PlotResult(values: Vector[(String, Number)] = Vector.empty) {

   def add(key: String, value: Number): PlotResult =
-    copy(values = values :+ (key -> value))
+    copy(values = values :+ (key → value))

   def addAll(p: PlotResult): PlotResult =
     copy(values ++ p.values)

@@ -48,7 +48,7 @@ object RemoteRestartedQuarantinedSpec extends MultiNodeConfig {
   class Subject extends Actor {
     def receive = {
       case "shutdown" ⇒ context.system.terminate()
-      case "identify" ⇒ sender() ! (AddressUidExtension(context.system).addressUid -> self)
+      case "identify" ⇒ sender() ! (AddressUidExtension(context.system).addressUid → self)
     }
   }

@@ -6,7 +6,8 @@ package akka.remote.artery
 import java.util.concurrent.TimeUnit.SECONDS
 import java.util.concurrent.Executors

-class TestRateReporter(name: String) extends RateReporter(SECONDS.toNanos(1),
+class TestRateReporter(name: String) extends RateReporter(
+  SECONDS.toNanos(1),
   new RateReporter.Reporter {
     override def onReport(messagesPerSec: Double, bytesPerSec: Double, totalMessages: Long, totalBytes: Long): Unit = {
       println(name +

@@ -239,7 +239,8 @@ private[remote] class ArteryTransport(_system: ExtendedActorSystem, _provider: R
   private val systemMessageResendInterval: FiniteDuration = 1.second
   private val handshakeRetryInterval: FiniteDuration = 1.second
   private val handshakeTimeout: FiniteDuration =
-    system.settings.config.getMillisDuration("akka.remote.handshake-timeout").requiring(_ > Duration.Zero,
+    system.settings.config.getMillisDuration("akka.remote.handshake-timeout").requiring(
+      _ > Duration.Zero,
       "handshake-timeout must be > 0")
   private val injectHandshakeInterval: FiniteDuration = 1.second
   private val giveUpSendAfter: FiniteDuration = 60.seconds

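The `requiring` call reformatted above is Akka's validate-and-return combinator from `akka.util.Helpers`. A self-contained sketch of an equivalent helper, under the assumption that it checks a predicate against the value and returns the value unchanged (names below are illustrative):

import scala.concurrent.duration._

object RequiringSketch {
  // Equivalent shape: run require(...) against the value, then return it.
  implicit final class Requiring[A](val value: A) extends AnyVal {
    def requiring(cond: A => Boolean, msg: => Any): A = {
      require(cond(value), msg)
      value
    }
  }

  // Usage mirroring the hunk above: fail fast on a non-positive timeout.
  val handshakeTimeout: FiniteDuration =
    5.seconds.requiring(_ > Duration.Zero, "handshake-timeout must be > 0")
}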
@@ -103,7 +103,8 @@ private[akka] class Association(
     Unsafe.instance.getObjectVolatile(this, AbstractAssociation.sharedStateOffset).asInstanceOf[AssociationState]

   def completeHandshake(peer: UniqueAddress): Unit = {
-    require(remoteAddress == peer.address,
+    require(
+      remoteAddress == peer.address,
       s"wrong remote address in completeHandshake, got ${peer.address}, expected ${remoteAddress}")
     val current = associationState
     current.uniqueRemoteAddressPromise.trySuccess(peer)

@@ -114,7 +115,8 @@ private[akka] class Association(
     if (swapState(current, newState)) {
       current.uniqueRemoteAddressValue() match {
         case Some(Success(old)) ⇒
-          log.debug("Incarnation {} of association to [{}] with new UID [{}] (old UID [{}])",
+          log.debug(
+            "Incarnation {} of association to [{}] with new UID [{}] (old UID [{}])",
             newState.incarnation, peer.address, peer.uid, old.uid)
         case _ ⇒
         // Failed, nothing to do

@@ -190,7 +192,8 @@ private[akka] class Association(
         val newState = current.newQuarantined()
         if (swapState(current, newState)) {
           // quarantine state change was performed
-          log.warning("Association to [{}] with UID [{}] is irrecoverably failed. Quarantining address. {}",
+          log.warning(
+            "Association to [{}] with UID [{}] is irrecoverably failed. Quarantining address. {}",
             remoteAddress, u, reason)
           // end delivery of system messages to that incarnation after this point
           send(ClearSystemMessageDelivery, None, dummyRecipient)

@@ -200,10 +203,12 @@ private[akka] class Association(
             quarantine(reason, uid) // recursive
           }
         case Some(Success(peer)) ⇒
-          log.debug("Quarantine of [{}] ignored due to non-matching UID, quarantine requested for [{}] but current is [{}]. {}",
+          log.debug(
+            "Quarantine of [{}] ignored due to non-matching UID, quarantine requested for [{}] but current is [{}]. {}",
             remoteAddress, u, peer.uid, reason)
         case None ⇒
-          log.debug("Quarantine of [{}] ignored because handshake not completed, quarantine request was for old incarnation. {}",
+          log.debug(
+            "Quarantine of [{}] ignored because handshake not completed, quarantine request was for old incarnation. {}",
             remoteAddress, reason)
       }
     case None ⇒

@@ -172,7 +172,8 @@ class Decoder(
         push(out, decoded)
       } catch {
         case NonFatal(e) ⇒
-          log.warning("Failed to deserialize message with serializer id [{}] and manifest [{}]. {}",
+          log.warning(
+            "Failed to deserialize message with serializer id [{}] and manifest [{}]. {}",
             headerBuilder.serializer, headerBuilder.manifest, e.getMessage)
           pull(in)
       } finally {

@@ -198,12 +198,14 @@ private[akka] class InboundHandshake(inboundContext: InboundContext, inControlSt
         push(out, env)
       else {
         // FIXME remove, only debug
-        log.warning(s"Dropping message [{}] from unknown system with UID [{}]. " +
+        log.warning(
+          s"Dropping message [{}] from unknown system with UID [{}]. " +
           "This system with UID [{}] was probably restarted. " +
           "Messages will be accepted when new handshake has been completed.",
           env.message.getClass.getName, inboundContext.localAddress.uid, env.originUid)
         if (log.isDebugEnabled)
-          log.debug(s"Dropping message [{}] from unknown system with UID [{}]. " +
+          log.debug(
+            s"Dropping message [{}] from unknown system with UID [{}]. " +
             "This system with UID [{}] was probably restarted. " +
             "Messages will be accepted when new handshake has been completed.",
             env.message.getClass.getName, inboundContext.localAddress.uid, env.originUid)

@@ -33,7 +33,8 @@ private[akka] class InboundQuarantineCheck(inboundContext: InboundContext) exten
         push(out, env)
       case association ⇒
         if (association.associationState.isQuarantined(env.originUid)) {
-          inboundContext.sendControl(association.remoteAddress,
+          inboundContext.sendControl(
+            association.remoteAddress,
             Quarantined(inboundContext.localAddress, UniqueAddress(association.remoteAddress, env.originUid)))
           pull(in)
         } else

@@ -27,7 +27,8 @@ private[akka] class MessageDispatcher(
   private val remoteDaemon = provider.remoteDaemon
   private val log = Logging(system, getClass.getName)

-  def dispatch(recipient: InternalActorRef,
+  def dispatch(
+    recipient: InternalActorRef,
     recipientAddress: Address,
     message: AnyRef,
     senderOption: Option[ActorRef]): Unit = {

@@ -54,7 +55,8 @@ private[akka] class MessageDispatcher(
       case sel: ActorSelectionMessage ⇒
         if (UntrustedMode && (!TrustedSelectionPaths.contains(sel.elements.mkString("/", "/", "")) ||
           sel.msg.isInstanceOf[PossiblyHarmful] || l != provider.rootGuardian))
-          log.debug("operating in UntrustedMode, dropping inbound actor selection to [{}], " +
+          log.debug(
+            "operating in UntrustedMode, dropping inbound actor selection to [{}], " +
             "allow it by adding the path to 'akka.remote.trusted-selection-paths' configuration",
             sel.elements.mkString("/", "/", ""))
         else

@@ -72,10 +74,12 @@ private[akka] class MessageDispatcher(
           // if it was originally addressed to us but is in fact remote from our point of view (i.e. remote-deployed)
           r.!(message)(sender)
         else
-          log.error("dropping message [{}] for non-local recipient [{}] arriving at [{}] inbound addresses are [{}]",
+          log.error(
+            "dropping message [{}] for non-local recipient [{}] arriving at [{}] inbound addresses are [{}]",
             message.getClass, r, recipientAddress, provider.transport.addresses.mkString(", "))

-      case r ⇒ log.error("dropping message [{}] for unknown recipient [{}] arriving at [{}] inbound addresses are [{}]",
+      case r ⇒ log.error(
+        "dropping message [{}] for unknown recipient [{}] arriving at [{}] inbound addresses are [{}]",
         message.getClass, r, recipientAddress, provider.transport.addresses.mkString(", "))

     }

@@ -9,19 +9,19 @@ class EnvelopeBufferSpec extends AkkaSpec {

   object TestCompressor extends LiteralCompressionTable {
     val refToIdx = Map(
-      "compressable0" -> 0,
-      "compressable1" -> 1,
-      "reallylongcompressablestring" -> 2)
+      "compressable0" → 0,
+      "compressable1" → 1,
+      "reallylongcompressablestring" → 2)
     val idxToRef = refToIdx.map(_.swap)

     val serializerToIdx = Map(
-      "serializer0" -> 0,
-      "serializer1" -> 1)
+      "serializer0" → 0,
+      "serializer1" → 1)
     val idxToSer = serializerToIdx.map(_.swap)

     val manifestToIdx = Map(
-      "manifest0" -> 0,
-      "manifest1" -> 1)
+      "manifest0" → 0,
+      "manifest1" → 1)
     val idxToManifest = manifestToIdx.map(_.swap)

     override def compressActorRef(ref: String): Int = refToIdx.getOrElse(ref, -1)

@@ -747,9 +747,6 @@ object MiMa extends AutoPlugin {
       ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.scaladsl.DefaultSSLContextCreation.validateAndWarnAboutLooseSettings")
     ),
     "2.4.4" -> Seq(
-      // Remove useUntrustedMode which is an internal API and not used anywhere anymore
-      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.Remoting.useUntrustedMode"),
-      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.RemoteTransport.useUntrustedMode"),

       // #20080, #20081 remove race condition on HTTP client
       ProblemFilters.exclude[DirectMissingMethodProblem]("akka.http.scaladsl.Http#HostConnectionPool.gatewayFuture"),

@@ -851,6 +848,11 @@ object MiMa extends AutoPlugin {
       ProblemFilters.exclude[IncompatibleResultTypeProblem]("akka.cluster.client.ClusterClient.initialContactsSel")
     ),
     "2.4.6" -> Seq(
+
+      // Remove useUntrustedMode which is an internal API and not used anywhere anymore
+      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.Remoting.useUntrustedMode"),
+      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.RemoteTransport.useUntrustedMode"),
+
       // internal api
       FilterAnyProblemStartingWith("akka.stream.impl"),

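The MiMa part of the commit simply moves the two `useUntrustedMode` exclusions from the `"2.4.4"` filter group into the `"2.4.6"` group, so the exclusion is applied when comparing against the release the method was actually removed from. A sketch of the overall shape of such a version-keyed filter map (illustrative only, not Akka's actual `MiMa.scala`):

import com.typesafe.tools.mima.core._

object MimaFilterSketch {
  // Filters keyed by the previous version they are compared against; moving a
  // filter under the right key is the "fix mima issue" part of this commit.
  val filters: Map[String, Seq[ProblemFilter]] = Map(
    "2.4.4" -> Seq.empty,
    "2.4.6" -> Seq(
      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.Remoting.useUntrustedMode"),
      ProblemFilters.exclude[DirectMissingMethodProblem]("akka.remote.RemoteTransport.useUntrustedMode")))
}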