Scala 2.13.0 rc2 (#26967)

Arnout Engelen 2019-05-24 08:11:50 +02:00 committed by Patrik Nordwall
parent 24a3bba135
commit 814cfa286c
168 changed files with 396 additions and 315 deletions


@ -4,7 +4,7 @@ sudo: false
scala:
- "2.12.8"
- "2.13.0-M5"
- "2.13.0-RC2"
before_install:
# using jabba for custom jdk management


@ -11,7 +11,7 @@ import akka.annotation.InternalApi
import akka.event.Logging.LogLevel
import akka.util.OptionVal
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.compat.java8.OptionConverters._
/**


@ -13,7 +13,7 @@ import akka.actor.testkit.typed.{ CapturedLogEvent, Effect }
import akka.actor.testkit.typed.Effect._
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import scala.reflect.ClassTag
import scala.util.control.Exception.Catcher


@ -12,7 +12,7 @@ import java.util.function.Supplier
import java.util.{ List => JList }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._


@ -10,7 +10,7 @@ import akka.actor.testkit.typed.internal.TestInboxImpl
import java.util.concurrent.ThreadLocalRandom
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
object TestInbox {


@ -14,14 +14,11 @@ public class ThrowablesTest {
Assert.assertTrue(Throwables.isNonFatal(new IllegalArgumentException("isNonFatal")));
}
private static class ControlThrowableImpl extends Throwable implements ControlThrowable {}
@Test
public void testIsFatal() {
Assert.assertTrue(Throwables.isFatal(new StackOverflowError("fatal")));
Assert.assertTrue(Throwables.isFatal(new ThreadDeath()));
Assert.assertTrue(Throwables.isFatal(new InterruptedException("fatal")));
Assert.assertTrue(Throwables.isFatal(new LinkageError("fatal")));
Assert.assertTrue(Throwables.isFatal(new ControlThrowableImpl()));
}
}


@ -95,7 +95,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
identify(system.child("c2").child("c21")) should ===(Some(c21)) // test Java API
identify(system / Seq("c2", "c21")) should ===(Some(c21))
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
identify(system.descendant(Seq("c2", "c21").asJava)) // test Java API
}
@ -244,7 +244,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
}
"return deadLetters or ActorIdentity(None), respectively, for non-existing paths" in {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
def checkOne(looker: ActorRef, query: Query, result: Option[ActorRef]): Unit = {
val lookup = askNode(looker, query)


@ -15,7 +15,7 @@ import com.github.ghik.silencer.silent
import scala.concurrent.duration._
import org.scalatest.BeforeAndAfterEach
import org.scalatestplus.junit.JUnitSuiteLike
import org.scalatest.junit.JUnitSuiteLike
object ActorWithStashSpec {
@ -102,6 +102,7 @@ object ActorWithStashSpec {
}
@silent
class JavaActorWithStashSpec extends StashJavaAPI with JUnitSuiteLike
@silent


@ -15,7 +15,8 @@ import akka.testkit.{ AkkaSpec, EventFilter, TestKit }
import com.typesafe.config.{ Config, ConfigFactory }
import akka.actor.CoordinatedShutdown.Phase
import akka.actor.CoordinatedShutdown.UnknownReason
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.concurrent.Promise
import java.util.concurrent.TimeoutException


@ -10,10 +10,12 @@ import akka.testkit.EventFilter
import akka.testkit.TestKit._
import com.typesafe.config.ConfigFactory
import org.scalatest.{ Matchers, WordSpec }
import org.scalatestplus.junit.JUnitSuiteLike
import org.scalatest.junit.JUnitSuiteLike
import scala.util.control.NoStackTrace
import com.github.ghik.silencer.silent
@silent
class JavaExtensionSpec extends JavaExtension with JUnitSuiteLike
object TestExtension extends ExtensionId[TestExtension] with ExtensionIdProvider {


@ -251,7 +251,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
}
"log events and transitions if asked to do so" in {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val config = ConfigFactory
.parseMap(Map(
"akka.loglevel" -> "DEBUG",


@ -27,7 +27,7 @@ object FSMTransitionSpec {
}
class MyFSM(target: ActorRef) extends Actor with FSM[Int, Unit] {
startWith(0, Unit)
startWith(0, ())
when(0) {
case Event("tick", _) => goto(1)
}


@ -4,6 +4,8 @@
package akka.actor
import org.scalatestplus.junit.JUnitSuiteLike
import org.scalatest.junit.JUnitSuiteLike
import com.github.ghik.silencer.silent
@silent
class JavaAPISpec extends JavaAPI with JUnitSuiteLike


@ -409,7 +409,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
System.err.println(
"Teammates left: " + team.size + " stopLatch: " + stopLatch.getCount + " inhab:" + dispatcher.inhabitants)
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
team.asScala.toList.sortBy(_.self.path).foreach { cell: ActorCell =>
System.err.println(
" - " + cell.self.path + " " + cell.isTerminated + " " + cell.mailbox.currentStatus + " "
@ -452,7 +452,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
val f6 = a ? Reply("bar2")
val c = system.scheduler.scheduleOnce(2.seconds) {
import collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
Thread.getAllStackTraces().asScala.foreach {
case (thread, stack) =>
println(s"$thread:")


@ -4,7 +4,6 @@
package akka.actor.dispatch
import scala.collection.JavaConverters.mapAsJavaMapConverter
import scala.reflect.ClassTag
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
@ -122,6 +121,8 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
val defaultDispatcherConfig = settings.config.getConfig("akka.actor.default-dispatcher")
lazy val allDispatchers: Map[String, MessageDispatcher] = {
import akka.util.ccompat.JavaConverters._
validTypes
.map(t => (t, from(ConfigFactory.parseMap(Map(tipe -> t, id -> t).asJava).withFallback(defaultDispatcherConfig))))
.toMap
@ -160,6 +161,7 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
}
"throw ConfigurationException if type does not exist" in {
import akka.util.ccompat.JavaConverters._
intercept[ConfigurationException] {
from(
ConfigFactory


@ -13,6 +13,9 @@ import akka.util.unused
import scala.concurrent.duration._
object PriorityDispatcherSpec {
case object Result
val config = """
unbounded-prio-dispatcher {
mailbox-type = "akka.dispatch.PriorityDispatcherSpec$Unbounded"
@ -24,19 +27,20 @@ object PriorityDispatcherSpec {
class Unbounded(@unused settings: ActorSystem.Settings, @unused config: Config)
extends UnboundedPriorityMailbox(PriorityGenerator({
case i: Int => i //Reverse order
case "Result" => Int.MaxValue
case i: Int => i //Reverse order
case Result => Int.MaxValue
}: Any => Int))
class Bounded(@unused settings: ActorSystem.Settings, @unused config: Config)
extends BoundedPriorityMailbox(PriorityGenerator({
case i: Int => i //Reverse order
case "Result" => Int.MaxValue
case i: Int => i //Reverse order
case Result => Int.MaxValue
}: Any => Int), 1000, 10 seconds)
}
class PriorityDispatcherSpec extends AkkaSpec(PriorityDispatcherSpec.config) with DefaultTimeout {
import PriorityDispatcherSpec._
"A PriorityDispatcher" must {
"Order it's messages according to the specified comparator using an unbounded mailbox" in {
@ -66,11 +70,11 @@ class PriorityDispatcherSpec extends AkkaSpec(PriorityDispatcherSpec.config) wit
self ! m
}
self.tell("Result", testActor)
self.tell(Result, testActor)
def receive = {
case i: Int => acc += i
case "Result" => sender() ! acc.toList
case i: Int => acc += i
case Result => sender() ! acc.toList
}
}).withDispatcher(dispatcherKey))


@ -13,6 +13,8 @@ import akka.util.unused
import scala.concurrent.duration._
object StablePriorityDispatcherSpec {
case object Result
val config = """
unbounded-stable-prio-dispatcher {
mailbox-type = "akka.dispatch.StablePriorityDispatcherSpec$Unbounded"
@ -26,19 +28,20 @@ object StablePriorityDispatcherSpec {
extends UnboundedStablePriorityMailbox(PriorityGenerator({
case i: Int if i <= 100 => i // Small integers have high priority
case _: Int => 101 // Don't care for other integers
case "Result" => Int.MaxValue
case Result => Int.MaxValue
}: Any => Int))
class Bounded(@unused settings: ActorSystem.Settings, @unused config: Config)
extends BoundedStablePriorityMailbox(PriorityGenerator({
case i: Int if i <= 100 => i // Small integers have high priority
case _: Int => 101 // Don't care for other integers
case "Result" => Int.MaxValue
case Result => Int.MaxValue
}: Any => Int), 1000, 10 seconds)
}
class StablePriorityDispatcherSpec extends AkkaSpec(StablePriorityDispatcherSpec.config) with DefaultTimeout {
import StablePriorityDispatcherSpec._
"A StablePriorityDispatcher" must {
"Order its messages according to the specified comparator while preserving FIFO for equal priority messages, " +
@ -70,11 +73,11 @@ class StablePriorityDispatcherSpec extends AkkaSpec(StablePriorityDispatcherSpec
self ! m
}
self.tell("Result", testActor)
self.tell(Result, testActor)
def receive = {
case i: Int => acc += i
case "Result" => sender() ! acc.toList
case i: Int => acc += i
case Result => sender() ! acc.toList
}
}).withDispatcher(dispatcherKey))


@ -11,7 +11,7 @@ import scala.concurrent.duration._
import akka.testkit._
import org.scalatest.WordSpec
import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.actor._
import scala.annotation.tailrec


@ -40,11 +40,17 @@ trait TcpIntegrationSpecSupport { _: AkkaSpec =>
def establishNewClientConnection(): (TestProbe, ActorRef, TestProbe, ActorRef) = {
val connectCommander = TestProbe()(clientSystem)
connectCommander.send(IO(Tcp)(clientSystem), Connect(endpoint, options = connectOptions))
val Connected(`endpoint`, localAddress) = connectCommander.expectMsgType[Connected]
val localAddress = connectCommander.expectMsgType[Connected] match {
case Connected(`endpoint`, localAddress) => localAddress
case Connected(other, _) => fail(s"No match: $other")
}
val clientHandler = TestProbe()(clientSystem)
connectCommander.sender() ! Register(clientHandler.ref)
val Connected(`localAddress`, `endpoint`) = bindHandler.expectMsgType[Connected]
bindHandler.expectMsgType[Connected] match {
case Connected(`localAddress`, `endpoint`) => //ok
case other => fail(s"No match: ${other}")
}
val serverHandler = TestProbe()
bindHandler.sender() ! Register(serverHandler.ref)


@ -4,7 +4,7 @@
package akka.io.dns
import collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.testkit.AkkaSpec
import com.spotify.docker.client.DefaultDockerClient
import com.spotify.docker.client.DockerClient.{ ListContainersParam, LogsParam }


@ -9,7 +9,7 @@ import org.scalatest.{ Matchers, WordSpec }
class ResolvConfParserSpec extends WordSpec with Matchers {
private def parse(str: String): ResolvConf = {
ResolvConfParser.parseLines(str.lines)
ResolvConfParser.parseLines(str.linesIterator)
}
"The ResolvConfParser" should {


@ -4,6 +4,8 @@
package akka.japi
import org.scalatestplus.junit.JUnitSuiteLike
import org.scalatest.junit.JUnitSuiteLike
import com.github.ghik.silencer.silent
@silent
class JavaAPITest extends JavaAPITestBase with JUnitSuiteLike


@ -167,7 +167,7 @@ class BackoffOnRestartSupervisorSpec extends AkkaSpec with ImplicitSender {
supervisor ! BackoffSupervisor.GetCurrentChild
// new instance
val Some(child) = expectMsgType[BackoffSupervisor.CurrentChild].ref
val child = expectMsgType[BackoffSupervisor.CurrentChild].ref.get
child ! "PING"
expectMsg("PONG")


@ -17,7 +17,7 @@ import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._
import org.scalatest.{ Matchers, WordSpec }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.mutable
import scala.concurrent.{ Await, ExecutionContext, ExecutionContextExecutor, Future }
import scala.util.control.Exception


@ -742,7 +742,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
a.asByteBuffers.forall(_.isReadOnly)
}
check { (a: ByteString) =>
import scala.collection.JavaConverters.iterableAsScalaIterableConverter
import akka.util.ccompat.JavaConverters._
a.asByteBuffers.zip(a.getByteBuffers().asScala).forall(x => x._1 == x._2)
}
}


@ -4,6 +4,8 @@
package akka.util
import org.scalatestplus.junit.JUnitSuiteLike
import org.scalatest.junit.JUnitSuiteLike
import com.github.ghik.silencer.silent
@silent
class JavaDurationSpec extends JavaDuration with JUnitSuiteLike


@ -5,6 +5,7 @@
package akka.actor.typed.eventstream
import scala.concurrent.duration._
import scala.language.postfixOps
import akka.actor.testkit.typed.scaladsl.{ ScalaTestWithActorTestKit, TestProbe }
import org.scalatest.WordSpecLike
@ -13,7 +14,7 @@ class EventStreamSpec extends ScalaTestWithActorTestKit with WordSpecLike {
import EventStreamSpec._
private final val ShortWait = 100 milli
private final val ShortWait = 100.millis
"system event stream".can {
val eventObjListener: TestProbe[EventObj.type] = testKit.createTestProbe()


@ -10,7 +10,7 @@ import akka.annotation.InternalApi
import akka.actor.typed.{ ActorSystem, Extension, ExtensionId, Extensions }
import scala.annotation.tailrec
import scala.util.{ Failure, Success, Try }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.actor.typed.ExtensionSetup


@ -10,7 +10,7 @@ import akka.event.Logging._
import akka.event.{ LoggingBus, LoggingFilterWithMarker, LogMarker => UntypedLM }
import akka.util.OptionVal
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* INTERNAL API


@ -9,7 +9,7 @@ import akka.actor.typed.receptionist.Receptionist.Command
import akka.actor.typed.receptionist.{ Receptionist, ServiceKey }
import akka.annotation.InternalApi
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* Internal API


@ -14,7 +14,7 @@ import akka.japi.function.{ Effect, Function2 => JapiFunction2 }
import akka.japi.pf.PFBuilder
import akka.util.unused
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.reflect.ClassTag
/**


@ -8,7 +8,7 @@ import akka.actor.typed.{ ActorRef, ActorSystem, Dispatchers, Extension, Extensi
import akka.actor.typed.internal.receptionist._
import akka.annotation.DoNotInherit
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.reflect.ClassTag
import akka.annotation.InternalApi


@ -0,0 +1,9 @@
# These were added for bincompat with 2.4 but no longer part of the public API,
# so can be safely excluded now. Required to avoid naming clashes with extension
# methods generated on Scala 2.13 (https://github.com/scala/scala/commit/f9879b63a87b1e3062c58abd9ce30e68d9fee8ca)
# These should go into the .backwards.excludes for the last 2.5 version in
# which Scala 2.13 support is merged
ProblemFilters.exclude[DirectMissingMethodProblem]("akka.pattern.AskableActorRef.ask$extension")
ProblemFilters.exclude[DirectMissingMethodProblem]("akka.pattern.AskableActorRef.?$extension")
ProblemFilters.exclude[DirectMissingMethodProblem]("akka.pattern.AskableActorSelection.ask$extension")
ProblemFilters.exclude[DirectMissingMethodProblem]("akka.pattern.AskableActorSelection.?$extension")


@ -7,13 +7,15 @@ package akka.util
import java.io.{ ObjectInputStream, ObjectOutputStream }
import java.nio.{ ByteBuffer, ByteOrder }
import java.lang.{ Iterable => JIterable }
import java.nio.charset.{ Charset, StandardCharsets }
import scala.annotation.{ tailrec, varargs }
import scala.collection.mutable.{ Builder, WrappedArray }
import scala.collection.{ immutable, mutable }
import scala.collection.immutable.{ IndexedSeq, IndexedSeqOps, StrictOptimizedSeqOps, VectorBuilder }
import scala.reflect.ClassTag
import java.nio.charset.{ Charset, StandardCharsets }
import com.github.ghik.silencer.silent
object ByteString {
@ -384,7 +386,7 @@ object ByteString {
}
}
override protected def writeReplace(): AnyRef = new SerializationProxy(this)
protected def writeReplace(): AnyRef = new SerializationProxy(this)
}
private[akka] object ByteStrings extends Companion {
@ -626,7 +628,7 @@ object ByteString {
}
}
override protected def writeReplace(): AnyRef = new SerializationProxy(this)
protected def writeReplace(): AnyRef = new SerializationProxy(this)
}
@SerialVersionUID(1L)
@ -675,10 +677,7 @@ sealed abstract class ByteString
override protected def fromSpecific(coll: IterableOnce[Byte]): ByteString = ByteString(coll)
override protected def newSpecificBuilder: mutable.Builder[Byte, ByteString] = ByteString.newBuilder
// FIXME this is a workaround for
// https://github.com/scala/bug/issues/11192#issuecomment-436926231
protected[this] override def writeReplace(): AnyRef = this
override val empty: ByteString = ByteString.empty
def apply(idx: Int): Byte
private[akka] def byteStringCompanion: ByteString.Companion
@ -814,6 +813,7 @@ sealed abstract class ByteString
* Java API: Returns an Iterable of read-only ByteBuffers that directly wraps this ByteStrings
* all fragments. Will always have at least one entry.
*/
@silent
def getByteBuffers(): JIterable[ByteBuffer] = {
import scala.collection.JavaConverters.asJavaIterableConverter
asByteBuffers.asJava


@ -14,4 +14,10 @@ package akka.util
package object ccompat {
private[akka] type Factory[-A, +C] = scala.collection.Factory[A, C]
private[akka] val Factory = scala.collection.Factory
// When we drop support for 2.12 we can delete this concept
// and import scala.jdk.CollectionConverters.Ops._ instead
object JavaConverters
extends scala.collection.convert.AsJavaExtensions
with scala.collection.convert.AsScalaExtensions
}


@ -82,4 +82,6 @@ package object ccompat {
implicit class ImmutableSortedSetOps[A](val real: i.SortedSet[A]) extends AnyVal {
def unsorted: i.Set[A] = real
}
object JavaConverters extends scala.collection.convert.DecorateAsJava with scala.collection.convert.DecorateAsScala
}
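
The two JavaConverters objects above (the 2.13 variant built on AsJavaExtensions/AsScalaExtensions and the 2.12 variant built on DecorateAsJava/DecorateAsScala) give call sites a single, deprecation-free import on both Scala versions. Below is a minimal sketch of the pattern this commit applies throughout, mirroring call sites such as ConfigFactory.parseMap; the JavaConvertersShimExample object itself is illustrative, not part of the diff:

import akka.util.ccompat.JavaConverters._
import com.typesafe.config.ConfigFactory

object JavaConvertersShimExample {
  def main(args: Array[String]): Unit = {
    // asJava: build a java.util.Map for the Typesafe Config parser
    val cfg = ConfigFactory.parseMap(Map("example.id" -> "demo").asJava)
    // asScala: walk the java.util.Set of top-level keys with Scala collections
    val keys = cfg.root.keySet.asScala.toList.sorted
    println(keys.mkString(", "))
  }
}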


@ -294,7 +294,7 @@ object CoordinatedShutdown extends ExtensionId[CoordinatedShutdown] with Extensi
* INTERNAL API
*/
private[akka] def phasesFromConfig(conf: Config): Map[String, Phase] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val defaultPhaseTimeout = conf.getString("default-phase-timeout")
val phasesConf = conf.getConfig("phases")
val defaultPhaseConfig = ConfigFactory.parseString(s"""
@ -588,7 +588,7 @@ final class CoordinatedShutdown private[akka] (
* Sum of timeouts of all phases that have some task.
*/
def totalTimeout(): FiniteDuration = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
tasks.keySet.asScala.foldLeft(Duration.Zero) {
case (acc, phase) => acc + timeout(phase)
}


@ -135,7 +135,7 @@ case object NoScopeGiven extends NoScopeGiven {
*/
private[akka] class Deployer(val settings: ActorSystem.Settings, val dynamicAccess: DynamicAccess) {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
private val resizerEnabled: Config = ConfigFactory.parseString("resizer.enabled=on")
private val deployments = new AtomicReference(WildcardIndex[Deploy]())


@ -75,6 +75,7 @@ trait UnrestrictedStash extends Actor with StashSupport {
* Overridden callback. Prepends all messages in the stash to the mailbox,
* clears the stash, stops all children and invokes the postStop() callback.
*/
@throws(classOf[Exception])
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
try unstashAll()
finally super.preRestart(reason, message)
@ -85,6 +86,7 @@ trait UnrestrictedStash extends Actor with StashSupport {
* Must be called when overriding this method, otherwise stashed messages won't be propagated to DeadLetters
* when actor stops.
*/
@throws(classOf[Exception])
override def postStop(): Unit =
try unstashAll()
finally super.postStop()
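
The scaladoc above explains why UnrestrictedStash overrides preRestart and postStop: stashed messages must be prepended back to the mailbox (and eventually reach DeadLetters when the actor stops). A small, hedged sketch of an actor using the public Stash trait that relies on exactly those hooks; GatedActor and its messages are illustrative, not from the diff:

import akka.actor.{ Actor, Stash }

// Buffers messages until "open" arrives, then replays them with unstashAll().
// If the actor restarts or stops while messages are still stashed, the
// overridden preRestart/postStop shown above unstash them automatically.
class GatedActor extends Actor with Stash {
  def receive: Receive = closed

  def closed: Receive = {
    case "open" =>
      unstashAll()
      context.become(open)
    case _ =>
      stash() // keep the message until the gate opens
  }

  def open: Receive = {
    case msg => println(s"processing $msg")
  }
}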


@ -30,6 +30,7 @@ private[akka] trait Children { this: ActorCell =>
Unsafe.instance.getObjectVolatile(this, AbstractActorCell.childrenOffset).asInstanceOf[ChildrenContainer]
final def children: immutable.Iterable[ActorRef] = childrenRefs.children
@silent
final def getChildren(): java.lang.Iterable[ActorRef] =
scala.collection.JavaConverters.asJavaIterableConverter(children).asJava


@ -194,7 +194,7 @@ class Dispatchers @InternalApi private[akka] (
* INTERNAL API
*/
private[akka] def config(id: String, appConfig: Config): Config = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
def simpleName = id.substring(id.lastIndexOf('.') + 1)
idConfig(id)
.withFallback(appConfig)
@ -203,7 +203,7 @@ class Dispatchers @InternalApi private[akka] (
}
private def idConfig(id: String): Config = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
ConfigFactory.parseMap(Map("id" -> id).asJava)
}


@ -93,7 +93,7 @@ object ExecutionContexts {
* Futures is the Java API for Futures and Promises
*/
object Futures {
import scala.collection.JavaConverters.iterableAsScalaIterableConverter
import akka.util.ccompat.JavaConverters._
/**
* Starts an asynchronous computation and returns a `Future` object with the result of that computation.


@ -55,7 +55,7 @@ private[akka] class Mailboxes(
private val mailboxTypeConfigurators = new ConcurrentHashMap[String, MailboxType]
private val mailboxBindings: Map[Class[_ <: Any], String] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
settings.config
.getConfig("akka.actor.mailbox.requirements")
.root
@ -255,7 +255,7 @@ private[akka] class Mailboxes(
//INTERNAL API
private def config(id: String): Config = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
ConfigFactory
.parseMap(Map("id" -> id).asJava)
.withFallback(settings.config.getConfig(id))


@ -745,7 +745,7 @@ object Logging {
* Java API: Retrieve the contents of the MDC.
*/
def getMDC: java.util.Map[String, Any] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
mdc.asJava
}
}
@ -1564,7 +1564,7 @@ trait DiagnosticLoggingAdapter extends LoggingAdapter {
import Logging._
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
private var _mdc = emptyMDC


@ -12,7 +12,7 @@ import com.typesafe.config.Config
import scala.concurrent.duration._
import scala.collection.immutable
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.util.{ ByteString, Helpers }
import akka.util.Helpers.Requiring
import akka.util.JavaDurationConverters._


@ -387,7 +387,7 @@ private[io] abstract class TcpConnection(val tcp: TcpExt, val channel: SocketCha
val interestedInClose: Set[ActorRef] =
(if (writePending) Set(pendingWrite.commander) else Set.empty) ++
closedMessage.toSet[CloseInformation].flatMap(_.notificationsTo)
closedMessage.toList.flatMap(_.notificationsTo).toSet
if (channel.isOpen) // if channel is still open here, we didn't go through stopWith => unexpected actor termination
prepareAbort()


@ -245,7 +245,7 @@ class UdpExt(system: ExtendedActorSystem) extends IO.Extension {
object UdpMessage {
import Udp._
import java.lang.{ Iterable => JIterable }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* Each [[Udp.Send]] can optionally request a positive acknowledgment to be sent


@ -256,7 +256,7 @@ object UdpConnectedMessage {
def resumeReading: Command = ResumeReading
implicit private def fromJava[T](coll: JIterable[T]): immutable.Iterable[T] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
coll.asScala.to(immutable.Iterable)
}
}


@ -9,7 +9,7 @@ import java.util
import akka.actor.NoSerializationVerificationNeeded
import scala.collection.{ immutable => im }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* Supersedes [[akka.io.Dns]] protocol.


@ -15,7 +15,7 @@ import akka.util.Helpers
import akka.util.JavaDurationConverters._
import com.typesafe.config.{ Config, ConfigValueType }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import scala.concurrent.duration.FiniteDuration
import scala.util.{ Failure, Success, Try }


@ -7,7 +7,7 @@ package akka.io.dns.internal
import java.io.File
import java.nio.file.Files
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.util.Try
private[dns] case class ResolvConf(search: List[String], ndots: Int)


@ -261,18 +261,6 @@ trait ExplicitAskSupport {
object AskableActorRef {
/**
* INTERNAL API: for binary compatibility
*/
private[pattern] def ask$extension(actorRef: ActorRef, message: Any, timeout: Timeout): Future[Any] =
actorRef.internalAsk(message, timeout, ActorRef.noSender)
/**
* INTERNAL API: for binary compatibility
*/
private[pattern] def $qmark$extension(actorRef: ActorRef, message: Any, timeout: Timeout): Future[Any] =
actorRef.internalAsk(message, timeout, ActorRef.noSender)
private def messagePartOfException(message: Any, sender: ActorRef): String = {
val msg = if (message == null) "unknown" else message
val wasSentBy = if (sender == ActorRef.noSender) "" else s" was sent by [$sender]"
@ -402,21 +390,6 @@ final class ExplicitlyAskableActorRef(val actorRef: ActorRef) extends AnyVal {
}
}
object AskableActorSelection {
/**
* INTERNAL API: for binary compatibility
*/
private[pattern] def ask$extension(actorSel: ActorSelection, message: Any, timeout: Timeout): Future[Any] =
actorSel.internalAsk(message, timeout, ActorRef.noSender)
/**
* INTERNAL API: for binary compatibility
*/
private[pattern] def $qmark$extension(actorSel: ActorSelection, message: Any, timeout: Timeout): Future[Any] =
actorSel.internalAsk(message, timeout, ActorRef.noSender)
}
/*
* Implementation class of the ask pattern enrichment of ActorSelection
*/


@ -126,7 +126,7 @@ object ConsistentHash {
* Java API: Factory method to create a ConsistentHash
*/
def create[T](nodes: java.lang.Iterable[T], virtualNodesFactor: Int): ConsistentHash[T] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
apply(nodes.asScala, virtualNodesFactor)(ClassTag(classOf[Any].asInstanceOf[Class[T]]))
}


@ -78,7 +78,7 @@ final case class SeveralRoutees(routees: immutable.IndexedSeq[Routee]) extends R
* Java API
*/
def getRoutees(): java.util.List[Routee] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
routees.asJava
}


@ -405,7 +405,7 @@ final case class Routees(routees: immutable.IndexedSeq[Routee]) {
* Java API
*/
def getRoutees: java.util.List[Routee] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
routees.asJava
}
}


@ -63,7 +63,7 @@ object Serialization {
}
private final def configToMap(cfg: Config): Map[String, String] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
cfg.root.unwrapped.asScala.toMap.map { case (k, v) => (k -> v.toString) }
}
}


@ -8,7 +8,7 @@ import akka.actor.ExtendedActorSystem
import akka.actor.setup.Setup
import scala.collection.immutable
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
object SerializationSetup {


@ -8,7 +8,11 @@ import annotation.tailrec
import java.util.concurrent.{ ConcurrentHashMap, ConcurrentSkipListSet }
import java.util.Comparator
import scala.collection.JavaConverters.{ asScalaIteratorConverter, collectionAsScalaIterableConverter }
import scala.collection.JavaConverters.collectionAsScalaIterableConverter
import akka.util.ccompat.JavaConverters._
import com.github.ghik.silencer.silent
/**
* An implementation of a ConcurrentMultiMap
@ -143,6 +147,7 @@ class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {
if (set ne null) {
set.synchronized {
container.remove(key, set)
@silent
val ret = collectionAsScalaIterableConverter(set.clone()).asScala // Make copy since we need to clear the original
set.clear() // Clear the original set to signal to any pending writers that there was a conflict
Some(ret)


@ -4,6 +4,8 @@
package akka.util
import com.github.ghik.silencer.silent
import akka.annotation.InternalApi
/**
@ -12,6 +14,15 @@ import akka.annotation.InternalApi
* or other reason. Useful in combination with
* `-Ywarn-unused:explicits,implicits` compiler options.
*
* Extends 'deprecated' to make sure using a parameter marked @unused
* produces a warning, and not using a parameter marked @unused does not
* produce an 'unused parameter' warning.
*
* This approach is deprecated in Scala 2.13 and scheduled to be
* removed in 2.14. Perhaps we should promote introducing an `@unused`
* to Scala? https://contributors.scala-lang.org/t/more-error-reporting-annotations/1681/7
*
* INTERNAL API
*/
@silent
@InternalApi private[akka] class unused extends deprecated("unused", "")
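
A hedged sketch of how the annotation is applied elsewhere in this diff (compare the mailbox constructors in PriorityDispatcherSpec above): parameters demanded by a reflective contract are marked @unused so -Ywarn-unused:explicits,implicits stays quiet. Since the annotation is private[akka], the example lives under the akka package; NoopMailboxConfig is illustrative, not part of the diff:

package akka.example

import akka.actor.ActorSystem
import akka.util.unused
import com.typesafe.config.Config

// Mailbox-style constructor: the reflective instantiation contract requires
// (ActorSystem.Settings, Config) parameters even when they are ignored, so
// both are annotated with @unused to suppress unused-parameter warnings.
class NoopMailboxConfig(@unused settings: ActorSystem.Settings, @unused config: Config)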


@ -10,7 +10,7 @@ import org.openjdk.jmh.runner.options.CommandLineOptions
object BenchRunner {
def main(args: Array[String]) = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val args2 = args.toList.flatMap {
case "quick" => "-i 1 -wi 1 -f1 -t1".split(" ").toList


@ -10,7 +10,7 @@ import org.openjdk.jmh.runner.options.CommandLineOptions
object BenchRunner {
def main(args: Array[String]) = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val args2 = args.toList.flatMap {
case "quick" => "-i 1 -wi 1 -f1 -t1".split(" ").toList


@ -16,6 +16,8 @@ import akka.cluster.MemberStatus
import java.util.concurrent.ThreadLocalRandom
import akka.actor.DeadLetterSuppression
import com.github.ghik.silencer.silent
/**
* Runtime collection management commands.
*/
@ -92,6 +94,7 @@ trait ClusterMetricsEvent
final case class ClusterMetricsChanged(nodeMetrics: Set[NodeMetrics]) extends ClusterMetricsEvent {
/** Java API */
@silent
def getNodeMetrics: java.lang.Iterable[NodeMetrics] =
scala.collection.JavaConverters.asJavaIterableConverter(nodeMetrics).asJava
}


@ -9,6 +9,8 @@ import scala.util.Success
import scala.util.Failure
import scala.util.Try
import com.github.ghik.silencer.silent
/**
* Metrics key/value.
*
@ -323,6 +325,7 @@ final case class NodeMetrics(address: Address, timestamp: Long, metrics: Set[Met
/**
* Java API
*/
@silent
def getMetrics: java.lang.Iterable[Metric] =
scala.collection.JavaConverters.asJavaIterableConverter(metrics).asJava


@ -18,7 +18,7 @@ import akka.util.ccompat._
import scala.annotation.tailrec
import scala.collection.immutable
import scala.collection.JavaConverters.{ asJavaIterableConverter, asScalaBufferConverter, setAsJavaSetConverter }
import akka.util.ccompat.JavaConverters._
import java.io.NotSerializableException
import akka.dispatch.Dispatchers


@ -8,7 +8,7 @@ import java.net.URLEncoder
import java.util.Optional
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import scala.concurrent.Await
import scala.util.control.NonFatal


@ -109,14 +109,14 @@ object ShardCoordinator {
shardId: ShardId,
currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]]): Future[ActorRef] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
allocateShard(requester, shardId, currentShardAllocations.asJava)
}
override final def rebalance(
currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]],
rebalanceInProgress: Set[ShardId]): Future[Set[ShardId]] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
implicit val ec = ExecutionContexts.sameThreadExecutionContext
rebalance(currentShardAllocations.asJava, rebalanceInProgress.asJava).map(_.asScala.toSet)
}


@ -242,7 +242,7 @@ object ShardRegion {
* Java API
*/
def getRegions: java.util.Set[Address] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
regions.asJava
}
@ -269,7 +269,7 @@ object ShardRegion {
* Java API
*/
def getRegions(): java.util.Map[Address, ShardRegionStats] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
regions.asJava
}
}
@ -297,7 +297,7 @@ object ShardRegion {
* Java API
*/
def getStats(): java.util.Map[ShardId, Int] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
stats.asJava
}
@ -329,7 +329,7 @@ object ShardRegion {
* If gathering the shard information times out the set of shards will be empty.
*/
def getShards(): java.util.Set[ShardState] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
shards.asJava
}
}
@ -340,7 +340,7 @@ object ShardRegion {
* Java API:
*/
def getEntityIds(): java.util.Set[EntityId] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
entityIds.asJava
}
}


@ -9,7 +9,7 @@ import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import akka.actor.ActorRef
import akka.actor.ExtendedActorSystem


@ -152,7 +152,7 @@ final class ClusterClientSettings(
* Java API
*/
def withInitialContacts(initialContacts: java.util.Set[ActorPath]): ClusterClientSettings = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
withInitialContacts(initialContacts.asScala.toSet)
}
@ -262,7 +262,7 @@ case object GetContactPoints extends GetContactPoints {
* @param contactPoints The presently known list of contact points.
*/
final case class ContactPoints(contactPoints: Set[ActorPath]) {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* Java API
@ -811,7 +811,7 @@ case object GetClusterClients extends GetClusterClients {
* @param clusterClients The presently known list of cluster clients.
*/
final case class ClusterClients(clusterClients: Set[ActorRef]) {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
/**
* Java API


@ -4,7 +4,7 @@
package akka.cluster.client.protobuf
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.actor.ExtendedActorSystem
import akka.serialization.BaseSerializer
import akka.serialization.SerializerWithStringManifest


@ -236,7 +236,7 @@ object DistributedPubSubMediator {
* Java API
*/
def getTopics(): java.util.Set[String] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
topics.asJava
}
}
@ -590,14 +590,12 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings)
case Send(path, msg, localAffinity) =>
val routees = registry(selfAddress).content.get(path) match {
case Some(valueHolder) if localAffinity =>
(for {
routee <- valueHolder.routee
} yield routee).toVector
valueHolder.routee.toList.toIndexedSeq
case _ =>
(for {
(_, bucket) <- registry
valueHolder <- bucket.content.get(path).toSeq
routee <- valueHolder.routee.toSeq
valueHolder <- bucket.content.get(path).toList
routee <- valueHolder.routee.toList
} yield routee).toVector
}
@ -780,8 +778,8 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings)
val refs = for {
(address, bucket) <- registry
if !(allButSelf && address == selfAddress) // if we should skip sender() node and current address == self address => skip
valueHolder <- bucket.content.get(path).toSeq
ref <- valueHolder.ref.toSeq
valueHolder <- bucket.content.get(path).toList
ref <- valueHolder.ref.toList
} yield ref
if (refs.isEmpty) ignoreOrSendToDeadLetters(msg)
else refs.foreach(_.forward(msg))


@ -12,7 +12,7 @@ import java.util.zip.GZIPOutputStream
import java.util.zip.GZIPInputStream
import scala.annotation.tailrec
import akka.cluster.pubsub.protobuf.msg.{ DistributedPubSubMessages => dm }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.cluster.pubsub.DistributedPubSubMediator._
import akka.cluster.pubsub.DistributedPubSubMediator.Internal._
import akka.actor.ActorRef


@ -15,7 +15,7 @@ import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
import scala.concurrent.{ Future, Promise }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
object TestLeaseExt extends ExtensionId[TestLeaseExt] with ExtensionIdProvider {
override def get(system: ActorSystem): TestLeaseExt = super.get(system)


@ -26,6 +26,8 @@ import scala.util.control.NonFatal
import akka.event.Logging.LogLevel
import com.github.ghik.silencer.silent
/**
* Cluster Extension Id and factory for creating Cluster extension.
*/
@ -94,6 +96,7 @@ class Cluster(val system: ExtendedActorSystem) extends Extension {
/**
* Java API: roles that this member has
*/
@silent
def getSelfRoles: java.util.Set[String] =
scala.collection.JavaConverters.setAsJavaSetConverter(selfRoles).asJava


@ -19,6 +19,8 @@ import akka.util.ccompat._
import scala.runtime.AbstractFunction5
import com.github.ghik.silencer.silent
/**
* Domain events published to the event bus.
* Subscribe with:
@ -120,25 +122,28 @@ object ClusterEvent {
* Java API: get current member list.
*/
def getMembers: java.lang.Iterable[Member] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
members.asJava
}
/**
* Java API: get current unreachable set.
*/
@silent
def getUnreachable: java.util.Set[Member] =
scala.collection.JavaConverters.setAsJavaSetConverter(unreachable).asJava
/**
* Java API: All data centers in the cluster
*/
@silent
def getUnreachableDataCenters: java.util.Set[String] =
scala.collection.JavaConverters.setAsJavaSetConverter(unreachableDataCenters).asJava
/**
* Java API: get current seen-by set.
*/
@silent
def getSeenBy: java.util.Set[Address] =
scala.collection.JavaConverters.setAsJavaSetConverter(seenBy).asJava
@ -166,6 +171,7 @@ object ClusterEvent {
/**
* Java API: All node roles in the cluster
*/
@silent
def getAllRoles: java.util.Set[String] =
scala.collection.JavaConverters.setAsJavaSetConverter(allRoles).asJava
@ -177,6 +183,7 @@ object ClusterEvent {
/**
* Java API: All data centers in the cluster
*/
@silent
def getAllDataCenters: java.util.Set[String] =
scala.collection.JavaConverters.setAsJavaSetConverter(allDataCenters).asJava


@ -166,7 +166,7 @@ final class ClusterSettings(val config: Config, val systemName: String) {
cc.getInt("min-nr-of-members")
}.requiring(_ > 0, "min-nr-of-members must be > 0")
val MinNrOfMembersOfRole: Map[String, Int] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
cc.getConfig("role")
.root
.asScala
@ -186,7 +186,7 @@ final class ClusterSettings(val config: Config, val systemName: String) {
val ByPassConfigCompatCheck: Boolean = !cc.getBoolean("configuration-compatibility-check.enforce-on-join")
val ConfigCompatCheckers: Set[String] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
cc.getConfig("configuration-compatibility-check.checkers")
.root
.unwrapped
@ -200,7 +200,7 @@ final class ClusterSettings(val config: Config, val systemName: String) {
}
val SensitiveConfigPaths = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val sensitiveKeys =
cc.getConfig("configuration-compatibility-check.sensitive-config-paths")


@ -11,7 +11,7 @@ import akka.annotation.{ DoNotInherit, InternalApi }
import akka.util.ccompat._
import com.typesafe.config.{ Config, ConfigFactory, ConfigValue }
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.{ immutable => im }
abstract class JoinConfigCompatChecker {


@ -49,6 +49,7 @@ class Member private[cluster] (
/**
* Java API
*/
@silent
def getRoles: java.util.Set[String] =
scala.collection.JavaConverters.setAsJavaSetConverter(roles).asJava


@ -15,14 +15,13 @@ import akka.protobuf.{ ByteString, MessageLite }
import scala.annotation.tailrec
import scala.collection.immutable
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.concurrent.duration.Deadline
import akka.annotation.InternalApi
import akka.cluster.InternalClusterAction._
import akka.cluster.routing.{ ClusterRouterPool, ClusterRouterPoolSettings }
import akka.routing.Pool
import akka.util.ccompat._
import akka.util.ccompat._
import com.github.ghik.silencer.silent
import com.typesafe.config.{ Config, ConfigFactory, ConfigRenderOptions }
@ -184,6 +183,7 @@ final class ClusterMessageSerializer(val system: ExtendedActorSystem)
builder.build()
}
@silent
private def clusterRouterPoolSettingsToProto(settings: ClusterRouterPoolSettings): cm.ClusterRouterPoolSettings = {
val builder = cm.ClusterRouterPoolSettings.newBuilder()
builder
@ -193,8 +193,7 @@ final class ClusterMessageSerializer(val system: ExtendedActorSystem)
.addAllUseRoles(settings.useRoles.asJava)
// for backwards compatibility
@silent
val _ = settings.useRole.foreach(builder.setUseRole)
settings.useRole.foreach(builder.setUseRole)
builder.build()
}


@ -30,7 +30,7 @@ import com.typesafe.config.ConfigFactory
import scala.annotation.{ tailrec, varargs }
import scala.collection.immutable
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
object ClusterRouterGroupSettings {
@deprecated("useRole has been replaced with useRoles", since = "2.5.4")


@ -770,7 +770,7 @@ abstract class StressSpec
.append(" MB")
sb.append("\n")
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val args = runtime.getInputArguments.asScala.filterNot(_.contains("classpath")).mkString("\n ")
sb.append("Args:\n ").append(args)
sb.append("\n")


@ -12,8 +12,10 @@ import akka.dispatch.Dispatchers
import akka.remote.PhiAccrualFailureDetector
import akka.util.Helpers.ConfigOps
import akka.actor.Address
import com.github.ghik.silencer.silent
@silent
class ClusterConfigSpec extends AkkaSpec {
"Clustering" must {
@ -41,8 +43,7 @@ class ClusterConfigSpec extends AkkaSpec {
UnreachableNodesReaperInterval should ===(1 second)
PublishStatsInterval should ===(Duration.Undefined)
AutoDownUnreachableAfter should ===(Duration.Undefined)
@silent
val _ = DownRemovalMargin should ===(Duration.Zero)
DownRemovalMargin should ===(Duration.Zero)
MinNrOfMembers should ===(1)
MinNrOfMembersOfRole should ===(Map.empty[String, Int])
SelfDataCenter should ===("default")


@ -37,18 +37,20 @@ class JoinConfigCompatPreDefinedChecksSpec extends WordSpec with Matchers {
"fail when some required keys are NOT provided" in {
val Invalid(incompatibleKeys) =
JoinConfigCompatChecker.exists(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
|}
""".stripMargin))
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is missing")
JoinConfigCompatChecker.exists(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
|}
""".stripMargin)) match {
case Valid =>
fail()
case Invalid(incompatibleKeys) =>
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is missing")
}
}
}
@ -87,56 +89,62 @@ class JoinConfigCompatPreDefinedChecksSpec extends WordSpec with Matchers {
"fail when some required keys are NOT provided" in {
val Invalid(incompatibleKeys) =
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
|}
""".stripMargin),
clusterConfig)
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is missing")
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
|}
""".stripMargin),
clusterConfig) match {
case Valid =>
fail()
case Invalid(incompatibleKeys) =>
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is missing")
}
}
"fail when all required keys are passed, but some values don't match cluster config" in {
val Invalid(incompatibleKeys) =
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
| akka.cluster.retry-unsuccessful-join-after = 15s
| akka.cluster.allow-weakly-up-members = off
|}
""".stripMargin),
clusterConfig)
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is incompatible")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is incompatible")
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
| akka.cluster.retry-unsuccessful-join-after = 15s
| akka.cluster.allow-weakly-up-members = off
|}
""".stripMargin),
clusterConfig) match {
case Valid =>
fail()
case Invalid(incompatibleKeys) =>
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is incompatible")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is incompatible")
}
}
"fail when all required keys are passed, but some are missing and others don't match cluster config" in {
val Invalid(incompatibleKeys) =
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
| akka.cluster.allow-weakly-up-members = off
|}
""".stripMargin),
clusterConfig)
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is incompatible")
JoinConfigCompatChecker.fullMatch(
requiredKeys,
config("""
|{
| akka.cluster.min-nr-of-members = 1
| akka.cluster.allow-weakly-up-members = off
|}
""".stripMargin),
clusterConfig) match {
case Valid =>
fail()
case Invalid(incompatibleKeys) =>
incompatibleKeys should have size 2
incompatibleKeys should contain("akka.cluster.retry-unsuccessful-join-after is missing")
incompatibleKeys should contain("akka.cluster.allow-weakly-up-members is incompatible")
}
}
}


@ -14,7 +14,7 @@ import collection.immutable.SortedSet
import akka.testkit.{ AkkaSpec, TestKit }
import com.github.ghik.silencer.silent
import com.typesafe.config.ConfigFactory
@silent
class ClusterMessageSerializerSpec extends AkkaSpec("akka.actor.provider = cluster") {
val serializer = new ClusterMessageSerializer(system.asInstanceOf[ExtendedActorSystem])
@ -148,8 +148,7 @@ class ClusterMessageSerializerSpec extends AkkaSpec("akka.actor.provider = clust
pool.settings.totalInstances should ===(123)
pool.settings.maxInstancesPerNode should ===(345)
pool.settings.allowLocalRoutees should ===(true)
@silent
val _ = pool.settings.useRole should ===(Some("role ABC"))
pool.settings.useRole should ===(Some("role ABC"))
pool.settings.useRoles should ===(Set("role ABC"))
}
} finally {


@ -35,7 +35,7 @@ object ServiceDiscovery {
* Java API
*/
def getAddresses: java.util.List[ResolvedTarget] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
addresses.asJava
}
@ -58,6 +58,8 @@ object ServiceDiscovery {
object ResolvedTarget {
// Simply compare the bytes of the address.
// This may not work in exotic cases such as IPv4 addresses encoded as IPv6 addresses.
import com.github.ghik.silencer.silent
@silent
private implicit val inetAddressOrdering: Ordering[InetAddress] =
Ordering.by[InetAddress, Iterable[Byte]](_.getAddress)


@ -13,7 +13,7 @@ import akka.event.Logging
import akka.util.Helpers.Requiring
import com.typesafe.config.Config
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal


@ -11,7 +11,7 @@ import akka.discovery.ServiceDiscovery.{ Resolved, ResolvedTarget }
import akka.event.Logging
import com.typesafe.config.Config
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration


@ -8,7 +8,7 @@ import java.io.File
import java.nio.ByteBuffer
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.concurrent.duration._
import scala.util.Try
import scala.util.control.NonFatal
@ -301,7 +301,7 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
TimeUnit.NANOSECONDS.toMillis(System.nanoTime - t0))
} catch {
case NonFatal(e) =>
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
log.error(e, "failed to store [{}]", pending.keySet.asScala.mkString(","))
tx.abort()
} finally {


@ -43,7 +43,7 @@ final case class GSet[A] private (elements: Set[A])(override val delta: Option[G
* Java API
*/
def getElements(): java.util.Set[A] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
elements.asJava
}


@ -75,7 +75,7 @@ final class LWWMap[A, B] private[akka] (private[akka] val underlying: ORMap[A, L
* Java API: All entries of the map.
*/
def getEntries(): java.util.Map[A, B] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
entries.asJava
}


@ -196,7 +196,7 @@ final class ORMap[A, B <: ReplicatedData] private[akka] (
* Java API: All entries of the map.
*/
def getEntries(): java.util.Map[A, B] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
entries.asJava
}


@ -102,7 +102,7 @@ final class ORMultiMap[A, B] private[akka] (
* Java API: All entries of a multimap where keys are strings and values are sets.
*/
def getEntries(): java.util.Map[A, java.util.Set[B]] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
val result = new java.util.HashMap[A, java.util.Set[B]]
if (withValueDeltas)
underlying.entries.foreach {
@ -165,14 +165,14 @@ final class ORMultiMap[A, B] private[akka] (
* replicated data set.
*/
def put(node: SelfUniqueAddress, key: A, value: java.util.Set[B]): ORMultiMap[A, B] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
put(node.uniqueAddress, key, value.asScala.toSet)
}
@Deprecated
@deprecated("Use `put` that takes a `SelfUniqueAddress` parameter instead.", since = "2.5.20")
def put(node: Cluster, key: A, value: java.util.Set[B]): ORMultiMap[A, B] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
put(node.selfUniqueAddress, key, value.asScala.toSet)
}


@ -309,7 +309,7 @@ final class ORSet[A] private[akka] (
* Java API
*/
def getElements(): java.util.Set[A] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
elements.asJava
}


@ -54,7 +54,7 @@ final class PNCounterMap[A] private[akka] (private[akka] val underlying: ORMap[A
/** Java API */
def getEntries: java.util.Map[A, BigInteger] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
underlying.entries.map { case (k, c) => k -> c.value.bigInteger }.asJava
}


@ -65,6 +65,8 @@ import akka.event.Logging
import akka.util.JavaDurationConverters._
import akka.util.ccompat._
import com.github.ghik.silencer.silent
@ccompatUsedUntil213
object ReplicatorSettings {
@ -87,7 +89,7 @@ object ReplicatorSettings {
case _ => config.getDuration("pruning-interval", MILLISECONDS).millis
}
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
new ReplicatorSettings(
role = roleOption(config.getString("role")),
gossipInterval = config.getDuration("gossip-interval", MILLISECONDS).millis,
@ -177,7 +179,7 @@ final class ReplicatorSettings(
deltaCrdtEnabled: Boolean,
maxDeltaSize: Int) =
this(
role.toSet,
role.iterator.toSet,
gossipInterval,
notifySubscribersInterval,
maxDeltaElements,
@ -201,7 +203,7 @@ final class ReplicatorSettings(
pruningInterval: FiniteDuration,
maxPruningDissemination: FiniteDuration) =
this(
roles = role.toSet,
roles = role.iterator.toSet,
gossipInterval,
notifySubscribersInterval,
maxDeltaElements,
@ -270,9 +272,9 @@ final class ReplicatorSettings(
deltaCrdtEnabled,
200)
def withRole(role: String): ReplicatorSettings = copy(roles = ReplicatorSettings.roleOption(role).toSet)
def withRole(role: String): ReplicatorSettings = copy(roles = ReplicatorSettings.roleOption(role).iterator.toSet)
def withRole(role: Option[String]): ReplicatorSettings = copy(roles = role.toSet)
def withRole(role: Option[String]): ReplicatorSettings = copy(roles = role.iterator.toSet)
@varargs
def withRoles(roles: String*): ReplicatorSettings = copy(roles = roles.toSet)
@ -325,7 +327,7 @@ final class ReplicatorSettings(
* Java API
*/
def withDurableKeys(durableKeys: java.util.Set[String]): ReplicatorSettings = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
withDurableKeys(durableKeys.asScala.toSet)
}
@ -464,7 +466,7 @@ object Replicator {
* Java API
*/
def getKeyIds: java.util.Set[String] = {
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
keyIds.asJava
}
}
@ -1329,7 +1331,9 @@ final class Replicator(settings: ReplicatorSettings) extends Actor with ActorLog
// possibility to disable Gossip for testing purpose
var fullStateGossipEnabled = true
@silent
val subscribers = new mutable.HashMap[KeyId, mutable.Set[ActorRef]] with mutable.MultiMap[KeyId, ActorRef]
@silent
val newSubscribers = new mutable.HashMap[KeyId, mutable.Set[ActorRef]] with mutable.MultiMap[KeyId, ActorRef]
var subscriptionKeys = Map.empty[KeyId, KeyR]


@ -11,7 +11,7 @@ import java.util.Comparator
import java.util.TreeSet
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import akka.actor.ExtendedActorSystem


@ -6,7 +6,7 @@ package akka.cluster.ddata.protobuf
import scala.concurrent.duration._
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import scala.collection.immutable
import scala.concurrent.duration.Duration
import akka.actor.ExtendedActorSystem


@ -10,7 +10,7 @@ import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import scala.annotation.tailrec
import scala.collection.immutable.TreeMap
import scala.collection.JavaConverters._
import akka.util.ccompat.JavaConverters._
import akka.actor.ActorRef
import akka.actor.Address
import akka.actor.ExtendedActorSystem


@ -138,7 +138,7 @@ class ORMultiMapSpec extends WordSpec with Matchers {
"be able to get all bindings for an entry and then reduce them upon putting them back" in {
val m = ORMultiMap().addBinding(node1, "a", "A1").addBinding(node1, "a", "A2").addBinding(node1, "b", "B1")
val Some(a) = m.get("a")
val a = m.get("a").get
a should be(Set("A1", "A2"))


@ -169,6 +169,7 @@ public class LambdaPersistencePluginDocTest {
static Object o4 =
new Object() {
// https://github.com/akka/akka/issues/26826
// #journal-tck-before-after-java
@RunWith(JUnitRunner.class)
class MyJournalSpecTest extends JavaJournalSpec {

Some files were not shown because too many files have changed in this diff.