diff --git a/akka-actor/src/main/scala/akka/io/InetAddressDnsResolver.scala b/akka-actor/src/main/scala/akka/io/InetAddressDnsResolver.scala index 8245c1c64b..50c883dce6 100644 --- a/akka-actor/src/main/scala/akka/io/InetAddressDnsResolver.scala +++ b/akka-actor/src/main/scala/akka/io/InetAddressDnsResolver.scala @@ -20,12 +20,12 @@ class InetAddressDnsResolver(cache: SimpleDnsCache, config: Config) extends Acto // Controls the cache policy for successful lookups only private final val CachePolicyProp = "networkaddress.cache.ttl" - // Deprecated JVM property key, keeping for legacy compatibility; replaced by CachePolicyProp + // Deprecated JVM property key, keeping for legacy compatibility; replaced by CachePolicyProp private final val CachePolicyPropFallback = "sun.net.inetaddr.ttl" // Controls the cache policy for negative lookups only private final val NegativeCachePolicyProp = "networkaddress.cache.negative.ttl" - // Deprecated JVM property key, keeping for legacy compatibility; replaced by NegativeCachePolicyProp + // Deprecated JVM property key, keeping for legacy compatibility; replaced by NegativeCachePolicyProp private final val NegativeCachePolicyPropFallback = "sun.net.inetaddr.negative.ttl" // default values (-1 and 0 are magic numbers, trust them) diff --git a/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala b/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala index cd94a8d966..ff7b749a98 100644 --- a/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala +++ b/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala @@ -124,4 +124,4 @@ private[io] object TcpOutgoingConnection { def connectTimeoutExpired(timeout: Option[FiniteDuration]) = new ConnectException(s"Connect timeout of $timeout expired") with NoStackTrace -} \ No newline at end of file +} diff --git a/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala b/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala index a1cb56848d..0b055e4abe 100644 --- a/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala +++ b/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala @@ -32,7 +32,7 @@ case object OptimalSizeExploringResizer { /** * INTERNAL API */ - private[routing]type PoolSize = Int + private[routing] type PoolSize = Int /** * INTERNAL API @@ -51,7 +51,7 @@ case object OptimalSizeExploringResizer { /** * INTERNAL API */ - private[routing]type PerformanceLog = Map[PoolSize, Duration] + private[routing] type PerformanceLog = Map[PoolSize, Duration] def apply(resizerCfg: Config): OptimalSizeExploringResizer = DefaultOptimalSizeExploringResizer( diff --git a/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala b/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala index 45226e813c..465f49eb07 100644 --- a/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala +++ b/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala @@ -149,8 +149,8 @@ private[akka] class RouterActor extends Actor { val routingLogicController: Option[ActorRef] = cell.routerConfig.routingLogicController( cell.router.logic).map(props ⇒ context.actorOf( - props.withDispatcher(context.props.dispatcher), - name = "routingLogicController")) + props.withDispatcher(context.props.dispatcher), + name = "routingLogicController")) def receive = { case GetRoutees ⇒ diff --git a/akka-actor/src/main/scala/akka/util/ByteString.scala b/akka-actor/src/main/scala/akka/util/ByteString.scala index 1127fbac83..24f690bb1b 100644 --- 
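For context on the two property pairs touched above: a minimal sketch of primary-key-with-deprecated-fallback resolution. The property names are the real JVM keys; `resolveTtl` and its default values are illustrative assumptions, not the resolver's actual logic.

```scala
import java.security.Security
import scala.util.Try

// Illustrative sketch, not part of the patch: try the modern key first, then
// the deprecated sun.net.* fallback. The networkaddress.* keys are read as
// java.security.Security properties; the sun.net.* keys are system properties.
def resolveTtl(primary: String, fallback: String, default: Int): Int = {
  def asInt(s: String): Option[Int] = Try(s.toInt).toOption
  Option(Security.getProperty(primary)).flatMap(asInt)
    .orElse(Option(System.getProperty(fallback)).flatMap(asInt))
    .getOrElse(default)
}

val positiveTtl = resolveTtl("networkaddress.cache.ttl", "sun.net.inetaddr.ttl", default = 30)
val negativeTtl = resolveTtl("networkaddress.cache.negative.ttl", "sun.net.inetaddr.negative.ttl", default = 0)
```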
a/akka-actor/src/main/scala/akka/util/ByteString.scala +++ b/akka-actor/src/main/scala/akka/util/ByteString.scala @@ -667,7 +667,7 @@ sealed abstract class ByteString extends IndexedSeq[Byte] with IndexedSeqOptimiz // *must* be overridden by derived classes. This construction is necessary // to specialize the return type, as the method is already implemented in // a parent trait. - // + // // Avoid `iterator` in performance sensitive code, call ops directly on ByteString instead override def iterator: ByteIterator = throw new UnsupportedOperationException("Method iterator is not implemented in ByteString") @@ -742,7 +742,7 @@ sealed abstract class ByteString extends IndexedSeq[Byte] with IndexedSeqOptimiz * @param buffer a ByteBuffer to copy bytes to * @return the number of bytes actually copied */ - // *must* be overridden by derived classes. + // *must* be overridden by derived classes. def copyToBuffer(buffer: ByteBuffer): Int = throw new UnsupportedOperationException("Method copyToBuffer is not implemented in ByteString") /** diff --git a/akka-bench-jmh/src/main/scala/akka/BenchRunner.scala b/akka-bench-jmh/src/main/scala/akka/BenchRunner.scala index 534b6c05ac..4944bb1ccc 100644 --- a/akka-bench-jmh/src/main/scala/akka/BenchRunner.scala +++ b/akka-bench-jmh/src/main/scala/akka/BenchRunner.scala @@ -9,10 +9,10 @@ object BenchRunner { import scala.collection.JavaConverters._ val args2 = args.toList.flatMap { - case "quick" => "-i 1 -wi 1 -f1 -t1".split(" ").toList - case "full" => "-i 10 -wi 4 -f3 -t1".split(" ").toList - case "jitwatch" => "-jvmArgs=-XX:+UnlockDiagnosticVMOptions -XX:+TraceClassLoading -XX:+LogCompilation" :: Nil - case other => other :: Nil + case "quick" ⇒ "-i 1 -wi 1 -f1 -t1".split(" ").toList + case "full" ⇒ "-i 10 -wi 4 -f3 -t1".split(" ").toList + case "jitwatch" ⇒ "-jvmArgs=-XX:+UnlockDiagnosticVMOptions -XX:+TraceClassLoading -XX:+LogCompilation" :: Nil + case other ⇒ other :: Nil } val opts = new CommandLineOptions(args2: _*) @@ -20,7 +20,7 @@ object BenchRunner { val report = results.asScala.map { result: RunResult ⇒ val bench = result.getParams.getBenchmark - val params = result.getParams.getParamsKeys.asScala.map(key => s"$key=${result.getParams.getParam(key)}").mkString("_") + val params = result.getParams.getParamsKeys.asScala.map(key ⇒ s"$key=${result.getParams.getParam(key)}").mkString("_") val score = result.getAggregatedResult.getPrimaryResult.getScore.round val unit = result.getAggregatedResult.getPrimaryResult.getScoreUnit s"\t${bench}_${params}\t$score\t$unit" diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala index e187b63c28..dc66658991 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala @@ -11,7 +11,7 @@ import org.openjdk.jmh.annotations.Fork import org.openjdk.jmh.annotations.Measurement import org.openjdk.jmh.annotations.Mode import org.openjdk.jmh.annotations.OutputTimeUnit -import org.openjdk.jmh.annotations.{ Scope => JmhScope } +import org.openjdk.jmh.annotations.{ Scope ⇒ JmhScope } import org.openjdk.jmh.annotations.State import org.openjdk.jmh.annotations.Warmup diff --git a/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolComparativeBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolComparativeBenchmark.scala index 91d2d4de13..c052a65c3c 100644 --- 
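The BenchRunner hunk above only swaps arrow glyphs, but the expansion idiom is worth spelling out; a self-contained sketch with the flag lists copied from the diff:

```scala
// Shorthand profiles are rewritten into full JMH flag lists before
// CommandLineOptions parses them; the benchmark class name is one from this tree.
val args = List("quick", "akka.stream.FlowMapBenchmark")
val expanded = args.flatMap {
  case "quick" ⇒ "-i 1 -wi 1 -f1 -t1".split(" ").toList // 1 iteration, 1 warmup, 1 fork, 1 thread
  case "full"  ⇒ "-i 10 -wi 4 -f3 -t1".split(" ").toList
  case other   ⇒ other :: Nil
}
// expanded: List(-i, 1, -wi, 1, -f1, -t1, akka.stream.FlowMapBenchmark)
```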
a/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolComparativeBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolComparativeBenchmark.scala @@ -39,8 +39,8 @@ class AffinityPoolComparativeBenchmark { requireRightNumberOfCores(cores) val mailboxConf = mailbox match { - case "default" => "" - case "SingleConsumerOnlyUnboundedMailbox" => + case "default" ⇒ "" + case "SingleConsumerOnlyUnboundedMailbox" ⇒ s"""default-mailbox.mailbox-type = "${classOf[akka.dispatch.SingleConsumerOnlyUnboundedMailbox].getName}"""" } diff --git a/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolRequestResponseBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolRequestResponseBenchmark.scala index 75f5d93e94..e9e4b21979 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolRequestResponseBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/AffinityPoolRequestResponseBenchmark.scala @@ -43,8 +43,8 @@ class AffinityPoolRequestResponseBenchmark { requireRightNumberOfCores(cores) val mailboxConf = mailbox match { - case "default" => "" - case "SingleConsumerOnlyUnboundedMailbox" => + case "default" ⇒ "" + case "SingleConsumerOnlyUnboundedMailbox" ⇒ s"""default-mailbox.mailbox-type = "${classOf[akka.dispatch.SingleConsumerOnlyUnboundedMailbox].getName}"""" } diff --git a/akka-bench-jmh/src/main/scala/akka/actor/BenchmarkActors.scala b/akka-bench-jmh/src/main/scala/akka/actor/BenchmarkActors.scala index 8d3d1e9f86..e3def86a8c 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/BenchmarkActors.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/BenchmarkActors.scala @@ -19,7 +19,7 @@ object BenchmarkActors { class PingPong(val messagesPerPair: Int, latch: CountDownLatch) extends Actor { var left = messagesPerPair / 2 def receive = { - case Message => + case Message ⇒ if (left == 0) { latch.countDown() @@ -37,7 +37,7 @@ object BenchmarkActors { class Echo extends Actor { def receive = { - case Message => + case Message ⇒ sender() ! Message } } @@ -54,7 +54,7 @@ object BenchmarkActors { private var batch = 0 def receive = { - case Message => + case Message ⇒ batch -= 1 if (batch <= 0) { if (!sendBatch()) { @@ -81,9 +81,9 @@ object BenchmarkActors { class Pipe(next: Option[ActorRef]) extends Actor { def receive = { - case Message => + case Message ⇒ if (next.isDefined) next.get forward Message - case Stop => + case Stop ⇒ context stop self if (next.isDefined) next.get forward Stop } @@ -97,7 +97,7 @@ object BenchmarkActors { val fullPathToDispatcher = "akka.actor." + dispatcher val latch = new CountDownLatch(numPairs * 2) val actors = for { - i <- (1 to numPairs).toVector + i ← (1 to numPairs).toVector } yield { val ping = system.actorOf(PingPong.props(messagesPerPair, latch).withDispatcher(fullPathToDispatcher)) val pong = system.actorOf(PingPong.props(messagesPerPair, latch).withDispatcher(fullPathToDispatcher)) @@ -108,19 +108,19 @@ object BenchmarkActors { private def initiatePingPongForPairs(refs: Vector[(ActorRef, ActorRef)], inFlight: Int) = { for { - (ping, pong) <- refs - _ <- 1 to inFlight + (ping, pong) ← refs + _ ← 1 to inFlight } { ping.tell(Message, pong) } } private def startEchoActorPairs(messagesPerPair: Int, numPairs: Int, dispatcher: String, - batchSize: Int)(implicit system: ActorSystem) = { + batchSize: Int)(implicit system: ActorSystem) = { val fullPathToDispatcher = "akka.actor." 
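A minimal, simplified sketch of the ping-pong pattern these BenchmarkActors hunks touch, with bookkeeping condensed relative to the real class:

```scala
import java.util.concurrent.CountDownLatch
import akka.actor.Actor

case object Message

// Each actor of a pair answers Message until its half of the per-pair
// budget is spent, then releases the latch the benchmark thread awaits.
class PingPongSketch(messagesPerPair: Int, latch: CountDownLatch) extends Actor {
  private var left = messagesPerPair / 2
  def receive = {
    case Message ⇒
      if (left == 0) latch.countDown()
      else {
        left -= 1
        sender() ! Message
      }
  }
}
```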
+ dispatcher val latch = new CountDownLatch(numPairs) - val actors = (1 to numPairs).map { _ => + val actors = (1 to numPairs).map { _ ⇒ system.actorOf(EchoSender.props(messagesPerPair, latch, batchSize).withDispatcher(fullPathToDispatcher)) }.toVector (actors, latch) diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala index 93f1d1d1a5..51b18176f7 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala @@ -38,8 +38,8 @@ class ForkJoinActorBenchmark { requireRightNumberOfCores(cores) val mailboxConf = mailbox match { - case "default" => "" - case "SingleConsumerOnlyUnboundedMailbox" => + case "default" ⇒ "" + case "SingleConsumerOnlyUnboundedMailbox" ⇒ s"""default-mailbox.mailbox-type = "${classOf[akka.dispatch.SingleConsumerOnlyUnboundedMailbox].getName}"""" } @@ -133,4 +133,4 @@ object ForkJoinActorBenchmark { final val totalMessagesLessThanCores = (lessThanCoresActors * messages) / 2 final val totalMessagesSameAsCores = (sameAsCoresActors * messages) / 2 -} \ No newline at end of file +} diff --git a/akka-bench-jmh/src/main/scala/akka/actor/RequestResponseActors.scala b/akka-bench-jmh/src/main/scala/akka/actor/RequestResponseActors.scala index dd7e0f8e41..34e94dc4a1 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/RequestResponseActors.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/RequestResponseActors.scala @@ -20,7 +20,7 @@ object RequestResponseActors { private val randGenerator = new Random() override def receive: Receive = { - case u: User => { + case u: User ⇒ { receivedUsers.put(u.userId, u) if (left == 0) { latch.countDown() @@ -42,10 +42,10 @@ object RequestResponseActors { class UserServiceActor(userDb: Map[Int, User], latch: CountDownLatch, numQueries: Int) extends Actor { private var left = numQueries def receive = { - case Request(id) => + case Request(id) ⇒ userDb.get(id) match { - case Some(u) => sender() ! u - case None => + case Some(u) ⇒ sender() ! u + case None ⇒ } if (left == 0) { latch.countDown() @@ -60,11 +60,11 @@ object RequestResponseActors { def props(latch: CountDownLatch, numQueries: Int, numUsersInDB: Int) = { val r = new Random() val users = for { - id <- 0 until numUsersInDB + id ← 0 until numUsersInDB firstName = r.nextString(5) lastName = r.nextString(7) ssn = r.nextInt() - friendIds = for { _ <- 0 until 5 } yield r.nextInt(numUsersInDB) + friendIds = for { _ ← 0 until 5 } yield r.nextInt(numUsersInDB) } yield id -> User(id, firstName, lastName, ssn, friendIds) Props(new UserServiceActor(users.toMap, latch, numQueries)) } @@ -74,7 +74,7 @@ object RequestResponseActors { val fullPathToDispatcher = "akka.actor." 
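Both AffinityPool benchmarks and ForkJoinActorBenchmark swap the mailbox the same way; a sketch of the full config path the generated fragment targets (the system name is illustrative):

```scala
import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

// The fragment produced for the "SingleConsumerOnlyUnboundedMailbox" case,
// spelled out with its full path under akka.actor.
val mailboxType = classOf[akka.dispatch.SingleConsumerOnlyUnboundedMailbox].getName
val system = ActorSystem("bench", ConfigFactory.parseString(
  s"""akka.actor.default-mailbox.mailbox-type = "$mailboxType""""))
```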
+ dispatcher val latch = new CountDownLatch(numActors) val actorsPairs = for { - i <- (1 to (numActors / 2)).toVector + i ← (1 to (numActors / 2)).toVector userQueryActor = system.actorOf(UserQueryActor.props(latch, numQueriesPerActor, numUsersInDBPerActor).withDispatcher(fullPathToDispatcher)) userServiceActor = system.actorOf(UserServiceActor.props(latch, numQueriesPerActor, numUsersInDBPerActor).withDispatcher(fullPathToDispatcher)) } yield (userQueryActor, userServiceActor) @@ -83,8 +83,8 @@ object RequestResponseActors { def initiateQuerySimulation(requestResponseActorPairs: Seq[(ActorRef, ActorRef)], inFlight: Int) = { for { - (queryActor, serviceActor) <- requestResponseActorPairs - i <- 1 to inFlight + (queryActor, serviceActor) ← requestResponseActorPairs + i ← 1 to inFlight } { serviceActor.tell(Request(i), queryActor) } diff --git a/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala index de0a8f5600..01fbda2b65 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala @@ -13,7 +13,7 @@ import java.util.concurrent.TimeUnit object StashCreationBenchmark { class StashingActor extends Actor with Stash { def receive = { - case msg => sender() ! msg + case msg ⇒ sender() ! msg } } diff --git a/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala index 24b196d3c3..f668a00af5 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala @@ -121,27 +121,27 @@ object TellOnlyBenchmark { } class DroppingDispatcher( - _configurator: MessageDispatcherConfigurator, - _id: String, - _throughput: Int, - _throughputDeadlineTime: Duration, + _configurator: MessageDispatcherConfigurator, + _id: String, + _throughput: Int, + _throughputDeadlineTime: Duration, _executorServiceFactoryProvider: ExecutorServiceFactoryProvider, - _shutdownTimeout: FiniteDuration + _shutdownTimeout: FiniteDuration ) - extends Dispatcher(_configurator, _id, _throughput, _throughputDeadlineTime, _executorServiceFactoryProvider, _shutdownTimeout) { + extends Dispatcher(_configurator, _id, _throughput, _throughputDeadlineTime, _executorServiceFactoryProvider, _shutdownTimeout) { override protected[akka] def dispatch(receiver: ActorCell, invocation: Envelope): Unit = { val mbox = receiver.mailbox mbox.enqueue(receiver.self, invocation) mbox.messageQueue match { case mb: DroppingMessageQueue if mb.dropping ⇒ // do nothing - case _ ⇒ registerForExecution(mbox, true, false) + case _ ⇒ registerForExecution(mbox, true, false) } } } class DroppingDispatcherConfigurator(config: Config, prerequisites: DispatcherPrerequisites) - extends MessageDispatcherConfigurator(config, prerequisites) { + extends MessageDispatcherConfigurator(config, prerequisites) { override def dispatcher(): MessageDispatcher = new DroppingDispatcher( this, diff --git a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala index 4efd9e6a9f..b7f709705b 100644 --- a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala @@ -10,7 +10,7 @@ import org.openjdk.jmh.annotations.Fork import 
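StashCreationBenchmark above measures creation of actors mixing in Stash; for context, a small sketch of the stash-then-replay pattern (the `Open` trigger message is hypothetical):

```scala
import akka.actor.{ Actor, Stash }

case object Open

// Messages arriving before Open are stashed and replayed once it arrives.
class StashUntilOpen extends Actor with Stash {
  def receive = {
    case Open ⇒
      unstashAll()
      context.become(open)
    case _ ⇒ stash()
  }
  def open: Actor.Receive = {
    case msg ⇒ sender() ! msg
  }
}
```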
org.openjdk.jmh.annotations.Measurement import org.openjdk.jmh.annotations.Mode import org.openjdk.jmh.annotations.OutputTimeUnit -import org.openjdk.jmh.annotations.{ Scope => JmhScope } +import org.openjdk.jmh.annotations.{ Scope ⇒ JmhScope } import org.openjdk.jmh.annotations.State import org.openjdk.jmh.annotations.Warmup import akka.cluster.UniqueAddress @@ -49,7 +49,7 @@ class ORSetMergeBenchmark { @Setup(Level.Trial) def setup(): Unit = { - set1 = (1 to set1Size).foldLeft(ORSet.empty[String])((s, n) => s.add(nextNode(), "elem" + n)) + set1 = (1 to set1Size).foldLeft(ORSet.empty[String])((s, n) ⇒ s.add(nextNode(), "elem" + n)) addFromSameNode = set1.add(nodeA, "elem" + set1Size + 1).merge(set1) addFromOtherNode = set1.add(nodeB, "elem" + set1Size + 1).merge(set1) complex1 = set1.add(nodeB, "a").add(nodeC, "b").remove(nodeD, "elem" + set1Size).merge(set1) diff --git a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala index df1a0986bc..217a0e6633 100644 --- a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala @@ -10,7 +10,7 @@ import org.openjdk.jmh.annotations.Fork import org.openjdk.jmh.annotations.Measurement import org.openjdk.jmh.annotations.Mode import org.openjdk.jmh.annotations.OutputTimeUnit -import org.openjdk.jmh.annotations.{ Scope => JmhScope } +import org.openjdk.jmh.annotations.{ Scope ⇒ JmhScope } import org.openjdk.jmh.annotations.State import org.openjdk.jmh.annotations.Warmup import akka.cluster.UniqueAddress @@ -46,7 +46,7 @@ class VersionVectorBenchmark { @Setup(Level.Trial) def setup(): Unit = { - vv1 = (1 to size).foldLeft(VersionVector.empty)((vv, n) => vv + nextNode()) + vv1 = (1 to size).foldLeft(VersionVector.empty)((vv, n) ⇒ vv + nextNode()) vv2 = vv1 + nextNode() vv3 = vv1 + nextNode() dot1 = VersionVector(nodeA, vv1.versionAt(nodeA)) diff --git a/akka-bench-jmh/src/main/scala/akka/dispatch/NodeQueueBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/dispatch/NodeQueueBenchmark.scala index 864d21175c..1311cfe583 100644 --- a/akka-bench-jmh/src/main/scala/akka/dispatch/NodeQueueBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/dispatch/NodeQueueBenchmark.scala @@ -41,8 +41,8 @@ mailbox { implicit val sys = ActorSystem("ANQ", config) val ref = sys.actorOf(Props(new Actor { def receive = { - case Stop => sender() ! Stop - case _ => + case Stop ⇒ sender() ! Stop + case _ ⇒ } }).withDispatcher("dispatcher").withMailbox("mailbox"), "receiver") diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala b/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala index f4a85c4200..8738281389 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala @@ -8,7 +8,7 @@ import akka.actor.Actor /** only as a "the best we could possibly get" baseline, does not persist anything */ class BaselineActor(respondAfter: Int) extends Actor { override def receive = { - case n: Int => if (n == respondAfter) sender() ! n + case n: Int ⇒ if (n == respondAfter) sender() ! 
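The ddata benchmark setups above fold node-tagged updates into a CRDT; a sketch of the merge behavior they exercise. Like the benchmarks, it uses the `UniqueAddress` overloads, which are internal API, so it compiles only inside the `akka` package; `nodeA` and `nodeB` are assumed to be in scope.

```scala
import akka.cluster.UniqueAddress
import akka.cluster.ddata.ORSet

// Build state by tagging adds with a node, diverge, then merge.
def diverge(nodeA: UniqueAddress, nodeB: UniqueAddress): ORSet[String] = {
  val base  = (1 to 3).foldLeft(ORSet.empty[String])((s, n) ⇒ s.add(nodeA, "elem" + n))
  val left  = base.add(nodeA, "x")        // concurrent add on node A
  val right = base.remove(nodeB, "elem1") // concurrent remove on node B
  left.merge(right)                       // elem1 gone; elem2, elem3, x remain
}
```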
n } } diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala index c15d158eff..e5565b8b25 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala @@ -65,7 +65,7 @@ class PersistentActorDeferBenchmark { @Benchmark @OperationsPerInvocation(10000) def tell_persistAsync_defer_persistAsync_reply(): Unit = { - for (i <- data10k) persistAsync_defer.tell(i, probe.ref) + for (i ← data10k) persistAsync_defer.tell(i, probe.ref) probe.expectMsg(data10k.last) } @@ -73,7 +73,7 @@ class PersistentActorDeferBenchmark { @Benchmark @OperationsPerInvocation(10000) def tell_persistAsync_defer_persistAsync_replyASAP(): Unit = { - for (i <- data10k) persistAsync_defer_replyASAP.tell(i, probe.ref) + for (i ← data10k) persistAsync_defer_replyASAP.tell(i, probe.ref) probe.expectMsg(data10k.last) } @@ -85,12 +85,12 @@ class `persistAsync, defer`(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - persistAsync(Evt(n)) { e => } - deferAsync(Evt(n)) { e => if (e.i == respondAfter) sender() ! e.i } + case n: Int ⇒ + persistAsync(Evt(n)) { e ⇒ } + deferAsync(Evt(n)) { e ⇒ if (e.i == respondAfter) sender() ! e.i } } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } class `persistAsync, defer, respond ASAP`(respondAfter: Int) extends PersistentActor { @@ -98,12 +98,12 @@ class `persistAsync, defer, respond ASAP`(respondAfter: Int) extends PersistentA override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - persistAsync(Evt(n)) { e => } - deferAsync(Evt(n)) { e => } + case n: Int ⇒ + persistAsync(Evt(n)) { e ⇒ } + deferAsync(Evt(n)) { e ⇒ } if (n == respondAfter) sender() ! 
n } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala index 15d94f981e..75bd2de020 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala @@ -63,7 +63,7 @@ class PersistentActorThroughputBenchmark { @Benchmark @OperationsPerInvocation(10000) def actor_normalActor_reply_baseline(): Unit = { - for (i <- data10k) actor.tell(i, probe.ref) + for (i ← data10k) actor.tell(i, probe.ref) probe.expectMsg(data10k.last) } @@ -71,7 +71,7 @@ class PersistentActorThroughputBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_persist_reply(): Unit = { - for (i <- data10k) persistPersistentActor.tell(i, probe.ref) + for (i ← data10k) persistPersistentActor.tell(i, probe.ref) probe.expectMsg(Evt(data10k.last)) } @@ -79,7 +79,7 @@ class PersistentActorThroughputBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_persistAsync_reply(): Unit = { - for (i <- data10k) persistAsync1PersistentActor.tell(i, probe.ref) + for (i ← data10k) persistAsync1PersistentActor.tell(i, probe.ref) probe.expectMsg(Evt(data10k.last)) } @@ -87,7 +87,7 @@ class PersistentActorThroughputBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_noPersist_reply(): Unit = { - for (i <- data10k) noPersistPersistentActor.tell(i, probe.ref) + for (i ← data10k) noPersistPersistentActor.tell(i, probe.ref) probe.expectMsg(Evt(data10k.last)) } @@ -95,7 +95,7 @@ class PersistentActorThroughputBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_persistAsync_replyRightOnCommandReceive(): Unit = { - for (i <- data10k) persistAsyncQuickReplyPersistentActor.tell(i, probe.ref) + for (i ← data10k) persistAsyncQuickReplyPersistentActor.tell(i, probe.ref) probe.expectMsg(Evt(data10k.last)) } @@ -107,10 +107,10 @@ class NoPersistPersistentActor(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => if (n == respondAfter) sender() ! Evt(n) + case n: Int ⇒ if (n == respondAfter) sender() ! Evt(n) } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -119,10 +119,10 @@ class PersistPersistentActor(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => persist(Evt(n)) { e => if (e.i == respondAfter) sender() ! e } + case n: Int ⇒ persist(Evt(n)) { e ⇒ if (e.i == respondAfter) sender() ! e } } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -131,11 +131,11 @@ class PersistAsyncPersistentActor(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - persistAsync(Evt(n)) { e => if (e.i == respondAfter) sender() ! e } + case n: Int ⇒ + persistAsync(Evt(n)) { e ⇒ if (e.i == respondAfter) sender() ! 
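The benchmark's actor variants above differ only in journalling mode; a condensed sketch of the persist vs persistAsync contrast (the persistenceId and command tags are illustrative, `Evt` mirrors the benchmark's event type):

```scala
import akka.persistence.PersistentActor

final case class Evt(i: Int)

class JournalModes(respondAfter: Int) extends PersistentActor {
  override def persistenceId = "journal-modes-sketch"
  override def receiveCommand = {
    case ("sync", n: Int) ⇒
      // persist: incoming commands are stashed until the handler has run,
      // so replies follow command order.
      persist(Evt(n)) { e ⇒ if (e.i == respondAfter) sender() ! e }
    case ("async", n: Int) ⇒
      // persistAsync: new commands keep flowing; the handler runs whenever
      // the journal confirms, which is what the *_persistAsync_* variants measure.
      persistAsync(Evt(n)) { e ⇒ if (e.i == respondAfter) sender() ! e }
  }
  override def receiveRecover = { case _ ⇒ }
}
```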
e } } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -144,12 +144,12 @@ class PersistAsyncQuickReplyPersistentActor(respondAfter: Int) extends Persisten override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => + case n: Int ⇒ val e = Evt(n) if (n == respondAfter) sender() ! e persistAsync(e)(identity) } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } } diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala index 106824c153..aa1980a447 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala @@ -62,7 +62,7 @@ class PersistentActorWithAtLeastOnceDeliveryBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_persistAsync_with_AtLeastOnceDelivery(): Unit = { - for (i <- 1 to dataCount) + for (i ← 1 to dataCount) persistAsyncPersistentActorWithAtLeastOnceDelivery.tell(i, probe.ref) probe.expectMsg(20.seconds, Evt(dataCount)) } @@ -70,7 +70,7 @@ class PersistentActorWithAtLeastOnceDeliveryBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_persist_with_AtLeastOnceDelivery(): Unit = { - for (i <- 1 to dataCount) + for (i ← 1 to dataCount) persistPersistentActorWithAtLeastOnceDelivery.tell(i, probe.ref) probe.expectMsg(2.minutes, Evt(dataCount)) } @@ -78,7 +78,7 @@ class PersistentActorWithAtLeastOnceDeliveryBenchmark { @Benchmark @OperationsPerInvocation(10000) def persistentActor_noPersist_with_AtLeastOnceDelivery(): Unit = { - for (i <- 1 to dataCount) + for (i ← 1 to dataCount) noPersistPersistentActorWithAtLeastOnceDelivery.tell(i, probe.ref) probe.expectMsg(20.seconds, Evt(dataCount)) } @@ -91,28 +91,28 @@ class NoPersistPersistentActorWithAtLeastOnceDelivery(respondAfter: Int, val upS override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - deliver(downStream)(deliveryId => Msg(deliveryId, n)) + case n: Int ⇒ + deliver(downStream)(deliveryId ⇒ Msg(deliveryId, n)) if (n == respondAfter) //switch to wait all message confirmed context.become(waitConfirm) - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) - case _ => // do nothing + case _ ⇒ // do nothing } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } val waitConfirm: Actor.Receive = { - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) if (numberOfUnconfirmed == 0) { upStream ! 
Evt(respondAfter) context.unbecome() } - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -123,30 +123,30 @@ class PersistPersistentActorWithAtLeastOnceDelivery(respondAfter: Int, val upStr override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - persist(MsgSent(n)) { e => - deliver(downStream)(deliveryId => Msg(deliveryId, n)) + case n: Int ⇒ + persist(MsgSent(n)) { e ⇒ + deliver(downStream)(deliveryId ⇒ Msg(deliveryId, n)) if (n == respondAfter) //switch to wait all message confirmed context.become(waitConfirm) } - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) - case _ => // do nothing + case _ ⇒ // do nothing } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } val waitConfirm: Actor.Receive = { - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) if (numberOfUnconfirmed == 0) { upStream ! Evt(respondAfter) context.unbecome() } - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -157,30 +157,30 @@ class PersistAsyncPersistentActorWithAtLeastOnceDelivery(respondAfter: Int, val override def persistenceId: String = self.path.name override def receiveCommand = { - case n: Int => - persistAsync(MsgSent(n)) { e => - deliver(downStream)(deliveryId => Msg(deliveryId, n)) + case n: Int ⇒ + persistAsync(MsgSent(n)) { e ⇒ + deliver(downStream)(deliveryId ⇒ Msg(deliveryId, n)) if (n == respondAfter) //switch to wait all message confirmed context.become(waitConfirm) } - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) - case _ => // do nothing + case _ ⇒ // do nothing } override def receiveRecover = { - case _ => // do nothing + case _ ⇒ // do nothing } val waitConfirm: Actor.Receive = { - case Confirm(deliveryId) => + case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId) if (numberOfUnconfirmed == 0) { upStream ! Evt(respondAfter) context.unbecome() } - case _ => // do nothing + case _ ⇒ // do nothing } } @@ -198,15 +198,15 @@ class DestinationActor extends Actor { var seqNr = 0L override def receive = { - case n: Int => + case n: Int ⇒ sender() ! Confirm(n) - case Msg(deliveryId, _) => + case Msg(deliveryId, _) ⇒ seqNr += 1 if (seqNr % 11 == 0) { //drop it } else { sender() ! 
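The three at-least-once-delivery variants above share one protocol; a minimal sketch of the deliver/confirm loop, with message names taken from the diff and the remaining wiring assumed. Note that the benchmark's DestinationActor deliberately drops every 11th confirmation to exercise redelivery.

```scala
import akka.actor.ActorPath
import akka.persistence.{ AtLeastOnceDelivery, PersistentActor }

final case class Msg(deliveryId: Long, n: Int)
final case class Confirm(deliveryId: Long)
final case class MsgSent(n: Int)

class DeliverySketch(downStream: ActorPath) extends PersistentActor with AtLeastOnceDelivery {
  override def persistenceId = "delivery-sketch"
  override def receiveCommand = {
    case n: Int ⇒
      persistAsync(MsgSent(n)) { _ ⇒
        deliver(downStream)(deliveryId ⇒ Msg(deliveryId, n)) // redelivered until confirmed
      }
    case Confirm(deliveryId) ⇒ confirmDelivery(deliveryId)   // stops redelivery
  }
  override def receiveRecover = { case _ ⇒ }
}
```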
Confirm(deliveryId) } - case _ => // do nothing + case _ ⇒ // do nothing } } diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala index 2ffe12f823..3aab197664 100644 --- a/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala +++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala @@ -18,7 +18,7 @@ import akka.stream.stage.OutHandler class BenchTestSource(elementCount: Int) extends GraphStage[SourceShape[java.lang.Integer]] { private val elements = new Array[java.lang.Integer](elementCount) - (1 to elementCount).map(n => elements(n - 1) = n) + (1 to elementCount).map(n ⇒ elements(n - 1) = n) val out: Outlet[java.lang.Integer] = Outlet("BenchTestSource") override val shape: SourceShape[java.lang.Integer] = SourceShape(out) diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/CodecBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/CodecBenchmark.scala index e5280d7673..a535190e4e 100644 --- a/akka-bench-jmh/src/main/scala/akka/remote/artery/CodecBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/CodecBenchmark.scala @@ -91,11 +91,11 @@ class CodecBenchmark { """ ) val config = configType match { - case RemoteInstrument => + case RemoteInstrument ⇒ ConfigFactory.parseString( s"""akka.remote.artery.advanced.instruments = [ "${classOf[DummyRemoteInstrument].getName}" ]""" ).withFallback(commonConfig) - case _ => + case _ ⇒ commonConfig } @@ -148,7 +148,7 @@ class CodecBenchmark { val deserializer: Flow[InboundEnvelope, InboundEnvelope, NotUsed] = Flow.fromGraph(new Deserializer(inboundContext, system.asInstanceOf[ExtendedActorSystem], envelopePool)) val decoderInput: Flow[String, EnvelopeBuffer, NotUsed] = Flow[String] - .map { _ => + .map { _ ⇒ val envelope = envelopePool.acquire() envelopeTemplateBuffer.rewind() envelope.byteBuffer.put(envelopeTemplateBuffer) @@ -158,14 +158,14 @@ class CodecBenchmark { encodeGraph = encoderInput .via(encoder) - .map(envelope => envelopePool.release(envelope)) + .map(envelope ⇒ envelopePool.release(envelope)) decodeGraph = decoderInput .via(decoder) .via(deserializer) .map { - case env: ReusableInboundEnvelope => inboundEnvelopePool.release(env) - case _ => + case env: ReusableInboundEnvelope ⇒ inboundEnvelopePool.release(env) + case _ ⇒ } encodeDecodeGraph = encoderInput @@ -173,8 +173,8 @@ class CodecBenchmark { .via(decoder) .via(deserializer) .map { - case env: ReusableInboundEnvelope => inboundEnvelopePool.release(env) - case _ => + case env: ReusableInboundEnvelope ⇒ inboundEnvelopePool.release(env) + case _ ⇒ } } diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/FlightRecorderBench.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/FlightRecorderBench.scala index 3d49155461..e84d66c2ed 100644 --- a/akka-bench-jmh/src/main/scala/akka/remote/artery/FlightRecorderBench.scala +++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/FlightRecorderBench.scala @@ -45,7 +45,7 @@ class FlightRecorderBench { @OperationsPerInvocation(10000000) def flight_recorder_writes(): Unit = { val latch = new CountDownLatch(writers) - (1 to writers).foreach { _ => + (1 to writers).foreach { _ ⇒ val sink = recorder.createEventSink() new Thread { override def run(): Unit = { diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/LatchSink.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/LatchSink.scala index 4ac234e9cd..8b29f18c44 100644 --- 
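BenchTestSource above avoids per-element allocation by pre-boxing integers; a sketch of the same GraphStage shape, simplified relative to the real class:

```scala
import akka.stream.{ Attributes, Outlet, SourceShape }
import akka.stream.stage.{ GraphStage, GraphStageLogic, OutHandler }

// Pushes pre-boxed Integers on demand, so the hot path allocates nothing.
class IntArraySource(elements: Array[java.lang.Integer])
  extends GraphStage[SourceShape[java.lang.Integer]] {
  val out: Outlet[java.lang.Integer] = Outlet("IntArraySource")
  override val shape = SourceShape(out)
  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with OutHandler {
      private var i = 0
      override def onPull(): Unit =
        if (i < elements.length) { push(out, elements(i)); i += 1 }
        else completeStage()
      setHandler(out, this)
    }
}
```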
a/akka-bench-jmh/src/main/scala/akka/remote/artery/LatchSink.scala +++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/LatchSink.scala @@ -42,7 +42,7 @@ class LatchSink(countDownAfter: Int, latch: CountDownLatch) extends GraphStage[S } class BarrierSink(countDownAfter: Int, latch: CountDownLatch, barrierAfter: Int, barrier: CyclicBarrier) - extends GraphStage[SinkShape[Any]] { + extends GraphStage[SinkShape[Any]] { val in: Inlet[Any] = Inlet("BarrierSink") override val shape: SinkShape[Any] = SinkShape(in) diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/compress/CountMinSketchBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/compress/CountMinSketchBenchmark.scala index b3c5f1c53b..b02d48dc26 100644 --- a/akka-bench-jmh/src/main/scala/akka/remote/artery/compress/CountMinSketchBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/compress/CountMinSketchBenchmark.scala @@ -31,7 +31,7 @@ class CountMinSketchBenchmark { @Setup def init(): Unit = { countMinSketch = new CountMinSketch(d, w, seed) - (0 to 8191).foreach { index => + (0 to 8191).foreach { index ⇒ preallocateIds(index) = rand.nextInt() preallocateValues(index) = Math.abs(rand.nextInt()) } diff --git a/akka-bench-jmh/src/main/scala/akka/stream/EmptySourceBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/EmptySourceBenchmark.scala index c262f88211..f9ef08f60d 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/EmptySourceBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/EmptySourceBenchmark.scala @@ -34,12 +34,12 @@ class EmptySourceBenchmark { /* (not serious benchmark, just sanity check: run on macbook 15, late 2013) - + While it was a PublisherSource: [info] EmptySourceBenchmark.empty thrpt 10 11.219 ± 6.498 ops/ms - + Rewrite to GraphStage: [info] EmptySourceBenchmark.empty thrpt 10 17.556 ± 2.865 ops/ms - + */ } diff --git a/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala index ac503084c6..42cf1edba8 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala @@ -33,11 +33,11 @@ class FlatMapMergeBenchmark { def setup(): Unit = { val source = NumberOfStreams match { // Base line: process NumberOfElements-many elements from a single source without using flatMapMerge - case 0 => createSource(NumberOfElements) + case 0 ⇒ createSource(NumberOfElements) // Stream merging: process NumberOfElements-many elements from n sources, each producing (NumberOfElements/n)-many elements - case n => + case n ⇒ val subSource = createSource(NumberOfElements / n) - Source.repeat(()).take(n).flatMapMerge(n, _ => subSource) + Source.repeat(()).take(n).flatMapMerge(n, _ ⇒ subSource) } graph = Source.fromGraph(source).toMat(Sink.ignore)(Keep.right) } diff --git a/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala index 1687a2cb39..accd3ddcb1 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala @@ -128,7 +128,7 @@ class FlowMapBenchmark { } // source setup - private def mkMaps[O, Mat](source: Source[O, Mat], count: Int)(flow: => Graph[FlowShape[O, O], _]): Source[O, Mat] = { + private def mkMaps[O, Mat](source: Source[O, Mat], count: Int)(flow: ⇒ Graph[FlowShape[O, O], _]): Source[O, Mat] = { var f = source for (i ← 
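A sketch of the two FlatMapMergeBenchmark cases above: the baseline (n = 0) streams every element from one source, while the merged case splits the same total across n sub-streams. `createSource` stands in for the benchmark's helper.

```scala
import akka.NotUsed
import akka.stream.scaladsl.{ Keep, Sink, Source }

def createSource(count: Int): Source[Int, NotUsed] = Source(1 to count)

val elements = 100000
def graph(n: Int) =
  (if (n == 0) createSource(elements)
   else Source.repeat(()).take(n).flatMapMerge(n, _ ⇒ createSource(elements / n)))
    .toMat(Sink.ignore)(Keep.right)
```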
1 to count) f = f.via(flow) diff --git a/akka-bench-jmh/src/main/scala/akka/stream/FusedGraphsBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/FusedGraphsBenchmark.scala index f394420e18..eb91b397e7 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/FusedGraphsBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/FusedGraphsBenchmark.scala @@ -116,7 +116,7 @@ class FusedGraphsBenchmark { materializer = ActorMaterializer(settings) testElements = Array.fill(ElementCount)(new MutableElement(0)) - val addFunc = (x: MutableElement) => { x.value += 1; x } + val addFunc = (x: MutableElement) ⇒ { x.value += 1; x } val testSource = Source.fromGraph(new TestSource(testElements)) val testSink = Sink.fromGraph(new JitSafeCompletionLatch) @@ -179,7 +179,7 @@ class FusedGraphsBenchmark { .take(ElementCount) .map(addFunc) .map(addFunc) - .fold(new MutableElement(0))((acc, x) => { acc.value += x.value; acc }) + .fold(new MutableElement(0))((acc, x) ⇒ { acc.value += x.value; acc }) .toMat(testSink)(Keep.right) ) @@ -206,7 +206,7 @@ class FusedGraphsBenchmark { .toMat(testSink)(Keep.right) ) - val broadcastZipFlow: Flow[MutableElement, MutableElement, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit b => + val broadcastZipFlow: Flow[MutableElement, MutableElement, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val bcast = b.add(Broadcast[MutableElement](2)) @@ -218,7 +218,7 @@ class FusedGraphsBenchmark { FlowShape(bcast.in, zip.out.map(_._1).outlet) }) - val balanceMergeFlow: Flow[MutableElement, MutableElement, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit b => + val balanceMergeFlow: Flow[MutableElement, MutableElement, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val balance = b.add(Balance[MutableElement](2)) diff --git a/akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala index d48653187c..1508997495 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala @@ -35,7 +35,7 @@ class InterpreterBenchmark { .connect(identities.last.out, sink) // FIXME: This should not be here, this is pure setup overhead - for (i <- (0 until identities.size - 1)) { + for (i ← (0 until identities.size - 1)) { b.connect(identities(i).out, identities(i + 1).in) } diff --git a/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala index b29c1e2bbd..5343d8eb3f 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala @@ -16,21 +16,21 @@ import akka.Done object MaterializationBenchmark { - val flowWithMapBuilder = (numOfCombinators: Int) => { + val flowWithMapBuilder = (numOfCombinators: Int) ⇒ { var source = Source.single(()) - for (_ <- 1 to numOfCombinators) { + for (_ ← 1 to numOfCombinators) { source = source.map(identity) } source.to(Sink.ignore) } - val graphWithJunctionsGradualBuilder = (numOfJunctions: Int) => - RunnableGraph.fromGraph(GraphDSL.create() { implicit b => + val graphWithJunctionsGradualBuilder = (numOfJunctions: Int) ⇒ + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val broadcast = b.add(Broadcast[Unit](numOfJunctions)) var outlet = broadcast.out(0) - for (i <- 1 
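MaterializationBenchmark above builds junction-heavy graphs with the GraphDSL; the smallest version of the broadcast/merge wiring it loops over:

```scala
import akka.stream.ClosedShape
import akka.stream.scaladsl.{ Broadcast, GraphDSL, Merge, RunnableGraph, Sink, Source }

// Fan out, fan back in, close the shape. The benchmark repeats this
// numOfJunctions times to stress materialization.
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
  import GraphDSL.Implicits._
  val bcast = b.add(Broadcast[Unit](2))
  val merge = b.add(Merge[Unit](2))
  Source.single(()) ~> bcast
  bcast ~> merge // each ~> wires the next free output to the next free input
  bcast ~> merge
  merge ~> Sink.ignore
  ClosedShape
})
```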
until numOfJunctions) { + for (i ← 1 until numOfJunctions) { val merge = b.add(Merge[Unit](2)) outlet ~> merge broadcast.out(i) ~> merge @@ -42,13 +42,13 @@ object MaterializationBenchmark { ClosedShape }) - val graphWithJunctionsImmediateBuilder = (numOfJunctions: Int) => - RunnableGraph.fromGraph(GraphDSL.create() { implicit b => + val graphWithJunctionsImmediateBuilder = (numOfJunctions: Int) ⇒ + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val broadcast = b.add(Broadcast[Unit](numOfJunctions)) val merge = b.add(Merge[Unit](numOfJunctions)) - for (i <- 0 until numOfJunctions) { + for (i ← 0 until numOfJunctions) { broadcast ~> merge } @@ -57,12 +57,12 @@ object MaterializationBenchmark { ClosedShape }) - val graphWithImportedFlowBuilder = (numOfFlows: Int) => + val graphWithImportedFlowBuilder = (numOfFlows: Int) ⇒ RunnableGraph.fromGraph(GraphDSL.create(Source.single(())) { implicit b ⇒ source ⇒ import GraphDSL.Implicits._ val flow = Flow[Unit].map(identity) var out: Outlet[Unit] = source.out - for (i <- 0 until numOfFlows) { + for (i ← 0 until numOfFlows) { val flowShape = b.add(flow) out ~> flowShape out = flowShape.outlet @@ -73,11 +73,11 @@ object MaterializationBenchmark { final val subStreamCount = 10000 - val subStreamBuilder: Int => RunnableGraph[Future[Unit]] = numOfCombinators => { + val subStreamBuilder: Int ⇒ RunnableGraph[Future[Unit]] = numOfCombinators ⇒ { val subFlow = { var flow = Flow[Unit] - for (_ <- 1 to numOfCombinators) { + for (_ ← 1 to numOfCombinators) { flow = flow.map(identity) } flow diff --git a/akka-bench-jmh/src/main/scala/akka/stream/PartitionHubBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/PartitionHubBenchmark.scala index f8c1c7ead6..c1f2408547 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/PartitionHubBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/PartitionHubBenchmark.scala @@ -79,11 +79,11 @@ class PartitionHubBenchmark { val source = testSource .runWith(PartitionHub.sink[java.lang.Integer]( - (size, elem) => elem.intValue % NumberOfStreams, + (size, elem) ⇒ elem.intValue % NumberOfStreams, startAfterNrOfConsumers = NumberOfStreams, bufferSize = BufferSize ))(materializer) - for (_ <- 0 until NumberOfStreams) + for (_ ← 0 until NumberOfStreams) source.runWith(new LatchSink(N / NumberOfStreams, latch))(materializer) if (!latch.await(30, TimeUnit.SECONDS)) { @@ -106,7 +106,7 @@ class PartitionHubBenchmark { )) )(materializer) - for (_ <- 0 until NumberOfStreams) + for (_ ← 0 until NumberOfStreams) source.runWith(new LatchSink(N / NumberOfStreams, latch))(materializer) if (!latch.await(30, TimeUnit.SECONDS)) { diff --git a/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesScaleBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesScaleBenchmark.scala index f230ee5a0d..d7a6cc5da0 100644 --- a/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesScaleBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesScaleBenchmark.scala @@ -34,7 +34,7 @@ class FileSourcesScaleBenchmark { val FILES_NUMBER = 40 val files: Seq[Path] = { val line = ByteString("x" * 2048 + "\n") - (1 to FILES_NUMBER).map(i => { + (1 to FILES_NUMBER).map(i ⇒ { val f = Files.createTempFile(getClass.getName, i + ".bench.tmp") val ft = Source.fromIterator(() ⇒ Iterator.continually(line)) @@ -67,16 +67,16 @@ class FileSourcesScaleBenchmark { @Benchmark def flatMapMerge(): Unit = { - val h = Source.fromIterator(() => files.iterator) - .flatMapMerge(FILES_NUMBER, 
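A sketch of the PartitionHub wiring the benchmark above materializes: one producer fans out to consumers selected by a (consumerCount, element) ⇒ index function. An implicit Materializer is assumed to be in scope.

```scala
import akka.stream.scaladsl.{ PartitionHub, Sink, Source }

// startAfterNrOfConsumers holds elements back until that many consumers
// have attached; bufferSize bounds per-consumer buffering.
val fromHub = Source(1 to 100).runWith(
  PartitionHub.sink[Int]((size, elem) ⇒ elem % size,
    startAfterNrOfConsumers = 2, bufferSize = 256))
fromHub.runWith(Sink.foreach(e ⇒ println(s"consumer A: $e")))
fromHub.runWith(Sink.foreach(e ⇒ println(s"consumer B: $e")))
```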
path => FileIO.fromPath(path, bufSize)).runWith(Sink.ignore) + val h = Source.fromIterator(() ⇒ files.iterator) + .flatMapMerge(FILES_NUMBER, path ⇒ FileIO.fromPath(path, bufSize)).runWith(Sink.ignore) Await.result(h, 300.seconds) } @Benchmark def mapAsync(): Unit = { - val h = Source.fromIterator(() => files.iterator) - .mapAsync(FILES_NUMBER)(path => FileIO.fromPath(path, bufSize).runWith(Sink.ignore)).runWith(Sink.ignore) + val h = Source.fromIterator(() ⇒ files.iterator) + .mapAsync(FILES_NUMBER)(path ⇒ FileIO.fromPath(path, bufSize).runWith(Sink.ignore)).runWith(Sink.ignore) Await.result(h, 300.seconds) } diff --git a/akka-bench-jmh/src/main/scala/akka/util/ByteString_copyToBuffer_Benchmark.scala b/akka-bench-jmh/src/main/scala/akka/util/ByteString_copyToBuffer_Benchmark.scala index 70f61470b0..dd76e822e8 100644 --- a/akka-bench-jmh/src/main/scala/akka/util/ByteString_copyToBuffer_Benchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/util/ByteString_copyToBuffer_Benchmark.scala @@ -36,8 +36,8 @@ class ByteString_copyToBuffer_Benchmark { [info] ByteStringBenchmark.bs_large_copyToBuffer thrpt 40 142 163 289.866 ± 21751578.294 ops/s [info] ByteStringBenchmark.bss_large_copyToBuffer thrpt 40 1 489 195.631 ± 209165.487 ops/s << that's the interesting case, we needlessly fold and allocate tons of Stream etc [info] ByteStringBenchmark.bss_large_pc_copyToBuffer thrpt 40 184 466 756.364 ± 9169108.378 ops/s // "can't beat that" - - + + [info] ....[Thread state: RUNNABLE]........................................................................ [info] 35.9% 35.9% scala.collection.Iterator$class.toStream [info] 20.2% 20.2% scala.collection.immutable.Stream.foldLeft @@ -50,10 +50,10 @@ class ByteString_copyToBuffer_Benchmark { [info] 1.2% 1.2% akka.util.generated.ByteStringBenchmark_bss_large_copyToBuffer.bss_large_copyToBuffer_thrpt_jmhStub [info] 0.3% 0.3% akka.util.ByteIterator$MultiByteArrayIterator.copyToBuffer [info] 1.2% 1.2% - - + + AFTER specializing impls - + [info] ....[Thread state: RUNNABLE]........................................................................ 
[info] 99.5% 99.6% akka.util.generated.ByteStringBenchmark_bss_large_copyToBuffer_jmhTest.bss_large_copyToBuffer_thrpt_jmhStub [info] 0.1% 0.1% java.util.concurrent.CountDownLatch.countDown @@ -63,12 +63,12 @@ class ByteString_copyToBuffer_Benchmark { [info] 0.1% 0.1% java.lang.Thread.currentThread [info] 0.1% 0.1% sun.misc.Unsafe.compareAndSwapInt [info] 0.1% 0.1% sun.reflect.AccessorGenerator.internalize - + [info] Benchmark Mode Cnt Score Error Units [info] ByteStringBenchmark.bs_large_copyToBuffer thrpt 40 177 328 585.473 ± 7742067.648 ops/s [info] ByteStringBenchmark.bss_large_copyToBuffer thrpt 40 113 535 003.488 ± 3899763.124 ops/s // previous bad case now very good (was 2M/s) [info] ByteStringBenchmark.bss_large_pc_copyToBuffer thrpt 40 203 590 896.493 ± 7582752.024 ops/s // "can't beat that" - + */ @Benchmark diff --git a/akka-bench-jmh/src/main/scala/akka/util/ByteString_decode_Benchmark.scala b/akka-bench-jmh/src/main/scala/akka/util/ByteString_decode_Benchmark.scala index 4833135887..2eb10e8ca3 100644 --- a/akka-bench-jmh/src/main/scala/akka/util/ByteString_decode_Benchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/util/ByteString_decode_Benchmark.scala @@ -25,17 +25,17 @@ class ByteString_decode_Benchmark { /* Using Charset helps a bit, but nothing impressive: - + [info] ByteString_decode_Benchmark.bc_large_decodeString_stringCharset_utf8 thrpt 20 21 612.293 ± 825.099 ops/s => [info] ByteString_decode_Benchmark.bc_large_decodeString_charsetCharset_utf8 thrpt 20 22 473.372 ± 851.597 ops/s - - + + [info] ByteString_decode_Benchmark.bs_large_decodeString_stringCharset_utf8 thrpt 20 84 443.674 ± 3723.987 ops/s => [info] ByteString_decode_Benchmark.bs_large_decodeString_charsetCharset_utf8 thrpt 20 93 865.033 ± 2052.476 ops/s - - + + [info] ByteString_decode_Benchmark.bss_large_decodeString_stringCharset_utf8 thrpt 20 14 886.553 ± 326.752 ops/s => [info] ByteString_decode_Benchmark.bss_large_decodeString_charsetCharset_utf8 thrpt 20 16 031.670 ± 474.565 ops/s diff --git a/akka-bench-jmh/src/main/scala/akka/util/ByteString_dropSliceTake_Benchmark.scala b/akka-bench-jmh/src/main/scala/akka/util/ByteString_dropSliceTake_Benchmark.scala index 11a085172d..15e7a3d453 100644 --- a/akka-bench-jmh/src/main/scala/akka/util/ByteString_dropSliceTake_Benchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/util/ByteString_dropSliceTake_Benchmark.scala @@ -27,7 +27,7 @@ class ByteString_dropSliceTake_Benchmark { val bss_pc_large = bss_large.compact /* - --------------------------------- BASELINE -------------------------------------------------------------------- + --------------------------------- BASELINE -------------------------------------------------------------------- [info] Benchmark Mode Cnt Score Error Units [info] ByteString_dropSliceTake_Benchmark.bs_large_dropRight_100 thrpt 20 111 122 621.983 ± 6172679.160 ops/s [info] ByteString_dropSliceTake_Benchmark.bs_large_dropRight_256 thrpt 20 110 238 003.870 ± 4042572.908 ops/s @@ -48,9 +48,9 @@ class ByteString_dropSliceTake_Benchmark { [info] ByteString_dropSliceTake_Benchmark.bs_large_slice_129_129 thrpt 20 105 640 836.625 ± 9112709.942 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_slice_80_80 thrpt 20 10 868 202.262 ± 526537.133 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_slice_129_129 thrpt 20 9 429 199.802 ± 1321542.453 ops/s - + --------------------------------- AFTER ----------------------------------------------------------------------- - + ------ TODAY ––––––– [info] Benchmark Mode Cnt Score Error 
Units [info] ByteString_dropSliceTake_Benchmark.bs_large_dropRight_100 thrpt 20 126 091 961.654 ± 2813125.268 ops/s @@ -59,7 +59,7 @@ class ByteString_dropSliceTake_Benchmark { [info] ByteString_dropSliceTake_Benchmark.bss_large_dropRight_100 thrpt 20 8 813 065.392 ± 234570.880 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_dropRight_256 thrpt 20 9 039 585.934 ± 297168.301 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_dropRight_2000 thrpt 20 9 629 458.168 ± 124846.904 ops/s - + [info] ByteString_dropSliceTake_Benchmark.bs_large_drop_100 thrpt 20 111 666 137.955 ± 4846727.674 ops/s [info] ByteString_dropSliceTake_Benchmark.bs_large_drop_256 thrpt 20 114 405 514.622 ± 4985750.805 ops/s [info] ByteString_dropSliceTake_Benchmark.bs_large_drop_2000 thrpt 20 114 364 716.297 ± 2512280.603 ops/s @@ -67,15 +67,15 @@ class ByteString_dropSliceTake_Benchmark { [info] ByteString_dropSliceTake_Benchmark.bss_large_drop_100 thrpt 20 9 184 934.769 ± 549140.840 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_drop_256 thrpt 20 10 887 437.121 ± 195606.240 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_drop_2000 thrpt 20 10 725 300.292 ± 403470.413 ops/s - + [info] ByteString_dropSliceTake_Benchmark.bs_large_slice_80_80 thrpt 20 233 017 314.148 ± 7070246.826 ops/s [info] ByteString_dropSliceTake_Benchmark.bs_large_slice_129_129 thrpt 20 275 245 086.247 ± 4969752.048 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_slice_80_80 thrpt 20 264 963 420.976 ± 4259289.143 ops/s [info] ByteString_dropSliceTake_Benchmark.bss_large_slice_129_129 thrpt 20 265 477 577.022 ± 4623974.283 ops/s - + */ - // 18 == "http://example.com", a typical url length + // 18 == "http://example.com", a typical url length @Benchmark def bs_large_drop_0: ByteString = diff --git a/akka-bench-jmh/src/main/scala/akka/util/ImmutableIntMapBench.scala b/akka-bench-jmh/src/main/scala/akka/util/ImmutableIntMapBench.scala index e7aa0c129b..603ded1114 100644 --- a/akka-bench-jmh/src/main/scala/akka/util/ImmutableIntMapBench.scala +++ b/akka-bench-jmh/src/main/scala/akka/util/ImmutableIntMapBench.scala @@ -48,7 +48,7 @@ class ImmutableIntMapBench { } else from } - val odd1000 = (0 to 1000).iterator.filter(_ % 2 == 1).foldLeft(ImmutableIntMap.empty)((l, i) => l.updated(i, i)) + val odd1000 = (0 to 1000).iterator.filter(_ % 2 == 1).foldLeft(ImmutableIntMap.empty)((l, i) ⇒ l.updated(i, i)) @Benchmark @OperationsPerInvocation(1) @@ -109,4 +109,4 @@ class ImmutableIntMapBench { @Benchmark @OperationsPerInvocation(1000) def getHiElement(): ImmutableIntMap = getKey(iterations = 1000, key = 999, from = odd1000) -} \ No newline at end of file +} diff --git a/akka-bench-jmh/src/main/scala/akka/util/LruBoundedCacheBench.scala b/akka-bench-jmh/src/main/scala/akka/util/LruBoundedCacheBench.scala index 53875aee82..d7bc56d0d6 100644 --- a/akka-bench-jmh/src/main/scala/akka/util/LruBoundedCacheBench.scala +++ b/akka-bench-jmh/src/main/scala/akka/util/LruBoundedCacheBench.scala @@ -45,7 +45,7 @@ class LruBoundedCacheBench { } // Loading - for (i <- 1 to threshold) { + for (i ← 1 to threshold) { val value = random.nextString(stringSize) if (i == 1) toGet = value toRemove = value diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala index 2c2e4f0cec..10f6b69fb4 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala +++ 
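The before/after numbers above motivate the specialised copyToBuffer implementations; a tiny sketch of the method's contract:

```scala
import java.nio.ByteBuffer
import akka.util.ByteString

// copyToBuffer copies as many bytes as fit into the target buffer and
// returns the number actually copied, which the caller must check.
val bs = ByteString("http://example.com") // 18 bytes, as in the comment above
val buf = ByteBuffer.allocate(8)
val copied = bs.copyToBuffer(buf) // copied == 8, buf is now full
```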
b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala @@ -281,9 +281,9 @@ class ClusterSharding(system: ExtendedActorSystem) extends Extension { start(typeName, entityProps, settings, extractEntityId = { - case msg if messageExtractor.entityId(msg) ne null ⇒ - (messageExtractor.entityId(msg), messageExtractor.entityMessage(msg)) - }, + case msg if messageExtractor.entityId(msg) ne null ⇒ + (messageExtractor.entityId(msg), messageExtractor.entityMessage(msg)) + }, extractShardId = msg ⇒ messageExtractor.shardId(msg), allocationStrategy = allocationStrategy, handOffStopMessage = handOffStopMessage) @@ -438,9 +438,9 @@ class ClusterSharding(system: ExtendedActorSystem) extends Extension { startProxy(typeName, Option(role.orElse(null)), Option(dataCenter.orElse(null)), extractEntityId = { - case msg if messageExtractor.entityId(msg) ne null ⇒ - (messageExtractor.entityId(msg), messageExtractor.entityMessage(msg)) - }, + case msg if messageExtractor.entityId(msg) ne null ⇒ + (messageExtractor.entityId(msg), messageExtractor.entityMessage(msg)) + }, extractShardId = msg ⇒ messageExtractor.shardId(msg)) } diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala index fb39fa37e6..a018222ba0 100644 --- a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala +++ b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala @@ -121,4 +121,4 @@ class DistributedPubSubMediatorNotSendingToDeadLettersSpec } } } -} \ No newline at end of file +} diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala index a6a454a7f5..a20a821737 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala @@ -134,7 +134,7 @@ private[cluster] final class ClusterHeartbeatSender extends Actor with ActorLogg } def addMember(m: Member): Unit = - if (m.uniqueAddress != selfUniqueAddress && // is not self + if (m.uniqueAddress != selfUniqueAddress && // is not self !state.contains(m.uniqueAddress) && // not already added filterInternalClusterMembers(m) // should be watching members from this DC (internal / external) ) { diff --git a/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala b/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala index 2d6b1e56f1..736dd25fbb 100644 --- a/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala +++ b/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala @@ -321,20 +321,20 @@ private[cluster] object CrossDcHeartbeatingState { crossDcFailureDetector, nrOfMonitoredNodesPerDc, state = { - // TODO unduplicate this with the logic in MembershipState.ageSortedTopOldestMembersPerDc - val groupedByDc = members.filter(atLeastInUpState).groupBy(_.dataCenter) + // TODO unduplicate this with the logic in MembershipState.ageSortedTopOldestMembersPerDc + val groupedByDc = members.filter(atLeastInUpState).groupBy(_.dataCenter) - if (members.ordering == Member.ageOrdering) { - // we already have the right ordering - groupedByDc - } else { - // we need to enforce the ageOrdering for the SortedSet in each DC - groupedByDc.map { - case (dc, ms) ⇒ - dc → (SortedSet.empty[Member](Member.ageOrdering) 
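The ClusterSharding overloads above adapt a MessageExtractor into the extractEntityId/extractShardId pair; a sketch of that pair written directly (`Envelope` and the shard count are hypothetical):

```scala
import akka.cluster.sharding.ShardRegion

// Hypothetical wrapper; real systems carry the entity id in the message.
final case class Envelope(entityId: String, payload: Any)

val extractEntityId: ShardRegion.ExtractEntityId = {
  case Envelope(id, payload) ⇒ (id, payload)
}
val extractShardId: ShardRegion.ExtractShardId = {
  case Envelope(id, _) ⇒ (math.abs(id.hashCode) % 10).toString
}
```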
diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala
index fb39fa37e6..a018222ba0 100644
--- a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala
+++ b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorDeadLettersSpec.scala
@@ -121,4 +121,4 @@ class DistributedPubSubMediatorNotSendingToDeadLettersSpec
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala
index a6a454a7f5..a20a821737 100644
--- a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala
@@ -134,7 +134,7 @@ private[cluster] final class ClusterHeartbeatSender extends Actor with ActorLogg
   }

   def addMember(m: Member): Unit =
-    if (m.uniqueAddress != selfUniqueAddress && // is not self
+    if (m.uniqueAddress != selfUniqueAddress && // is not self
       !state.contains(m.uniqueAddress) && // not already added
       filterInternalClusterMembers(m) // should be watching members from this DC (internal / external)
     ) {
diff --git a/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala b/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala
index 2d6b1e56f1..736dd25fbb 100644
--- a/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/CrossDcClusterHeartbeat.scala
@@ -321,20 +321,20 @@ private[cluster] object CrossDcHeartbeatingState {
       crossDcFailureDetector,
       nrOfMonitoredNodesPerDc,
       state = {
-      // TODO unduplicate this with the logic in MembershipState.ageSortedTopOldestMembersPerDc
-      val groupedByDc = members.filter(atLeastInUpState).groupBy(_.dataCenter)
+        // TODO unduplicate this with the logic in MembershipState.ageSortedTopOldestMembersPerDc
+        val groupedByDc = members.filter(atLeastInUpState).groupBy(_.dataCenter)

-      if (members.ordering == Member.ageOrdering) {
-        // we already have the right ordering
-        groupedByDc
-      } else {
-        // we need to enforce the ageOrdering for the SortedSet in each DC
-        groupedByDc.map {
-          case (dc, ms) ⇒
-            dc → (SortedSet.empty[Member](Member.ageOrdering) union ms)
+        if (members.ordering == Member.ageOrdering) {
+          // we already have the right ordering
+          groupedByDc
+        } else {
+          // we need to enforce the ageOrdering for the SortedSet in each DC
+          groupedByDc.map {
+            case (dc, ms) ⇒
+              dc → (SortedSet.empty[Member](Member.ageOrdering) union ms)
+          }
         }
-      }
-    })
+      })
   }
 }
diff --git a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala
index cc707ffbfb..e4d80ab9c5 100644
--- a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala
@@ -532,4 +532,4 @@ private[akka] trait ClusterRouterActor { this: RouterActor ⇒
     case ReachableMember(m) ⇒
       if (isAvailable(m)) addMember(m)
   }
-}
\ No newline at end of file
+}
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala
index d83c159d58..c77200d270 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala
@@ -343,7 +343,7 @@ trait MultiNodeClusterSpec extends Suite with STMultiNodeSpec with WatchedByCoro
       case cause: Exception ⇒
         throw new AssertionError(s"Member ${toBeRemovedAddress} was not removed within ${timeout}!", cause)
     }
-    awaiter ! PoisonPill // you've done your job, now die
+    awaiter ! PoisonPill // you've done your job, now die
     enterBarrier("member-shutdown")
     markNodeAsUnavailable(toBeRemovedAddress)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala
index ecee9c051b..2155a95f5f 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala
@@ -133,9 +133,9 @@ abstract class UseRoleIgnoredSpec extends MultiNodeSpec(UseRoleIgnoredMultiJvmSp
       val router = system.actorOf(
         ClusterRouterGroup(
-        RoundRobinGroup(paths = Nil),
-        ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
-          allowLocalRoutees = false, useRoles = roles)).props,
+          RoundRobinGroup(paths = Nil),
+          ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
+            allowLocalRoutees = false, useRoles = roles)).props,
         "router-2b")

       awaitAssert(currentRoutees(router).size should ===(4))
@@ -193,9 +193,9 @@ abstract class UseRoleIgnoredSpec extends MultiNodeSpec(UseRoleIgnoredMultiJvmSp
       val router = system.actorOf(
         ClusterRouterGroup(
-        RoundRobinGroup(paths = Nil),
-        ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
-          allowLocalRoutees = true, useRoles = roles)).props,
+          RoundRobinGroup(paths = Nil),
+          ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
+            allowLocalRoutees = true, useRoles = roles)).props,
         "router-3b")

       awaitAssert(currentRoutees(router).size should ===(4))
@@ -253,9 +253,9 @@ abstract class UseRoleIgnoredSpec extends MultiNodeSpec(UseRoleIgnoredMultiJvmSp
       val router = system.actorOf(
         ClusterRouterGroup(
-        RoundRobinGroup(paths = Nil),
-        ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
-          allowLocalRoutees = true, useRoles = roles)).props,
+          RoundRobinGroup(paths = Nil),
+          ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
+            allowLocalRoutees = true, useRoles = roles)).props,
         "router-4b")

       awaitAssert(currentRoutees(router).size should ===(2))
@@ -313,9 +313,9 @@ abstract class UseRoleIgnoredSpec extends MultiNodeSpec(UseRoleIgnoredMultiJvmSp
       val router = system.actorOf(
         ClusterRouterGroup(
-        RoundRobinGroup(paths = Nil),
-        ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
-          allowLocalRoutees = true, useRoles = roles)).props,
+          RoundRobinGroup(paths = Nil),
+          ClusterRouterGroupSettings(totalInstances = 6, routeesPaths = List("/user/foo", "/user/bar"),
+            allowLocalRoutees = true, useRoles = roles)).props,
         "router-5b")

       awaitAssert(currentRoutees(router).size should ===(6))
diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala
index c1b1d938d7..0f8a6efe2d 100644
--- a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala
+++ b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala
@@ -249,4 +249,4 @@ akka.loglevel=DEBUG
     }
   }

-}
\ No newline at end of file
+}
diff --git a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala
index 3b3fb7af03..34c6509b81 100644
--- a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala
+++ b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala
@@ -21,8 +21,8 @@ class CircuitBreakerProxySpec extends AkkaSpec() with GivenWhenThen {
     callTimeout = 200 millis,
     resetTimeout = 1 second,
     failureDetector = {
-    _ == "FAILURE"
-  })
+      _ == "FAILURE"
+    })

   trait CircuitBreakerScenario {
     val sender = TestProbe()
diff --git a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala
index 818f70d030..b32c06fd70 100644
--- a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala
+++ b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala
@@ -64,11 +64,11 @@ class CircuitBreaker(potentiallyFailingService: ActorRef) extends Actor with Act
       CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = 2.seconds, resetTimeout = 30.seconds)
         .copy(
           failureDetector = {
-          _ match {
-            case Response(Left(_)) ⇒ true
-            case _ ⇒ false
-          }
-        })
+            _ match {
+              case Response(Left(_)) ⇒ true
+              case _ ⇒ false
+            }
+          })
         .props(potentiallyFailingService),
       "serviceCircuitBreaker")
@@ -106,15 +106,15 @@ class CircuitBreakerAsk(potentiallyFailingService: ActorRef) extends Actor with
       CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = askTimeout, resetTimeout = 30.seconds)
         .copy(
           failureDetector = {
-          _ match {
-            case Response(Left(_)) ⇒ true
-            case _ ⇒ false
-          }
-        })
+            _ match {
+              case Response(Left(_)) ⇒ true
+              case _ ⇒ false
+            }
+          })
         .copy(
           openCircuitFailureConverter = { failure ⇒
-          Left(s"Circuit open when processing ${failure.failedMsg}")
-        })
+            Left(s"Circuit open when processing ${failure.failedMsg}")
+          })
         .props(potentiallyFailingService),
       "serviceCircuitBreaker")
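For context: the builder usage re-indented above is easier to follow condensed. This sketch only rearranges what the sample already does; Response is the sample's own reply type and potentiallyFailingService its target actor, so nothing here is new API.

    import scala.concurrent.duration._
    import akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitBreakerPropsBuilder

    // condensed sketch of the sample above, not a new implementation
    val breakerProps =
      CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = 2.seconds, resetTimeout = 30.seconds)
        .copy(failureDetector = {
          _ match {
            case Response(Left(_)) ⇒ true // a service-level error counts as a failure
            case _ ⇒ false
          }
        })
        .props(potentiallyFailingService) // Props for the proxy actor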
diff --git a/akka-contrib/src/test/scala/akka/contrib/pattern/ReceivePipelineSpec.scala b/akka-contrib/src/test/scala/akka/contrib/pattern/ReceivePipelineSpec.scala
index 5a80ee6cb5..2e66a0cda8 100644
--- a/akka-contrib/src/test/scala/akka/contrib/pattern/ReceivePipelineSpec.scala
+++ b/akka-contrib/src/test/scala/akka/contrib/pattern/ReceivePipelineSpec.scala
@@ -246,7 +246,7 @@ class PersistentReceivePipelineSpec(config: Config) extends AkkaSpec(config) wit
         override def unhandled(message: Any) = probeRef ! message
       }))

-      // 11 ( -> not handled by EvenHalverInterceptor) -> 22 but > 10 so not handled in main receive:
+      // 11 ( -> not handled by EvenHalverInterceptor) -> 22 but > 10 so not handled in main receive:
       // original message falls back to unhandled implementation...
       replier ! 11
       probe.expectMsg(11)
@@ -505,4 +505,4 @@ object AfterSamples {
     }
   }
   //#interceptor-after
-}
\ No newline at end of file
+}
diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala
index e6444322dd..f5b6be6a42 100644
--- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala
+++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala
@@ -9,7 +9,7 @@ object Key {
    */
   def unapply(k: Key[_]): Option[String] = Some(k.id)

-  private[akka]type KeyR = Key[ReplicatedData]
+  private[akka] type KeyR = Key[ReplicatedData]

   type KeyId = String
diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala
index f2ac9a6313..4fc244160a 100644
--- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala
+++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala
@@ -36,7 +36,7 @@ object ORSet {
   /**
    * INTERNAL API
    */
-  @InternalApi private[akka]type Dot = VersionVector
+  @InternalApi private[akka] type Dot = VersionVector

   sealed trait DeltaOp extends ReplicatedDelta with RequiresCausalDeliveryOfDeltas with ReplicatedDataSerialization {
     type T = DeltaOp
diff --git a/akka-docs/src/test/scala/docs/CompileOnlySpec.scala b/akka-docs/src/test/scala/docs/CompileOnlySpec.scala
index 4725f55563..735a8d388b 100644
--- a/akka-docs/src/test/scala/docs/CompileOnlySpec.scala
+++ b/akka-docs/src/test/scala/docs/CompileOnlySpec.scala
@@ -9,5 +9,5 @@ trait CompileOnlySpec {
    * Given a block of code... does NOT execute it.
    * Useful when writing code samples in tests, which should only be compiled.
    */
-  def compileOnlySpec(body: => Unit) = ()
+  def compileOnlySpec(body: ⇒ Unit) = ()
 }
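For context: the compileOnlySpec signature above works because its parameter is call-by-name. A minimal sketch (hypothetical names) of why the doc samples are type-checked but never run:

    // a by-name parameter is evaluated only when (and if) the body uses it;
    // compileOnly simply never does, so the block is compiled but not executed
    def compileOnly(body: ⇒ Unit): Unit = ()

    compileOnly {
      sys.error("never thrown: this block is only compiled")
    }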
diff --git a/akka-docs/src/test/scala/docs/actor/ActorDocSpec.scala b/akka-docs/src/test/scala/docs/actor/ActorDocSpec.scala
index 0ebaa1fc04..bd8584ac38 100644
--- a/akka-docs/src/test/scala/docs/actor/ActorDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/ActorDocSpec.scala
@@ -30,8 +30,8 @@ class MyActor extends Actor {
   val log = Logging(context.system, this)

   def receive = {
-    case "test" => log.info("received test")
-    case _ => log.info("received unknown message")
+    case "test" ⇒ log.info("received test")
+    case _ ⇒ log.info("received unknown message")
   }
 }
 //#my-actor
@@ -44,20 +44,20 @@ class FirstActor extends Actor {
   val child = context.actorOf(Props[MyActor], name = "myChild")
   //#plus-some-behavior
   def receive = {
-    case x => sender() ! x
+    case x ⇒ sender() ! x
   }
   //#plus-some-behavior
 }
 //#context-actorOf

 class ActorWithArgs(arg: String) extends Actor {
-  def receive = { case _ => () }
+  def receive = { case _ ⇒ () }
 }

 //#actor-with-value-class-argument
 class Argument(val value: String) extends AnyVal
 class ValueClassActor(arg: Argument) extends Actor {
-  def receive = { case _ => () }
+  def receive = { case _ ⇒ () }
 }

 object ValueClassActor {
@@ -82,7 +82,7 @@ class DemoActorWrapper extends Actor {
   class DemoActor(magicNumber: Int) extends Actor {
     def receive = {
-      case x: Int => sender() ! (x + magicNumber)
+      case x: Int ⇒ sender() ! (x + magicNumber)
     }
   }

@@ -92,7 +92,7 @@ class DemoActorWrapper extends Actor {
     // ...
   //#props-factory
   def receive = {
-    case msg =>
+    case msg ⇒
   }
   //#props-factory
 }
@@ -110,8 +110,8 @@ class ActorWithMessagesWrapper {
   class MyActor extends Actor with ActorLogging {
     import MyActor._
     def receive = {
-      case Greeting(greeter) => log.info(s"I was greeted by $greeter.")
-      case Goodbye => log.info("Someone said goodbye to me.")
+      case Greeting(greeter) ⇒ log.info(s"I was greeted by $greeter.")
+      case Goodbye ⇒ log.info("Someone said goodbye to me.")
     }
   }
   //#messages-in-companion
@@ -138,13 +138,13 @@ class Hook extends Actor {

 class ReplyException extends Actor {
   def receive = {
-    case _ =>
+    case _ ⇒
       //#reply-exception
       try {
         val result = operation()
         sender() ! result
       } catch {
-        case e: Exception =>
+        case e: Exception ⇒
           sender() ! akka.actor.Status.Failure(e)
           throw e
       }
@@ -162,10 +162,10 @@ class StoppingActorsWrapper {
     val child: ActorRef = ???

     def receive = {
-      case "interrupt-child" =>
+      case "interrupt-child" ⇒
         context stop child

-      case "done" =>
+      case "done" ⇒
         context stop self
     }

@@ -184,15 +184,15 @@ class Manager extends Actor {
   val worker = context.watch(context.actorOf(Props[Cruncher], "worker"))

   def receive = {
-    case "job" => worker ! "crunch"
-    case Shutdown =>
+    case "job" ⇒ worker ! "crunch"
+    case Shutdown ⇒
       worker ! PoisonPill
       context become shuttingDown
   }

   def shuttingDown: Receive = {
-    case "job" => sender() ! "service unavailable, shutting down"
-    case Terminated(`worker`) =>
+    case "job" ⇒ sender() ! "service unavailable, shutting down"
+    case Terminated(`worker`) ⇒
       context stop self
   }
 }
@@ -200,7 +200,7 @@ class Manager extends Actor {

 class Cruncher extends Actor {
   def receive = {
-    case "crunch" => // crunch...
+    case "crunch" ⇒ // crunch...
   }
 }

@@ -211,10 +211,10 @@ class Swapper extends Actor {
   val log = Logging(system, this)

   def receive = {
-    case Swap =>
+    case Swap ⇒
       log.info("Hi")
       become({
-        case Swap =>
+        case Swap ⇒
           log.info("Ho")
           unbecome() // resets the latest 'become' (just for fun)
       }, discardOld = false) // push on top instead of replace
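For context: the Swapper hunk above relies on become(..., discardOld = false) stacking behaviors. A minimal sketch (hypothetical actor) of the stack discipline this implies:

    import akka.actor.Actor

    // each push with discardOld = false should be paired with an unbecome() pop,
    // otherwise the behavior stack grows with every message
    class Toggler extends Actor {
      def receive: Receive = {
        case "push" ⇒ context.become(alternate, discardOld = false)
      }
      def alternate: Receive = {
        case "pop" ⇒ context.unbecome() // back to receive
      }
    }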
It's {}", thing) } } @@ -288,7 +288,7 @@ class Pinger extends Actor { var countDown = 100 def receive = { - case Pong => + case Pong ⇒ println(s"${self.path} received pong, count down $countDown") if (countDown > 0) { @@ -303,7 +303,7 @@ class Pinger extends Actor { class Ponger(pinger: ActorRef) extends Actor { def receive = { - case Ping => + case Ping ⇒ println(s"${self.path} received ping") pinger ! Pong } @@ -330,7 +330,7 @@ class ActorDocSpec extends AkkaSpec(""" import context._ val myActor = actorOf(Props[MyActor], name = "myactor") def receive = { - case x => myActor ! x + case x ⇒ myActor ! x } } //#import-context @@ -347,17 +347,17 @@ class ActorDocSpec extends AkkaSpec(""" // TODO: convert docs to AkkaSpec(Map(...)) val filter = EventFilter.custom { - case e: Logging.Info => true - case _ => false + case e: Logging.Info ⇒ true + case _ ⇒ false } system.eventStream.publish(TestEvent.Mute(filter)) system.eventStream.subscribe(testActor, classOf[Logging.Info]) myActor ! "test" - expectMsgPF(1 second) { case Logging.Info(_, _, "received test") => true } + expectMsgPF(1 second) { case Logging.Info(_, _, "received test") ⇒ true } myActor ! "unknown" - expectMsgPF(1 second) { case Logging.Info(_, _, "received unknown message") => true } + expectMsgPF(1 second) { case Logging.Info(_, _, "received unknown message") ⇒ true } system.eventStream.unsubscribe(testActor) system.eventStream.publish(TestEvent.UnMute(filter)) @@ -436,8 +436,8 @@ class ActorDocSpec extends AkkaSpec(""" "creating actor with IndirectActorProducer" in { class Echo(name: String) extends Actor { def receive = { - case n: Int => sender() ! name - case message => + case n: Int ⇒ sender() ! name + case message ⇒ val target = testActor //#forward target forward message @@ -514,10 +514,10 @@ class ActorDocSpec extends AkkaSpec(""" // To set an initial delay context.setReceiveTimeout(30 milliseconds) def receive = { - case "Hello" => + case "Hello" ⇒ // To set in a response to a message context.setReceiveTimeout(100 milliseconds) - case ReceiveTimeout => + case ReceiveTimeout ⇒ // To turn it off context.setReceiveTimeout(Duration.Undefined) throw new RuntimeException("Receive timed out") @@ -530,18 +530,18 @@ class ActorDocSpec extends AkkaSpec(""" class HotSwapActor extends Actor { import context._ def angry: Receive = { - case "foo" => sender() ! "I am already angry?" - case "bar" => become(happy) + case "foo" ⇒ sender() ! "I am already angry?" + case "bar" ⇒ become(happy) } def happy: Receive = { - case "bar" => sender() ! "I am already happy :-)" - case "foo" => become(angry) + case "bar" ⇒ sender() ! "I am already happy :-)" + case "foo" ⇒ become(angry) } def receive = { - case "foo" => become(angry) - case "bar" => become(happy) + case "foo" ⇒ become(angry) + case "bar" ⇒ become(happy) } } //#hot-swap-actor @@ -555,16 +555,16 @@ class ActorDocSpec extends AkkaSpec(""" import akka.actor.Stash class ActorWithProtocol extends Actor with Stash { def receive = { - case "open" => + case "open" ⇒ unstashAll() context.become({ - case "write" => // do writing... - case "close" => + case "write" ⇒ // do writing... 
+ case "close" ⇒ unstashAll() context.unbecome() - case msg => stash() + case msg ⇒ stash() }, discardOld = false) // stack on top instead of replacing - case msg => stash() + case msg ⇒ stash() } } //#stash @@ -581,9 +581,9 @@ class ActorDocSpec extends AkkaSpec(""" var lastSender = context.system.deadLetters def receive = { - case "kill" => + case "kill" ⇒ context.stop(child); lastSender = sender() - case Terminated(`child`) => lastSender ! "finished" + case Terminated(`child`) ⇒ lastSender ! "finished" } } //#watch @@ -606,7 +606,7 @@ class ActorDocSpec extends AkkaSpec(""" victim ! Kill expectMsgPF(hint = "expecting victim to terminate") { - case Terminated(v) if v == victim => v // the Actor has indeed terminated + case Terminated(v) if v == victim ⇒ v // the Actor has indeed terminated } //#kill } @@ -640,15 +640,15 @@ class ActorDocSpec extends AkkaSpec(""" context.actorSelection("/user/another") ! Identify(identifyId) def receive = { - case ActorIdentity(`identifyId`, Some(ref)) => + case ActorIdentity(`identifyId`, Some(ref)) ⇒ context.watch(ref) context.become(active(ref)) - case ActorIdentity(`identifyId`, None) => context.stop(self) + case ActorIdentity(`identifyId`, None) ⇒ context.stop(self) } def active(another: ActorRef): Actor.Receive = { - case Terminated(`another`) => context.stop(self) + case Terminated(`another`) ⇒ context.stop(self) } } //#identify @@ -673,7 +673,7 @@ class ActorDocSpec extends AkkaSpec(""" // the actor has been stopped } catch { // the actor wasn't stopped within 5 seconds - case e: akka.pattern.AskTimeoutException => + case e: akka.pattern.AskTimeoutException ⇒ } //#gracefulStop } @@ -690,9 +690,9 @@ class ActorDocSpec extends AkkaSpec(""" val f: Future[Result] = for { - x <- ask(actorA, Request).mapTo[Int] // call pattern directly - s <- (actorB ask Request).mapTo[String] // call by implicit conversion - d <- (actorC ? Request).mapTo[Double] // call by symbolic name + x ← ask(actorA, Request).mapTo[Int] // call pattern directly + s ← (actorB ask Request).mapTo[String] // call by implicit conversion + d ← (actorC ? Request).mapTo[Double] // call by symbolic name } yield Result(x, s, d) f pipeTo actorD // .. or .. @@ -702,12 +702,12 @@ class ActorDocSpec extends AkkaSpec(""" class Replier extends Actor { def receive = { - case ref: ActorRef => + case ref: ActorRef ⇒ //#reply-with-sender sender().tell("reply", context.parent) // replies will go back to parent sender().!("reply")(context.parent) // alternative syntax (beware of the parens!) //#reply-with-sender - case x => + case x ⇒ //#reply-without-sender sender() ! x // replies will go to this actor //#reply-without-sender @@ -730,8 +730,8 @@ class ActorDocSpec extends AkkaSpec(""" "using ActorDSL outside of akka.actor package" in { import akka.actor.ActorDSL._ actor(new Act { - superviseWith(OneForOneStrategy() { case _ => Stop; Restart; Resume; Escalate }) - superviseWith(AllForOneStrategy() { case _ => Stop; Restart; Resume; Escalate }) + superviseWith(OneForOneStrategy() { case _ ⇒ Stop; Restart; Resume; Escalate }) + superviseWith(AllForOneStrategy() { case _ ⇒ Stop; Restart; Resume; Escalate }) }) } @@ -739,12 +739,12 @@ class ActorDocSpec extends AkkaSpec(""" val someActor = system.actorOf(Props(classOf[Replier], this)) //#coordinated-shutdown-addTask CoordinatedShutdown(system).addTask( - CoordinatedShutdown.PhaseBeforeServiceUnbind, "someTaskName") { () => - import akka.pattern.ask - import system.dispatcher - implicit val timeout = Timeout(5.seconds) - (someActor ? 
"stop").map(_ => Done) - } + CoordinatedShutdown.PhaseBeforeServiceUnbind, "someTaskName") { () ⇒ + import akka.pattern.ask + import system.dispatcher + implicit val timeout = Timeout(5.seconds) + (someActor ? "stop").map(_ ⇒ Done) + } //#coordinated-shutdown-addTask //#coordinated-shutdown-jvm-hook diff --git a/akka-docs/src/test/scala/docs/actor/BlockingDispatcherSample.scala b/akka-docs/src/test/scala/docs/actor/BlockingDispatcherSample.scala index a57e2350f8..b238a2cadc 100644 --- a/akka-docs/src/test/scala/docs/actor/BlockingDispatcherSample.scala +++ b/akka-docs/src/test/scala/docs/actor/BlockingDispatcherSample.scala @@ -11,7 +11,7 @@ import scala.concurrent.{ ExecutionContext, Future } // #blocking-in-actor class BlockingActor extends Actor { def receive = { - case i: Int => + case i: Int ⇒ Thread.sleep(5000) //block for 5 seconds, representing blocking I/O, etc println(s"Blocking operation finished: ${i}") } @@ -23,7 +23,7 @@ class BlockingFutureActor extends Actor { implicit val executionContext: ExecutionContext = context.dispatcher def receive = { - case i: Int => + case i: Int ⇒ println(s"Calling blocking Future: ${i}") Future { Thread.sleep(5000) //block for 5 seconds @@ -38,7 +38,7 @@ class SeparateDispatcherFutureActor extends Actor { implicit val executionContext: ExecutionContext = context.system.dispatchers.lookup("my-blocking-dispatcher") def receive = { - case i: Int => + case i: Int ⇒ println(s"Calling blocking Future: ${i}") Future { Thread.sleep(5000) //block for 5 seconds @@ -51,7 +51,7 @@ class SeparateDispatcherFutureActor extends Actor { // #print-actor class PrintActor extends Actor { def receive = { - case i: Int => + case i: Int ⇒ println(s"PrintActor: ${i}") } } @@ -66,7 +66,7 @@ object BlockingDispatcherSample { val actor1 = system.actorOf(Props(new BlockingFutureActor)) val actor2 = system.actorOf(Props(new PrintActor)) - for (i <- 1 to 100) { + for (i ← 1 to 100) { actor1 ! i actor2 ! i } @@ -102,7 +102,7 @@ object SeparateDispatcherSample { val actor1 = system.actorOf(Props(new SeparateDispatcherFutureActor)) val actor2 = system.actorOf(Props(new PrintActor)) - for (i <- 1 to 100) { + for (i ← 1 to 100) { actor1 ! i actor2 ! i } diff --git a/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala b/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala index 8eb05dc3e7..9437ca3034 100644 --- a/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala +++ b/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala @@ -5,7 +5,7 @@ package docs.actor import language.postfixOps -import akka.testkit.{ AkkaSpec => MyFavoriteTestFrameWorkPlusAkkaTestKit } +import akka.testkit.{ AkkaSpec ⇒ MyFavoriteTestFrameWorkPlusAkkaTestKit } import akka.util.ByteString //#test-code @@ -54,24 +54,24 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit { //#when-syntax when(Idle) { - case Event(SetTarget(ref), Uninitialized) => + case Event(SetTarget(ref), Uninitialized) ⇒ stay using Todo(ref, Vector.empty) } //#when-syntax //#transition-elided onTransition { - case Active -> Idle => + case Active -> Idle ⇒ stateData match { - case Todo(ref, queue) => ref ! Batch(queue) - case _ => // nothing to do + case Todo(ref, queue) ⇒ ref ! 
diff --git a/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala b/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala
index 8eb05dc3e7..9437ca3034 100644
--- a/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/FSMDocSpec.scala
@@ -5,7 +5,7 @@ package docs.actor

 import language.postfixOps
-import akka.testkit.{ AkkaSpec => MyFavoriteTestFrameWorkPlusAkkaTestKit }
+import akka.testkit.{ AkkaSpec ⇒ MyFavoriteTestFrameWorkPlusAkkaTestKit }
 import akka.util.ByteString

 //#test-code
@@ -54,24 +54,24 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
     //#when-syntax
     when(Idle) {
-      case Event(SetTarget(ref), Uninitialized) =>
+      case Event(SetTarget(ref), Uninitialized) ⇒
         stay using Todo(ref, Vector.empty)
     }
     //#when-syntax

     //#transition-elided
     onTransition {
-      case Active -> Idle =>
+      case Active -> Idle ⇒
         stateData match {
-          case Todo(ref, queue) => ref ! Batch(queue)
-          case _ => // nothing to do
+          case Todo(ref, queue) ⇒ ref ! Batch(queue)
+          case _ ⇒ // nothing to do
         }
     }
     //#transition-elided

     //#when-syntax
     when(Active, stateTimeout = 1 second) {
-      case Event(Flush | StateTimeout, t: Todo) =>
+      case Event(Flush | StateTimeout, t: Todo) ⇒
         goto(Idle) using t.copy(queue = Vector.empty)
     }
     //#when-syntax
@@ -79,10 +79,10 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
     //#unhandled-elided
     whenUnhandled {
       // common code for both states
-      case Event(Queue(obj), t @ Todo(_, v)) =>
+      case Event(Queue(obj), t @ Todo(_, v)) ⇒
         goto(Active) using t.copy(queue = v :+ obj)

-      case Event(e, s) =>
+      case Event(e, s) ⇒
         log.warning("received unhandled request {} in state {}/{}", e, stateName, s)
         stay
     }
@@ -108,16 +108,16 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
     //#modifier-syntax
     when(SomeState) {
-      case Event(msg, _) =>
+      case Event(msg, _) ⇒
         goto(Processing) using (newData) forMax (5 seconds) replying (WillDo)
     }
     //#modifier-syntax

     //#transition-syntax
     onTransition {
-      case Idle -> Active => setTimer("timeout", Tick, 1 second, repeat = true)
-      case Active -> _ => cancelTimer("timeout")
-      case x -> Idle => log.info("entering Idle from " + x)
+      case Idle -> Active ⇒ setTimer("timeout", Tick, 1 second, repeat = true)
+      case Active -> _ ⇒ cancelTimer("timeout")
+      case x -> Idle ⇒ log.info("entering Idle from " + x)
     }
     //#transition-syntax
@@ -131,7 +131,7 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
     //#stop-syntax
     when(Error) {
-      case Event("stop", _) =>
+      case Event("stop", _) ⇒
         // do cleanup ...
         stop()
     }
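For context: the when/goto/using DSL reformatted above reads most clearly as a complete machine. A minimal sketch (hypothetical states and data) of the same constructs end to end:

    import akka.actor.FSM
    import scala.concurrent.duration._

    sealed trait DoorState
    case object Closed extends DoorState
    case object Open extends DoorState

    // a two-state FSM: the Int state data counts completed open/close cycles
    class Door extends FSM[DoorState, Int] {
      startWith(Closed, 0)

      when(Closed) {
        case Event("open", cycles) ⇒ goto(Open) using cycles
      }

      when(Open, stateTimeout = 1.second) {
        case Event(StateTimeout, cycles) ⇒ goto(Closed) using cycles + 1
      }

      initialize()
    }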
@@ -139,38 +139,38 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
     //#transform-syntax
     when(SomeState)(transform {
-      case Event(bytes: ByteString, read) => stay using (read + bytes.length)
+      case Event(bytes: ByteString, read) ⇒ stay using (read + bytes.length)
     } using {
-      case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
+      case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 ⇒
         goto(Processing)
     })
     //#transform-syntax

     //#alt-transform-syntax
     val processingTrigger: PartialFunction[State, State] = {
-      case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
+      case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 ⇒
        goto(Processing)
     }

     when(SomeState)(transform {
-      case Event(bytes: ByteString, read) => stay using (read + bytes.length)
+      case Event(bytes: ByteString, read) ⇒ stay using (read + bytes.length)
     } using processingTrigger)
     //#alt-transform-syntax

     //#termination-syntax
     onTermination {
-      case StopEvent(FSM.Normal, state, data) => // ...
-      case StopEvent(FSM.Shutdown, state, data) => // ...
-      case StopEvent(FSM.Failure(cause), state, data) => // ...
+      case StopEvent(FSM.Normal, state, data) ⇒ // ...
+      case StopEvent(FSM.Shutdown, state, data) ⇒ // ...
+      case StopEvent(FSM.Failure(cause), state, data) ⇒ // ...
     }
     //#termination-syntax

     //#unhandled-syntax
     whenUnhandled {
-      case Event(x: X, data) =>
+      case Event(x: X, data) ⇒
         log.info("Received unhandled event: " + x)
         stay
-      case Event(msg, _) =>
+      case Event(msg, _) ⇒
         log.warning("Received unknown event: " + msg)
         goto(Error)
     }
@@ -184,7 +184,7 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
       //#body-elided
       override def logDepth = 12
       onTermination {
-        case StopEvent(FSM.Failure(_), state, data) =>
+        case StopEvent(FSM.Failure(_), state, data) ⇒
           val lastEvents = getLog.mkString("\n\t")
           log.warning("Failure in state " + state + " with data " + data + "\n" +
             "Events leading up to this point:\n\t" + lastEvents)
diff --git a/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSample.scala b/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSample.scala
index e83f0f6870..65a873710c 100644
--- a/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSample.scala
+++ b/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSample.scala
@@ -49,14 +49,14 @@ class Listener extends Actor with ActorLogging {
   context.setReceiveTimeout(15 seconds)

   def receive = {
-    case Progress(percent) =>
+    case Progress(percent) ⇒
       log.info("Current progress: {} %", percent)
       if (percent >= 100.0) {
         log.info("That's all, shutting down")
         context.system.terminate()
       }

-    case ReceiveTimeout =>
+    case ReceiveTimeout ⇒
       // No progress within 15 seconds, ServiceUnavailable
       log.error("Shutting down due to unavailable service")
       context.system.terminate()
@@ -83,7 +83,7 @@ class Worker extends Actor with ActorLogging {
   // Stop the CounterService child if it throws ServiceUnavailable
   override val supervisorStrategy = OneForOneStrategy() {
-    case _: CounterService.ServiceUnavailable => Stop
+    case _: CounterService.ServiceUnavailable ⇒ Stop
   }

   // The sender of the initial Start message will continuously be notified
@@ -94,18 +94,18 @@ class Worker extends Actor with ActorLogging {
   import context.dispatcher // Use this Actors' Dispatcher as ExecutionContext

   def receive = LoggingReceive {
-    case Start if progressListener.isEmpty =>
+    case Start if progressListener.isEmpty ⇒
       progressListener = Some(sender())
       context.system.scheduler.schedule(Duration.Zero, 1 second, self, Do)

-    case Do =>
+    case Do ⇒
       counterService ! Increment(1)
       counterService ! Increment(1)
       counterService ! Increment(1)

       // Send current progress to the initial sender
       counterService ? GetCurrentCount map {
-        case CurrentCount(_, count) => Progress(100.0 * count / totalCount)
+        case CurrentCount(_, count) ⇒ Progress(100.0 * count / totalCount)
       } pipeTo progressListener.get
   }
 }
@@ -137,7 +137,7 @@ class CounterService extends Actor {
   override val supervisorStrategy = OneForOneStrategy(
     maxNrOfRetries = 3,
     withinTimeRange = 5 seconds) {
-    case _: Storage.StorageException => Restart
+    case _: Storage.StorageException ⇒ Restart
   }

   val key = self.path.name
@@ -168,21 +168,21 @@ class CounterService extends Actor {

   def receive = LoggingReceive {

-    case Entry(k, v) if k == key && counter == None =>
+    case Entry(k, v) if k == key && counter == None ⇒
       // Reply from Storage of the initial value, now we can create the Counter
       val c = context.actorOf(Props(classOf[Counter], key, v))
       counter = Some(c)
       // Tell the counter to use current storage
       c ! UseStorage(storage)
       // and send the buffered backlog to the counter
-      for ((replyTo, msg) <- backlog) c.tell(msg, sender = replyTo)
+      for ((replyTo, msg) ← backlog) c.tell(msg, sender = replyTo)
       backlog = IndexedSeq.empty

-    case msg: Increment => forwardOrPlaceInBacklog(msg)
+    case msg: Increment ⇒ forwardOrPlaceInBacklog(msg)

-    case msg: GetCurrentCount => forwardOrPlaceInBacklog(msg)
+    case msg: GetCurrentCount ⇒ forwardOrPlaceInBacklog(msg)

-    case Terminated(actorRef) if Some(actorRef) == storage =>
+    case Terminated(actorRef) if Some(actorRef) == storage ⇒
       // After 3 restarts the storage child is stopped.
       // We receive Terminated because we watch the child, see initStorage.
       storage = None
@@ -191,7 +191,7 @@ class CounterService extends Actor {
       // Try to re-establish storage after while
       context.system.scheduler.scheduleOnce(10 seconds, self, Reconnect)

-    case Reconnect =>
+    case Reconnect ⇒
       // Re-establish storage after the scheduled delay
       initStorage()
   }
@@ -201,8 +201,8 @@ class CounterService extends Actor {
     // the counter. Before that we place the messages in a backlog, to be sent
     // to the counter when it is initialized.
     counter match {
-      case Some(c) => c forward msg
-      case None =>
+      case Some(c) ⇒ c forward msg
+      case None ⇒
         if (backlog.size >= MaxBacklog)
           throw new ServiceUnavailable(
             "CounterService not available, lack of initial value")
@@ -232,15 +232,15 @@ class Counter(key: String, initialValue: Long) extends Actor {
   var storage: Option[ActorRef] = None

   def receive = LoggingReceive {
-    case UseStorage(s) =>
+    case UseStorage(s) ⇒
       storage = s
       storeCount()

-    case Increment(n) =>
+    case Increment(n) ⇒
       count += n
       storeCount()

-    case GetCurrentCount =>
+    case GetCurrentCount ⇒
       sender() ! CurrentCount(key, count)
   }
@@ -273,8 +273,8 @@ class Storage extends Actor {
   val db = DummyDB

   def receive = LoggingReceive {
-    case Store(Entry(key, count)) => db.save(key, count)
-    case Get(key) => sender() ! Entry(key, db.load(key).getOrElse(0L))
+    case Store(Entry(key, count)) ⇒ db.save(key, count)
+    case Get(key) ⇒ sender() ! Entry(key, db.load(key).getOrElse(0L))
   }
 }
diff --git a/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSpec.scala b/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSpec.scala
index 803ba8a1c3..a889d07161 100644
--- a/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/FaultHandlingDocSpec.scala
@@ -27,15 +27,15 @@ object FaultHandlingDocSpec {
     override val supervisorStrategy =
       OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
-        case _: ArithmeticException => Resume
-        case _: NullPointerException => Restart
-        case _: IllegalArgumentException => Stop
-        case _: Exception => Escalate
+        case _: ArithmeticException ⇒ Resume
+        case _: NullPointerException ⇒ Restart
+        case _: IllegalArgumentException ⇒ Stop
+        case _: Exception ⇒ Escalate
       }
     //#strategy
     def receive = {
-      case p: Props => sender() ! context.actorOf(p)
+      case p: Props ⇒ sender() ! context.actorOf(p)
     }
   }
   //#supervisor
@@ -49,15 +49,15 @@ object FaultHandlingDocSpec {
     override val supervisorStrategy =
       OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
-        case _: ArithmeticException => Resume
-        case _: NullPointerException => Restart
-        case _: IllegalArgumentException => Stop
-        case _: Exception => Escalate
+        case _: ArithmeticException ⇒ Resume
+        case _: NullPointerException ⇒ Restart
+        case _: IllegalArgumentException ⇒ Stop
+        case _: Exception ⇒ Escalate
       }
     //#strategy2
     def receive = {
-      case p: Props => sender() ! context.actorOf(p)
+      case p: Props ⇒ sender() ! context.actorOf(p)
     }
     // override default to kill all children during restart
     override def preRestart(cause: Throwable, msg: Option[Any]) {}
@@ -72,9 +72,9 @@ object FaultHandlingDocSpec {
     override val supervisorStrategy =
       OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
-        case _: ArithmeticException => Resume
-        case t =>
-          super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate)
+        case _: ArithmeticException ⇒ Resume
+        case t ⇒
+          super.supervisorStrategy.decider.applyOrElse(t, (_: Any) ⇒ Escalate)
       }
     //#default-strategy-fallback
@@ -85,9 +85,9 @@ object FaultHandlingDocSpec {
   class Child extends Actor {
     var state = 0
     def receive = {
-      case ex: Exception => throw ex
-      case x: Int => state = x
-      case "get" => sender() ! state
+      case ex: Exception ⇒ throw ex
+      case x: Int ⇒ state = x
+      case "get" ⇒ sender() ! state
     }
   }
   //#child
@@ -146,7 +146,7 @@ class FaultHandlingDocSpec(_system: ActorSystem) extends TestKit(_system)
       //#stop
       watch(child) // have testActor watch “child”
       child ! new IllegalArgumentException // break it
-      expectMsgPF() { case Terminated(`child`) => () }
+      expectMsgPF() { case Terminated(`child`) ⇒ () }
       //#stop
     }
     EventFilter[Exception]("CRASH", occurrences = 2) intercept {
@@ -159,7 +159,7 @@ class FaultHandlingDocSpec(_system: ActorSystem) extends TestKit(_system)

       child2 ! new Exception("CRASH") // escalate failure
       expectMsgPF() {
-        case t @ Terminated(`child2`) if t.existenceConfirmed => ()
+        case t @ Terminated(`child2`) if t.existenceConfirmed ⇒ ()
       }
       //#escalate-kill
       //#escalate-restart
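For context: the deciders above map exception types to supervision directives. A minimal standalone sketch (hypothetical parent and exception choices) of the same pattern:

    import akka.actor.{ Actor, OneForOneStrategy, SupervisorStrategy }
    import akka.actor.SupervisorStrategy._
    import scala.concurrent.duration._

    // per-child directives, capped at 10 restarts within one minute
    class Guardian extends Actor {
      override val supervisorStrategy: SupervisorStrategy =
        OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1.minute) {
          case _: ArithmeticException ⇒ Resume // child state is still consistent
          case _: IllegalStateException ⇒ Restart // start over from a clean state
          case _ ⇒ Escalate // let this actor's parent decide
        }

      def receive: Receive = { case _ ⇒ }
    }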
diff --git a/akka-docs/src/test/scala/docs/actor/InitializationDocSpec.scala b/akka-docs/src/test/scala/docs/actor/InitializationDocSpec.scala
index bad56004fa..c2d30fd4b4 100644
--- a/akka-docs/src/test/scala/docs/actor/InitializationDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/InitializationDocSpec.scala
@@ -10,7 +10,7 @@ object InitializationDocSpec {
   class PreStartInitExample extends Actor {
     override def receive = {
-      case _ => // Ignore
+      case _ ⇒ // Ignore
     }

     //#preStartInit
@@ -37,14 +37,14 @@ object InitializationDocSpec {
     var initializeMe: Option[String] = None

     override def receive = {
-      case "init" =>
+      case "init" ⇒
         initializeMe = Some("Up and running")
         context.become(initialized, discardOld = true)

     }

     def initialized: Receive = {
-      case "U OK?" => initializeMe foreach { sender() ! _ }
+      case "U OK?" ⇒ initializeMe foreach { sender() ! _ }
     }
     //#messageInit
diff --git a/akka-docs/src/test/scala/docs/actor/PropsEdgeCaseSpec.scala b/akka-docs/src/test/scala/docs/actor/PropsEdgeCaseSpec.scala
index 03965016ed..db92a08fa1 100644
--- a/akka-docs/src/test/scala/docs/actor/PropsEdgeCaseSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/PropsEdgeCaseSpec.scala
@@ -17,7 +17,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
     //#props-edge-cases-value-class-example
     class ValueActor(value: MyValueClass) extends Actor {
      def receive = {
-        case multiplier: Long => sender() ! (value.v * multiplier)
+        case multiplier: Long ⇒ sender() ! (value.v * multiplier)
       }
     }
     val valueClassProp = Props(classOf[ValueActor], MyValueClass(5)) // Unsupported
@@ -26,7 +26,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
     //#props-edge-cases-default-values
     class DefaultValueActor(a: Int, b: Int = 5) extends Actor {
       def receive = {
-        case x: Int => sender() ! ((a + x) * b)
+        case x: Int ⇒ sender() ! ((a + x) * b)
       }
     }

@@ -34,7 +34,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
     class DefaultValueActor2(b: Int = 5) extends Actor {
       def receive = {
-        case x: Int => sender() ! (x * b)
+        case x: Int ⇒ sender() ! (x * b)
       }
     }
     val defaultValueProp2 = Props[DefaultValueActor2] // Unsupported
diff --git a/akka-docs/src/test/scala/docs/actor/SchedulerDocSpec.scala b/akka-docs/src/test/scala/docs/actor/SchedulerDocSpec.scala
index 085a8cc7e6..dc4c17fbd6 100644
--- a/akka-docs/src/test/scala/docs/actor/SchedulerDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/SchedulerDocSpec.scala
@@ -43,7 +43,7 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
     val Tick = "tick"
     class TickActor extends Actor {
       def receive = {
-        case Tick => //Do something
+        case Tick ⇒ //Do something
       }
     }
     val tickActor = system.actorOf(Props(classOf[TickActor], this))
diff --git a/akka-docs/src/test/scala/docs/actor/SharedMutableStateDocSpec.scala b/akka-docs/src/test/scala/docs/actor/SharedMutableStateDocSpec.scala
index f5b99dab57..25ec18d885 100644
--- a/akka-docs/src/test/scala/docs/actor/SharedMutableStateDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/SharedMutableStateDocSpec.scala
@@ -18,13 +18,13 @@ class SharedMutableStateDocSpec {

   class EchoActor extends Actor {
     def receive = {
-      case msg => sender() ! msg
+      case msg ⇒ sender() ! msg
     }
   }

   class CleanUpActor extends Actor {
     def receive = {
-      case set: mutable.Set[_] => set.clear()
+      case set: mutable.Set[_] ⇒ set.clear()
     }
   }

@@ -43,7 +43,7 @@ class SharedMutableStateDocSpec {
     }

     def receive = {
-      case _ =>
+      case _ ⇒
         implicit val ec = context.dispatcher
         implicit val timeout = Timeout(5 seconds) // needed for `?` below

@@ -52,7 +52,7 @@ class SharedMutableStateDocSpec {
         // application to break in weird ways
         Future { state = "This will race" }
         ((echoActor ? Message("With this other one")).mapTo[Message])
Message("With this other one")).mapTo[Message]) - .foreach { received => state = received.msg } + .foreach { received ⇒ state = received.msg } // Very bad: shared mutable object allows // the other actor to mutate your own state, diff --git a/akka-docs/src/test/scala/docs/actor/TimerDocSpec.scala b/akka-docs/src/test/scala/docs/actor/TimerDocSpec.scala index dfae34d9cf..7ea125ba31 100644 --- a/akka-docs/src/test/scala/docs/actor/TimerDocSpec.scala +++ b/akka-docs/src/test/scala/docs/actor/TimerDocSpec.scala @@ -22,10 +22,10 @@ object TimerDocSpec { timers.startSingleTimer(TickKey, FirstTick, 500.millis) def receive = { - case FirstTick => + case FirstTick ⇒ // do something useful here timers.startPeriodicTimer(TickKey, Tick, 1.second) - case Tick => + case Tick ⇒ // do something useful here } } diff --git a/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala b/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala index 3641fcea30..a44589a84a 100644 --- a/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala +++ b/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala @@ -4,7 +4,7 @@ package docs.actor //#imports -import java.lang.String.{ valueOf => println } +import java.lang.String.{ valueOf ⇒ println } import akka.actor.{ ActorContext, ActorRef, TypedActor, TypedProps } import akka.routing.RoundRobinGroup @@ -15,7 +15,7 @@ import scala.concurrent.duration._ //#imports //Mr funny man avoids printing to stdout AND keeping docs alright -import java.lang.String.{ valueOf => println } +import java.lang.String.{ valueOf ⇒ println } //#typed-actor-iface trait Squarer { @@ -111,7 +111,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { //#typed-actor-extension-tools } catch { - case e: Exception => //dun care + case e: Exception ⇒ //dun care } } @@ -181,7 +181,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { //Use "childSquarer" as a Squarer //#typed-actor-hierarchy } catch { - case e: Exception => //ignore + case e: Exception ⇒ //ignore } } @@ -204,7 +204,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { // prepare routees val routees: List[HasName] = List.fill(5) { namedActor() } - val routeePaths = routees map { r => + val routeePaths = routees map { r ⇒ TypedActor(system).getActorRefFor(r).path.toStringWithoutAddress } diff --git a/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala b/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala index 9cf2b28f20..ab38d2a966 100644 --- a/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala +++ b/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala @@ -34,16 +34,16 @@ class UnnestedReceives extends Actor { } def receive = { - case 'Replay => //Our first message should be a 'Replay message, all others are invalid + case 'Replay ⇒ //Our first message should be a 'Replay message, all others are invalid allOldMessages() foreach process //Process all old messages/events become { //Switch behavior to look for the GoAhead signal - case 'GoAhead => //When we get the GoAhead signal we process all our buffered messages/events + case 'GoAhead ⇒ //When we get the GoAhead signal we process all our buffered messages/events queue foreach process queue.clear become { //Then we change behaviour to process incoming messages/events as they arrive - case msg => process(msg) + case msg ⇒ process(msg) } - case msg => //While we haven't gotten the GoAhead signal, buffer all incoming messages + case msg ⇒ //While we haven't gotten the 
diff --git a/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala b/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala
index 3641fcea30..a44589a84a 100644
--- a/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/actor/TypedActorDocSpec.scala
@@ -4,7 +4,7 @@
 package docs.actor

 //#imports
-import java.lang.String.{ valueOf => println }
+import java.lang.String.{ valueOf ⇒ println }

 import akka.actor.{ ActorContext, ActorRef, TypedActor, TypedProps }
 import akka.routing.RoundRobinGroup
@@ -15,7 +15,7 @@ import scala.concurrent.duration._
 //#imports

 //Mr funny man avoids printing to stdout AND keeping docs alright
-import java.lang.String.{ valueOf => println }
+import java.lang.String.{ valueOf ⇒ println }

 //#typed-actor-iface
 trait Squarer {
@@ -111,7 +111,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {

       //#typed-actor-extension-tools
     } catch {
-      case e: Exception => //dun care
+      case e: Exception ⇒ //dun care
     }
   }
@@ -181,7 +181,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
       //Use "childSquarer" as a Squarer
       //#typed-actor-hierarchy
     } catch {
-      case e: Exception => //ignore
+      case e: Exception ⇒ //ignore
     }
   }
@@ -204,7 +204,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
     // prepare routees
     val routees: List[HasName] = List.fill(5) { namedActor() }

-    val routeePaths = routees map { r =>
+    val routeePaths = routees map { r ⇒
       TypedActor(system).getActorRefFor(r).path.toStringWithoutAddress
     }
diff --git a/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala b/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala
index 9cf2b28f20..ab38d2a966 100644
--- a/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala
+++ b/akka-docs/src/test/scala/docs/actor/UnnestedReceives.scala
@@ -34,16 +34,16 @@ class UnnestedReceives extends Actor {
   }

   def receive = {
-    case 'Replay => //Our first message should be a 'Replay message, all others are invalid
+    case 'Replay ⇒ //Our first message should be a 'Replay message, all others are invalid
       allOldMessages() foreach process //Process all old messages/events
       become { //Switch behavior to look for the GoAhead signal
-        case 'GoAhead => //When we get the GoAhead signal we process all our buffered messages/events
+        case 'GoAhead ⇒ //When we get the GoAhead signal we process all our buffered messages/events
           queue foreach process
           queue.clear
           become { //Then we change behaviour to process incoming messages/events as they arrive
-            case msg => process(msg)
+            case msg ⇒ process(msg)
           }
-        case msg => //While we haven't gotten the GoAhead signal, buffer all incoming messages
+        case msg ⇒ //While we haven't gotten the GoAhead signal, buffer all incoming messages
           queue += msg //Here you have full control, you can handle overflow etc
       }
   }
diff --git a/akka-docs/src/test/scala/docs/agent/AgentDocSpec.scala b/akka-docs/src/test/scala/docs/agent/AgentDocSpec.scala
index be736b8e99..9b7bdec637 100644
--- a/akka-docs/src/test/scala/docs/agent/AgentDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/agent/AgentDocSpec.scala
@@ -59,7 +59,7 @@ class AgentDocSpec extends AkkaSpec {
     agent send (_ * 2)
     //#send

-    def longRunningOrBlockingFunction = (i: Int) => i * 1 // Just for the example code
+    def longRunningOrBlockingFunction = (i: Int) ⇒ i * 1 // Just for the example code
     def someExecutionContext() = scala.concurrent.ExecutionContext.Implicits.global // Just for the example code

     //#send-off
     // the ExecutionContext you want to run the function on
@@ -82,7 +82,7 @@ class AgentDocSpec extends AkkaSpec {
     val f3: Future[Int] = agent alter (_ * 2)
     //#alter

-    def longRunningOrBlockingFunction = (i: Int) => i * 1 // Just for the example code
+    def longRunningOrBlockingFunction = (i: Int) ⇒ i * 1 // Just for the example code
     def someExecutionContext() = ExecutionContext.global // Just for the example code

     //#alter-off
@@ -103,7 +103,7 @@ class AgentDocSpec extends AkkaSpec {
     import scala.concurrent.stm._

     def transfer(from: Agent[Int], to: Agent[Int], amount: Int): Boolean = {
-      atomic { txn =>
+      atomic { txn ⇒
         if (from.get < amount) false
         else {
           from send (_ - amount)
@@ -134,19 +134,19 @@ class AgentDocSpec extends AkkaSpec {
     val agent2 = Agent(5)

     // uses foreach
-    for (value <- agent1)
+    for (value ← agent1)
       println(value)

     // uses map
-    val agent3 = for (value <- agent1) yield value + 1
+    val agent3 = for (value ← agent1) yield value + 1

     // or using map directly
     val agent4 = agent1 map (_ + 1)

     // uses flatMap
     val agent5 = for {
-      value1 <- agent1
-      value2 <- agent2
+      value1 ← agent1
+      value2 ← agent2
     } yield value1 + value2
     //#monadic-example
("Hello %s" format msg.bodyAs[String]) } } //#Consumer4 diff --git a/akka-docs/src/test/scala/docs/camel/CustomRoute.scala b/akka-docs/src/test/scala/docs/camel/CustomRoute.scala index 25ba5b4a05..a0612901d1 100644 --- a/akka-docs/src/test/scala/docs/camel/CustomRoute.scala +++ b/akka-docs/src/test/scala/docs/camel/CustomRoute.scala @@ -18,9 +18,9 @@ object CustomRoute { import akka.camel._ class Responder extends Actor { def receive = { - case msg: CamelMessage => + case msg: CamelMessage ⇒ sender() ! (msg.mapBody { - body: String => "received %s" format body + body: String ⇒ "received %s" format body }) } } @@ -47,9 +47,9 @@ object CustomRoute { class ErrorThrowingConsumer(override val endpointUri: String) extends Consumer { def receive = { - case msg: CamelMessage => throw new Exception("error: %s" format msg.body) + case msg: CamelMessage ⇒ throw new Exception("error: %s" format msg.body) } - override def onRouteDefinition = (rd) => rd.onException(classOf[Exception]). + override def onRouteDefinition = (rd) ⇒ rd.onException(classOf[Exception]). handled(true).transform(Builder.exceptionMessage).end final override def preRestart(reason: Throwable, message: Option[Any]) { diff --git a/akka-docs/src/test/scala/docs/camel/Introduction.scala b/akka-docs/src/test/scala/docs/camel/Introduction.scala index f79c7e16e4..7301dcc501 100644 --- a/akka-docs/src/test/scala/docs/camel/Introduction.scala +++ b/akka-docs/src/test/scala/docs/camel/Introduction.scala @@ -17,8 +17,8 @@ object Introduction { def endpointUri = "mina2:tcp://localhost:6200?textline=true" def receive = { - case msg: CamelMessage => { /* ... */ } - case _ => { /* ... */ } + case msg: CamelMessage ⇒ { /* ... */ } + case _ ⇒ { /* ... */ } } } @@ -37,8 +37,8 @@ object Introduction { def endpointUri = "jetty:http://localhost:8877/example" def receive = { - case msg: CamelMessage => { /* ... */ } - case _ => { /* ... */ } + case msg: CamelMessage ⇒ { /* ... */ } + case _ ⇒ { /* ... */ } } } //#Consumer @@ -87,8 +87,8 @@ object Introduction { def endpointUri = "mina2:tcp://localhost:6200?textline=true" def receive = { - case msg: CamelMessage => { /* ... */ } - case _ => { /* ... */ } + case msg: CamelMessage ⇒ { /* ... */ } + case _ ⇒ { /* ... */ } } } val system = ActorSystem("some-system") diff --git a/akka-docs/src/test/scala/docs/camel/Producers.scala b/akka-docs/src/test/scala/docs/camel/Producers.scala index e168660a7e..2c01c77ef2 100644 --- a/akka-docs/src/test/scala/docs/camel/Producers.scala +++ b/akka-docs/src/test/scala/docs/camel/Producers.scala @@ -33,7 +33,7 @@ object Producers { class ResponseReceiver extends Actor { def receive = { - case msg: CamelMessage => + case msg: CamelMessage ⇒ // do something with the forwarded response } } @@ -61,11 +61,11 @@ object Producers { def endpointUri = uri def upperCase(msg: CamelMessage) = msg.mapBody { - body: String => body.toUpperCase + body: String ⇒ body.toUpperCase } override def transformOutgoingMessage(msg: Any) = msg match { - case msg: CamelMessage => upperCase(msg) + case msg: CamelMessage ⇒ upperCase(msg) } } //#TransformOutgoingMessage @@ -106,7 +106,7 @@ object Producers { import akka.actor.Actor class MyActor extends Actor { def receive = { - case msg => + case msg ⇒ val template = CamelExtension(context.system).template template.sendBody("direct:news", msg) } @@ -118,7 +118,7 @@ object Producers { import akka.actor.Actor class MyActor extends Actor { def receive = { - case msg => + case msg ⇒ val template = CamelExtension(context.system).template sender() ! 
diff --git a/akka-docs/src/test/scala/docs/camel/Producers.scala b/akka-docs/src/test/scala/docs/camel/Producers.scala
index e168660a7e..2c01c77ef2 100644
--- a/akka-docs/src/test/scala/docs/camel/Producers.scala
+++ b/akka-docs/src/test/scala/docs/camel/Producers.scala
@@ -33,7 +33,7 @@ object Producers {

     class ResponseReceiver extends Actor {
       def receive = {
-        case msg: CamelMessage =>
+        case msg: CamelMessage ⇒
           // do something with the forwarded response
       }
     }
@@ -61,11 +61,11 @@ object Producers {
       def endpointUri = uri

       def upperCase(msg: CamelMessage) = msg.mapBody {
-        body: String => body.toUpperCase
+        body: String ⇒ body.toUpperCase
       }

       override def transformOutgoingMessage(msg: Any) = msg match {
-        case msg: CamelMessage => upperCase(msg)
+        case msg: CamelMessage ⇒ upperCase(msg)
       }
     }
     //#TransformOutgoingMessage
@@ -106,7 +106,7 @@ object Producers {
     import akka.actor.Actor
     class MyActor extends Actor {
       def receive = {
-        case msg =>
+        case msg ⇒
           val template = CamelExtension(context.system).template
           template.sendBody("direct:news", msg)
       }
@@ -118,7 +118,7 @@ object Producers {
     import akka.actor.Actor
     class MyActor extends Actor {
       def receive = {
-        case msg =>
+        case msg ⇒
           val template = CamelExtension(context.system).template
           sender() ! template.requestBody("direct:news", msg)
       }
@@ -126,4 +126,4 @@ object Producers {
   //#RequestProducerTemplate
 }
-}
\ No newline at end of file
+}
diff --git a/akka-docs/src/test/scala/docs/camel/PublishSubscribe.scala b/akka-docs/src/test/scala/docs/camel/PublishSubscribe.scala
index 8884723da0..b7cae8eaa1 100644
--- a/akka-docs/src/test/scala/docs/camel/PublishSubscribe.scala
+++ b/akka-docs/src/test/scala/docs/camel/PublishSubscribe.scala
@@ -9,7 +9,7 @@ object PublishSubscribe {
       def endpointUri = uri

       def receive = {
-        case msg: CamelMessage => println("%s received: %s" format (name, msg.body))
+        case msg: CamelMessage ⇒ println("%s received: %s" format (name, msg.body))
       }
     }

@@ -25,7 +25,7 @@ object PublishSubscribe {
       def endpointUri = uri

       def receive = {
-        case msg: CamelMessage => {
+        case msg: CamelMessage ⇒ {
           publisher ! msg.bodyAs[String]
           sender() ! ("message published")
         }
diff --git a/akka-docs/src/test/scala/docs/circuitbreaker/CircuitBreakerDocSpec.scala b/akka-docs/src/test/scala/docs/circuitbreaker/CircuitBreakerDocSpec.scala
index 7c21856140..f3d109b0a2 100644
--- a/akka-docs/src/test/scala/docs/circuitbreaker/CircuitBreakerDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/circuitbreaker/CircuitBreakerDocSpec.scala
@@ -36,9 +36,9 @@ class DangerousActor extends Actor with ActorLogging {
   def dangerousCall: String = "This really isn't that dangerous of a call after all"

   def receive = {
-    case "is my middle name" =>
+    case "is my middle name" ⇒
       breaker.withCircuitBreaker(Future(dangerousCall)) pipeTo sender()
-    case "block for me" =>
+    case "block for me" ⇒
      sender() ! breaker.withSyncCircuitBreaker(dangerousCall)
   }
   //#circuit-breaker-usage
@@ -62,16 +62,16 @@ class TellPatternActor(recipient: ActorRef) extends Actor with ActorLogging {
   import akka.actor.ReceiveTimeout

   def receive = {
-    case "call" if breaker.isClosed => {
+    case "call" if breaker.isClosed ⇒ {
       recipient ! "message"
"message" } - case "response" => { + case "response" ⇒ { breaker.succeed() } - case err: Throwable => { + case err: Throwable ⇒ { breaker.fail() } - case ReceiveTimeout => { + case ReceiveTimeout ⇒ { breaker.fail() } } @@ -82,9 +82,9 @@ class EvenNoFailureActor extends Actor { import context.dispatcher //#even-no-as-failure def luckyNumber(): Future[Int] = { - val evenNumberAsFailure: Try[Int] => Boolean = { - case Success(n) => n % 2 == 0 - case Failure(_) => true + val evenNumberAsFailure: Try[Int] ⇒ Boolean = { + case Success(n) ⇒ n % 2 == 0 + case Failure(_) ⇒ true } val breaker = @@ -100,6 +100,6 @@ class EvenNoFailureActor extends Actor { //#even-no-as-failure override def receive = { - case x: Int => + case x: Int ⇒ } } diff --git a/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala b/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala index 3acfbdec03..a2bbe28b32 100644 --- a/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala +++ b/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala @@ -15,8 +15,8 @@ class FactorialBackend extends Actor with ActorLogging { import context.dispatcher def receive = { - case (n: Int) => - Future(factorial(n)) map { result => (n, result) } pipeTo sender() + case (n: Int) ⇒ + Future(factorial(n)) map { result ⇒ (n, result) } pipeTo sender() } def factorial(n: Int): BigInt = { @@ -43,4 +43,4 @@ object FactorialBackend { system.actorOf(Props[MetricsListener], name = "metricsListener") } -} \ No newline at end of file +} diff --git a/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala b/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala index aa1db19db6..cd2623f3dd 100644 --- a/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala +++ b/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala @@ -27,13 +27,13 @@ class FactorialFrontend(upToN: Int, repeat: Boolean) extends Actor with ActorLog } def receive = { - case (n: Int, factorial: BigInt) => + case (n: Int, factorial: BigInt) ⇒ if (n == upToN) { log.debug("{}! = {}", n, factorial) if (repeat) sendJobs() else context.stop(self) } - case ReceiveTimeout => + case ReceiveTimeout ⇒ log.info("Timeout") sendJobs() } diff --git a/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala b/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala index 6dcaf70625..0dee9c4af2 100644 --- a/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala +++ b/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala @@ -23,24 +23,24 @@ class MetricsListener extends Actor with ActorLogging { override def postStop(): Unit = extension.unsubscribe(self) def receive = { - case ClusterMetricsChanged(clusterMetrics) => - clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics => + case ClusterMetricsChanged(clusterMetrics) ⇒ + clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics ⇒ logHeap(nodeMetrics) logCpu(nodeMetrics) } - case state: CurrentClusterState => // Ignore. + case state: CurrentClusterState ⇒ // Ignore. } def logHeap(nodeMetrics: NodeMetrics): Unit = nodeMetrics match { - case HeapMemory(address, timestamp, used, committed, max) => + case HeapMemory(address, timestamp, used, committed, max) ⇒ log.info("Used heap: {} MB", used.doubleValue / 1024 / 1024) - case _ => // No heap info. + case _ ⇒ // No heap info. 
diff --git a/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala b/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala
index 3acfbdec03..a2bbe28b32 100644
--- a/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala
+++ b/akka-docs/src/test/scala/docs/cluster/FactorialBackend.scala
@@ -15,8 +15,8 @@ class FactorialBackend extends Actor with ActorLogging {
   import context.dispatcher

   def receive = {
-    case (n: Int) =>
-      Future(factorial(n)) map { result => (n, result) } pipeTo sender()
+    case (n: Int) ⇒
+      Future(factorial(n)) map { result ⇒ (n, result) } pipeTo sender()
   }

   def factorial(n: Int): BigInt = {
@@ -43,4 +43,4 @@ object FactorialBackend {
     system.actorOf(Props[MetricsListener], name = "metricsListener")
   }

-}
\ No newline at end of file
+}
diff --git a/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala b/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala
index aa1db19db6..cd2623f3dd 100644
--- a/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala
+++ b/akka-docs/src/test/scala/docs/cluster/FactorialFrontend.scala
@@ -27,13 +27,13 @@ class FactorialFrontend(upToN: Int, repeat: Boolean) extends Actor with ActorLog
   }

   def receive = {
-    case (n: Int, factorial: BigInt) =>
+    case (n: Int, factorial: BigInt) ⇒
       if (n == upToN) {
         log.debug("{}! = {}", n, factorial)
         if (repeat) sendJobs()
         else context.stop(self)
       }
-    case ReceiveTimeout =>
+    case ReceiveTimeout ⇒
       log.info("Timeout")
       sendJobs()
   }
diff --git a/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala b/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala
index 6dcaf70625..0dee9c4af2 100644
--- a/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala
+++ b/akka-docs/src/test/scala/docs/cluster/MetricsListener.scala
@@ -23,24 +23,24 @@ class MetricsListener extends Actor with ActorLogging {
   override def postStop(): Unit = extension.unsubscribe(self)

   def receive = {
-    case ClusterMetricsChanged(clusterMetrics) =>
-      clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics =>
+    case ClusterMetricsChanged(clusterMetrics) ⇒
+      clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics ⇒
        logHeap(nodeMetrics)
        logCpu(nodeMetrics)
      }
-    case state: CurrentClusterState => // Ignore.
+    case state: CurrentClusterState ⇒ // Ignore.
   }

   def logHeap(nodeMetrics: NodeMetrics): Unit = nodeMetrics match {
-    case HeapMemory(address, timestamp, used, committed, max) =>
+    case HeapMemory(address, timestamp, used, committed, max) ⇒
       log.info("Used heap: {} MB", used.doubleValue / 1024 / 1024)
-    case _ => // No heap info.
+    case _ ⇒ // No heap info.
   }

   def logCpu(nodeMetrics: NodeMetrics): Unit = nodeMetrics match {
-    case Cpu(address, timestamp, Some(systemLoadAverage), cpuCombined, cpuStolen, processors) =>
+    case Cpu(address, timestamp, Some(systemLoadAverage), cpuCombined, cpuStolen, processors) ⇒
       log.info("Load: {} ({} processors)", systemLoadAverage, processors)
-    case _ => // No cpu info.
+    case _ ⇒ // No cpu info.
   }
 }
 //#metrics-listener
diff --git a/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener.scala b/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener.scala
index 45a07c6b48..1cf0c70731 100644
--- a/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener.scala
+++ b/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener.scala
@@ -19,14 +19,14 @@ class SimpleClusterListener extends Actor with ActorLogging {
   override def postStop(): Unit = cluster.unsubscribe(self)

   def receive = {
-    case MemberUp(member) =>
+    case MemberUp(member) ⇒
       log.info("Member is Up: {}", member.address)
-    case UnreachableMember(member) =>
+    case UnreachableMember(member) ⇒
       log.info("Member detected as unreachable: {}", member)
-    case MemberRemoved(member, previousStatus) =>
+    case MemberRemoved(member, previousStatus) ⇒
       log.info(
         "Member is Removed: {} after {}",
         member.address, previousStatus)
-    case _: MemberEvent => // ignore
+    case _: MemberEvent ⇒ // ignore
   }
 }
diff --git a/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener2.scala b/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener2.scala
index 7c06cfb66a..90a3896a17 100644
--- a/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener2.scala
+++ b/akka-docs/src/test/scala/docs/cluster/SimpleClusterListener2.scala
@@ -18,16 +18,16 @@ class SimpleClusterListener2 extends Actor with ActorLogging {
   override def postStop(): Unit = cluster.unsubscribe(self)

   def receive = {
-    case state: CurrentClusterState =>
+    case state: CurrentClusterState ⇒
       log.info("Current members: {}", state.members.mkString(", "))
-    case MemberUp(member) =>
+    case MemberUp(member) ⇒
       log.info("Member is Up: {}", member.address)
-    case UnreachableMember(member) =>
+    case UnreachableMember(member) ⇒
       log.info("Member detected as unreachable: {}", member)
-    case MemberRemoved(member, previousStatus) =>
+    case MemberRemoved(member, previousStatus) ⇒
       log.info(
         "Member is Removed: {} after {}",
         member.address, previousStatus)
-    case _: MemberEvent => // ignore
+    case _: MemberEvent ⇒ // ignore
   }
 }
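For context: both listeners above follow the subscribe-in-preStart, unsubscribe-in-postStop lifecycle. A minimal sketch (hypothetical actor name) isolating just that contract:

    import akka.actor.Actor
    import akka.cluster.Cluster
    import akka.cluster.ClusterEvent._

    // InitialStateAsEvents replays the current membership as individual
    // MemberEvent messages, so one handler covers both startup and changes
    class MemberLogger extends Actor {
      val cluster = Cluster(context.system)

      override def preStart(): Unit =
        cluster.subscribe(self, initialStateMode = InitialStateAsEvents, classOf[MemberEvent])
      override def postStop(): Unit = cluster.unsubscribe(self)

      def receive = {
        case MemberUp(m) ⇒ println(s"up: ${m.address}")
        case _: MemberEvent ⇒ // ignore
      }
    }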
TransformationResult(text.toUpperCase) + case state: CurrentClusterState ⇒ state.members.filter(_.status == MemberStatus.Up) foreach register - case MemberUp(m) => register(m) + case MemberUp(m) ⇒ register(m) } def register(member: Member): Unit = diff --git a/akka-docs/src/test/scala/docs/cluster/TransformationFrontend.scala b/akka-docs/src/test/scala/docs/cluster/TransformationFrontend.scala index da2744be73..07baf4b37c 100644 --- a/akka-docs/src/test/scala/docs/cluster/TransformationFrontend.scala +++ b/akka-docs/src/test/scala/docs/cluster/TransformationFrontend.scala @@ -19,18 +19,18 @@ class TransformationFrontend extends Actor { var jobCounter = 0 def receive = { - case job: TransformationJob if backends.isEmpty => + case job: TransformationJob if backends.isEmpty ⇒ sender() ! JobFailed("Service unavailable, try again later", job) - case job: TransformationJob => + case job: TransformationJob ⇒ jobCounter += 1 backends(jobCounter % backends.size) forward job - case BackendRegistration if !backends.contains(sender()) => + case BackendRegistration if !backends.contains(sender()) ⇒ context watch sender() backends = backends :+ sender() - case Terminated(a) => + case Terminated(a) ⇒ backends = backends.filterNot(_ == a) } } @@ -52,7 +52,7 @@ object TransformationFrontend { system.scheduler.schedule(2.seconds, 2.seconds) { implicit val timeout = Timeout(5 seconds) (frontend ? TransformationJob("hello-" + counter.incrementAndGet())) onSuccess { - case result => println(result) + case result ⇒ println(result) } } diff --git a/akka-docs/src/test/scala/docs/cluster/TransformationMessages.scala b/akka-docs/src/test/scala/docs/cluster/TransformationMessages.scala index 591d5d9862..d016e09419 100644 --- a/akka-docs/src/test/scala/docs/cluster/TransformationMessages.scala +++ b/akka-docs/src/test/scala/docs/cluster/TransformationMessages.scala @@ -5,4 +5,4 @@ final case class TransformationJob(text: String) final case class TransformationResult(text: String) final case class JobFailed(reason: String, job: TransformationJob) case object BackendRegistration -//#messages \ No newline at end of file +//#messages diff --git a/akka-docs/src/test/scala/docs/ddata/DistributedDataDocSpec.scala b/akka-docs/src/test/scala/docs/ddata/DistributedDataDocSpec.scala index c87a1ce531..e3ad490045 100644 --- a/akka-docs/src/test/scala/docs/ddata/DistributedDataDocSpec.scala +++ b/akka-docs/src/test/scala/docs/ddata/DistributedDataDocSpec.scala @@ -72,7 +72,7 @@ object DistributedDataDocSpec { replicator ! Subscribe(DataKey, self) def receive = { - case Tick => + case Tick ⇒ val s = ThreadLocalRandom.current().nextInt(97, 123).toChar.toString if (ThreadLocalRandom.current().nextBoolean()) { // add @@ -84,9 +84,9 @@ object DistributedDataDocSpec { replicator ! 
Update(DataKey, ORSet.empty[String], WriteLocal)(_ - s) } - case _: UpdateResponse[_] => // ignore + case _: UpdateResponse[_] ⇒ // ignore - case c @ Changed(DataKey) => + case c @ Changed(DataKey) ⇒ val data = c.get(DataKey) log.info("Current elements: {}", data.elements) } @@ -128,19 +128,19 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { probe.expectMsgType[UpdateResponse[_]] match { //#update-response1 - case UpdateSuccess(Counter1Key, req) => // ok + case UpdateSuccess(Counter1Key, req) ⇒ // ok //#update-response1 - case unexpected => fail("Unexpected response: " + unexpected) + case unexpected ⇒ fail("Unexpected response: " + unexpected) } probe.expectMsgType[UpdateResponse[_]] match { //#update-response2 - case UpdateSuccess(Set1Key, req) => // ok - case UpdateTimeout(Set1Key, req) => + case UpdateSuccess(Set1Key, req) ⇒ // ok + case UpdateTimeout(Set1Key, req) ⇒ // write to 3 nodes failed within 1.second //#update-response2 - case UpdateSuccess(Set2Key, None) => - case unexpected => fail("Unexpected response: " + unexpected) + case UpdateSuccess(Set2Key, None) ⇒ + case unexpected ⇒ fail("Unexpected response: " + unexpected) } } @@ -157,14 +157,14 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { val Counter1Key = PNCounterKey("counter1") def receive: Receive = { - case "increment" => + case "increment" ⇒ // incoming command to increase the counter val upd = Update(Counter1Key, PNCounter(), writeTwo, request = Some(sender()))(_ + 1) replicator ! upd - case UpdateSuccess(Counter1Key, Some(replyTo: ActorRef)) => + case UpdateSuccess(Counter1Key, Some(replyTo: ActorRef)) ⇒ replyTo ! "ack" - case UpdateTimeout(Counter1Key, Some(replyTo: ActorRef)) => + case UpdateTimeout(Counter1Key, Some(replyTo: ActorRef)) ⇒ replyTo ! "nack" } //#update-request-context @@ -195,24 +195,24 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { probe.expectMsgType[GetResponse[_]] match { //#get-response1 - case g @ GetSuccess(Counter1Key, req) => + case g @ GetSuccess(Counter1Key, req) ⇒ val value = g.get(Counter1Key).value - case NotFound(Counter1Key, req) => // key counter1 does not exist + case NotFound(Counter1Key, req) ⇒ // key counter1 does not exist //#get-response1 - case unexpected => fail("Unexpected response: " + unexpected) + case unexpected ⇒ fail("Unexpected response: " + unexpected) } probe.expectMsgType[GetResponse[_]] match { //#get-response2 - case g @ GetSuccess(Set1Key, req) => + case g @ GetSuccess(Set1Key, req) ⇒ val elements = g.get(Set1Key).elements - case GetFailure(Set1Key, req) => + case GetFailure(Set1Key, req) ⇒ // read from 3 nodes failed within 1.second - case NotFound(Set1Key, req) => // key set1 does not exist + case NotFound(Set1Key, req) ⇒ // key set1 does not exist //#get-response2 - case g @ GetSuccess(Set2Key, None) => + case g @ GetSuccess(Set2Key, None) ⇒ val elements = g.get(Set2Key).elements - case unexpected => fail("Unexpected response: " + unexpected) + case unexpected ⇒ fail("Unexpected response: " + unexpected) } } @@ -229,16 +229,16 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { val Counter1Key = PNCounterKey("counter1") def receive: Receive = { - case "get-count" => + case "get-count" ⇒ // incoming request to retrieve current value of the counter replicator ! 
Get(Counter1Key, readTwo, request = Some(sender())) - case g @ GetSuccess(Counter1Key, Some(replyTo: ActorRef)) => + case g @ GetSuccess(Counter1Key, Some(replyTo: ActorRef)) ⇒ val value = g.get(Counter1Key).value.longValue replyTo ! value - case GetFailure(Counter1Key, Some(replyTo: ActorRef)) => + case GetFailure(Counter1Key, Some(replyTo: ActorRef)) ⇒ replyTo ! -1L - case NotFound(Counter1Key, Some(replyTo: ActorRef)) => + case NotFound(Counter1Key, Some(replyTo: ActorRef)) ⇒ replyTo ! 0L } //#get-request-context @@ -258,9 +258,9 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { var currentValue = BigInt(0) def receive: Receive = { - case c @ Changed(Counter1Key) => + case c @ Changed(Counter1Key) ⇒ currentValue = c.get(Counter1Key).value - case "get-count" => + case "get-count" ⇒ // incoming request to retrieve current value of the counter sender() ! currentValue } @@ -304,7 +304,7 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) { val m2 = m1.decrement("a", 2) val m3 = m2.increment("b", 1) println(m3.get("a")) // 5 - m3.entries.foreach { case (key, value) => println(s"$key -> $value") } + m3.entries.foreach { case (key, value) ⇒ println(s"$key -> $value") } //#pncountermap } diff --git a/akka-docs/src/test/scala/docs/ddata/ShoppingCart.scala b/akka-docs/src/test/scala/docs/ddata/ShoppingCart.scala index ecfcec750e..5a806033d0 100644 --- a/akka-docs/src/test/scala/docs/ddata/ShoppingCart.scala +++ b/akka-docs/src/test/scala/docs/ddata/ShoppingCart.scala @@ -45,18 +45,18 @@ class ShoppingCart(userId: String) extends Actor { //#get-cart def receiveGetCart: Receive = { - case GetCart => + case GetCart ⇒ replicator ! Get(DataKey, readMajority, Some(sender())) - case g @ GetSuccess(DataKey, Some(replyTo: ActorRef)) => + case g @ GetSuccess(DataKey, Some(replyTo: ActorRef)) ⇒ val data = g.get(DataKey) val cart = Cart(data.entries.values.toSet) replyTo ! cart - case NotFound(DataKey, Some(replyTo: ActorRef)) => + case NotFound(DataKey, Some(replyTo: ActorRef)) ⇒ replyTo ! Cart(Set.empty) - case GetFailure(DataKey, Some(replyTo: ActorRef)) => + case GetFailure(DataKey, Some(replyTo: ActorRef)) ⇒ // ReadMajority failure, try again with local read replicator ! Get(DataKey, ReadLocal, Some(replyTo)) } @@ -64,9 +64,9 @@ class ShoppingCart(userId: String) extends Actor { //#add-item def receiveAddItem: Receive = { - case cmd @ AddItem(item) => + case cmd @ AddItem(item) ⇒ val update = Update(DataKey, LWWMap.empty[String, LineItem], writeMajority, Some(cmd)) { - cart => updateCart(cart, item) + cart ⇒ updateCart(cart, item) } replicator ! update } @@ -74,38 +74,38 @@ class ShoppingCart(userId: String) extends Actor { def updateCart(data: LWWMap[String, LineItem], item: LineItem): LWWMap[String, LineItem] = data.get(item.productId) match { - case Some(LineItem(_, _, existingQuantity)) => + case Some(LineItem(_, _, existingQuantity)) ⇒ data + (item.productId -> item.copy(quantity = existingQuantity + item.quantity)) - case None => data + (item.productId -> item) + case None ⇒ data + (item.productId -> item) } //#remove-item def receiveRemoveItem: Receive = { - case cmd @ RemoveItem(productId) => + case cmd @ RemoveItem(productId) ⇒ // Try to fetch latest from a majority of nodes first, since ORMap // remove must have seen the item to be able to remove it. replicator ! Get(DataKey, readMajority, Some(cmd)) - case GetSuccess(DataKey, Some(RemoveItem(productId))) => + case GetSuccess(DataKey, Some(RemoveItem(productId))) ⇒ replicator ! 
Update(DataKey, LWWMap(), writeMajority, None) { _ - productId } - case GetFailure(DataKey, Some(RemoveItem(productId))) => + case GetFailure(DataKey, Some(RemoveItem(productId))) ⇒ // ReadMajority failed, fall back to best effort local value replicator ! Update(DataKey, LWWMap(), writeMajority, None) { _ - productId } - case NotFound(DataKey, Some(RemoveItem(productId))) => + case NotFound(DataKey, Some(RemoveItem(productId))) ⇒ // nothing to remove } //#remove-item def receiveOther: Receive = { - case _: UpdateSuccess[_] | _: UpdateTimeout[_] => + case _: UpdateSuccess[_] | _: UpdateTimeout[_] ⇒ // UpdateTimeout, will eventually be replicated - case e: UpdateFailure[_] => throw new IllegalStateException("Unexpected failure: " + e) + case e: UpdateFailure[_] ⇒ throw new IllegalStateException("Unexpected failure: " + e) } } diff --git a/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer.scala b/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer.scala index 1b85b384c9..da98acee9e 100644 --- a/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer.scala +++ b/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer.scala @@ -22,8 +22,8 @@ class TwoPhaseSetSerializer(val system: ExtendedActorSystem) override def identifier = 99999 override def toBinary(obj: AnyRef): Array[Byte] = obj match { - case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray - case _ => throw new IllegalArgumentException( + case m: TwoPhaseSet ⇒ twoPhaseSetToProto(m).toByteArray + case _ ⇒ throw new IllegalArgumentException( s"Can't serialize object of type ${obj.getClass}") } @@ -53,8 +53,8 @@ class TwoPhaseSetSerializer(val system: ExtendedActorSystem) val msg = TwoPhaseSetMessages.TwoPhaseSet.parseFrom(bytes) val addsSet = msg.getAddsList.iterator.asScala.toSet val removalsSet = msg.getRemovalsList.iterator.asScala.toSet - val adds = addsSet.foldLeft(GSet.empty[String])((acc, el) => acc.add(el)) - val removals = removalsSet.foldLeft(GSet.empty[String])((acc, el) => acc.add(el)) + val adds = addsSet.foldLeft(GSet.empty[String])((acc, el) ⇒ acc.add(el)) + val removals = removalsSet.foldLeft(GSet.empty[String])((acc, el) ⇒ acc.add(el)) // GSet will accumulate deltas when adding elements, // but those are not of interest in the result of the deserialization TwoPhaseSet(adds.resetDelta, removals.resetDelta) @@ -66,8 +66,8 @@ class TwoPhaseSetSerializerWithCompression(system: ExtendedActorSystem) extends TwoPhaseSetSerializer(system) { //#compression override def toBinary(obj: AnyRef): Array[Byte] = obj match { - case m: TwoPhaseSet => compress(twoPhaseSetToProto(m)) - case _ => throw new IllegalArgumentException( + case m: TwoPhaseSet ⇒ compress(twoPhaseSetToProto(m)) + case _ ⇒ throw new IllegalArgumentException( s"Can't serialize object of type ${obj.getClass}") } diff --git a/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala b/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala index 3a130f89b0..f2f95f7e06 100644 --- a/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala +++ b/akka-docs/src/test/scala/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala @@ -22,8 +22,8 @@ class TwoPhaseSetSerializer2(val system: ExtendedActorSystem) val replicatedDataSerializer = new ReplicatedDataSerializer(system) override def toBinary(obj: AnyRef): Array[Byte] = obj match { - case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray - case _ => throw new IllegalArgumentException( + case m: TwoPhaseSet ⇒ 
twoPhaseSetToProto(m).toByteArray + case _ ⇒ throw new IllegalArgumentException( s"Can't serialize object of type ${obj.getClass}") } diff --git a/akka-docs/src/test/scala/docs/dispatcher/DispatcherDocSpec.scala b/akka-docs/src/test/scala/docs/dispatcher/DispatcherDocSpec.scala index 7af5de2967..e9e42ed84c 100644 --- a/akka-docs/src/test/scala/docs/dispatcher/DispatcherDocSpec.scala +++ b/akka-docs/src/test/scala/docs/dispatcher/DispatcherDocSpec.scala @@ -229,16 +229,16 @@ object DispatcherDocSpec { // Create a new PriorityGenerator, lower prio means more important PriorityGenerator { // 'highpriority messages should be treated first if possible - case 'highpriority => 0 + case 'highpriority ⇒ 0 // 'lowpriority messages should be treated last if possible - case 'lowpriority => 2 + case 'lowpriority ⇒ 2 // PoisonPill when no other left - case PoisonPill => 3 + case PoisonPill ⇒ 3 // We default to 1, which is in between high and low - case otherwise => 1 + case otherwise ⇒ 1 }) //#prio-mailbox @@ -250,7 +250,7 @@ object DispatcherDocSpec { class MyActor extends Actor { def receive = { - case x => + case x ⇒ } } @@ -267,7 +267,7 @@ object DispatcherDocSpec { with RequiresMessageQueue[MyUnboundedMessageQueueSemantics] { //#require-mailbox-on-actor def receive = { - case _ => + case _ ⇒ } //#require-mailbox-on-actor // ... @@ -370,7 +370,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { self ! PoisonPill def receive = { - case x => log.info(x.toString) + case x ⇒ log.info(x.toString) } } val a = system.actorOf(Props(classOf[Logger], this).withDispatcher( @@ -389,7 +389,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { //#prio-dispatcher watch(a) - expectMsgPF() { case Terminated(`a`) => () } + expectMsgPF() { case Terminated(`a`) ⇒ () } } } @@ -407,7 +407,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { self ! 
PoisonPill def receive = { - case x => log.info(x.toString) + case x ⇒ log.info(x.toString) } } val a = system.actorOf(Props(classOf[Logger], this).withDispatcher( @@ -422,7 +422,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { //#control-aware-dispatcher watch(a) - expectMsgPF() { case Terminated(`a`) => () } + expectMsgPF() { case Terminated(`a`) ⇒ () } } } diff --git a/akka-docs/src/test/scala/docs/dispatcher/MyUnboundedMailbox.scala b/akka-docs/src/test/scala/docs/dispatcher/MyUnboundedMailbox.scala index a49029e894..93ce5584c6 100644 --- a/akka-docs/src/test/scala/docs/dispatcher/MyUnboundedMailbox.scala +++ b/akka-docs/src/test/scala/docs/dispatcher/MyUnboundedMailbox.scala @@ -59,4 +59,4 @@ class MyUnboundedMailbox extends MailboxType //#mailbox-marker-interface // Marker trait used for mailbox requirements mapping trait MyUnboundedMessageQueueSemantics -//#mailbox-marker-interface \ No newline at end of file +//#mailbox-marker-interface diff --git a/akka-docs/src/test/scala/docs/event/EventBusDocSpec.scala b/akka-docs/src/test/scala/docs/event/EventBusDocSpec.scala index 102147b5f1..8910932c84 100644 --- a/akka-docs/src/test/scala/docs/event/EventBusDocSpec.scala +++ b/akka-docs/src/test/scala/docs/event/EventBusDocSpec.scala @@ -25,7 +25,7 @@ object EventBusDocSpec { type Classifier = String type Subscriber = ActorRef - // is used for extracting the classifier from the incoming events + // is used for extracting the classifier from the incoming events override protected def classify(event: Event): Classifier = event.topic // will be invoked for each event for all subscribers which registered themselves @@ -74,7 +74,7 @@ object EventBusDocSpec { override protected val subclassification: Subclassification[Classifier] = new StartsWithSubclassification - // is used for extracting the classifier from the incoming events + // is used for extracting the classifier from the incoming events override protected def classify(event: Event): Classifier = event.topic // will be invoked for each event for all subscribers which registered @@ -102,7 +102,7 @@ object EventBusDocSpec { override protected def compareClassifiers(a: Classifier, b: Classifier): Int = if (a < b) -1 else if (a == b) 0 else 1 - // is needed for storing subscribers in an ordered collection + // is needed for storing subscribers in an ordered collection override protected def compareSubscribers(a: Subscriber, b: Subscriber): Int = a.compareTo(b) diff --git a/akka-docs/src/test/scala/docs/event/LoggingDocSpec.scala b/akka-docs/src/test/scala/docs/event/LoggingDocSpec.scala index 525893ef77..0dc22568a5 100644 --- a/akka-docs/src/test/scala/docs/event/LoggingDocSpec.scala +++ b/akka-docs/src/test/scala/docs/event/LoggingDocSpec.scala @@ -21,8 +21,8 @@ object LoggingDocSpec { reason.getMessage, message.getOrElse("")) } def receive = { - case "test" => log.info("Received test") - case x => log.warning("Received unknown message: {}", x) + case "test" ⇒ log.info("Received test") + case x ⇒ log.warning("Received unknown message: {}", x) } } //#my-actor @@ -33,7 +33,7 @@ object LoggingDocSpec { val log = Logging(this) def receive = { - case _ => { + case _ ⇒ { //#mdc val mdc = Map("requestId" -> 1234, "visitorId" -> 5678) log.mdc(mdc) @@ -59,14 +59,14 @@ object LoggingDocSpec { reqId += 1 val always = Map("requestId" -> reqId) val perMessage = currentMessage match { - case r: Req => Map("visitorId" -> r.visitorId) - case _ => Map() + case r: Req ⇒ Map("visitorId" -> r.visitorId) + case _ ⇒ Map() } always ++ 
perMessage } def receive: Receive = { - case r: Req => { + case r: Req ⇒ { log.info(s"Starting new request: ${r.work}") } } @@ -84,11 +84,11 @@ object LoggingDocSpec { class MyEventListener extends Actor { def receive = { - case InitializeLogger(_) => sender() ! LoggerInitialized - case Error(cause, logSource, logClass, message) => // ... - case Warning(logSource, logClass, message) => // ... - case Info(logSource, logClass, message) => // ... - case Debug(logSource, logClass, message) => // ... + case InitializeLogger(_) ⇒ sender() ! LoggerInitialized + case Error(cause, logSource, logClass, message) ⇒ // ... + case Warning(logSource, logClass, message) ⇒ // ... + case Info(logSource, logClass, message) ⇒ // ... + case Debug(logSource, logClass, message) ⇒ // ... } } //#my-event-listener @@ -120,7 +120,7 @@ object LoggingDocSpec { class DeadLetterListener extends Actor { def receive = { - case d: DeadLetter => println(d) + case d: DeadLetter ⇒ println(d) } } //#deadletters @@ -132,8 +132,8 @@ object LoggingDocSpec { class Listener extends Actor { def receive = { - case m: Jazz => println(s"${self.path.name} is listening to: ${m.artist}") - case m: Electronic => println(s"${self.path.name} is listening to: ${m.artist}") + case m: Jazz ⇒ println(s"${self.path.name} is listening to: ${m.artist}") + case m: Electronic ⇒ println(s"${self.path.name} is listening to: ${m.artist}") } } //#superclass-subscription-eventstream diff --git a/akka-docs/src/test/scala/docs/extension/ExtensionDocSpec.scala b/akka-docs/src/test/scala/docs/extension/ExtensionDocSpec.scala index 86cbdb7ee1..a8c1c7bfc2 100644 --- a/akka-docs/src/test/scala/docs/extension/ExtensionDocSpec.scala +++ b/akka-docs/src/test/scala/docs/extension/ExtensionDocSpec.scala @@ -60,7 +60,7 @@ object ExtensionDocSpec { class MyActor extends Actor { def receive = { - case someMessage => + case someMessage ⇒ CountExtension(context.system).increment() } } @@ -68,12 +68,12 @@ object ExtensionDocSpec { //#extension-usage-actor-trait - trait Counting { self: Actor => + trait Counting { self: Actor ⇒ def increment() = CountExtension(context.system).increment() } class MyCounterActor extends Actor with Counting { def receive = { - case someMessage => increment() + case someMessage ⇒ increment() } } //#extension-usage-actor-trait diff --git a/akka-docs/src/test/scala/docs/extension/SettingsExtensionDocSpec.scala b/akka-docs/src/test/scala/docs/extension/SettingsExtensionDocSpec.scala index 2d71926347..2a6686c397 100644 --- a/akka-docs/src/test/scala/docs/extension/SettingsExtensionDocSpec.scala +++ b/akka-docs/src/test/scala/docs/extension/SettingsExtensionDocSpec.scala @@ -66,7 +66,7 @@ object SettingsExtensionDocSpec { //#extension-usage-actor def receive = { - case someMessage => + case someMessage ⇒ } def connect(dbUri: String, circuitBreakerTimeout: Duration) = { diff --git a/akka-docs/src/test/scala/docs/faq/Faq.scala b/akka-docs/src/test/scala/docs/faq/Faq.scala index 9ba5025db1..661d5a66cf 100644 --- a/akka-docs/src/test/scala/docs/faq/Faq.scala +++ b/akka-docs/src/test/scala/docs/faq/Faq.scala @@ -17,14 +17,14 @@ object MyActor { class MyActor extends Actor { import MyActor._ def receive = { - case message: Message => message match { - case BarMessage(bar) => sender() ! BazMessage("Got " + bar) - // warning here: + case message: Message ⇒ message match { + case BarMessage(bar) ⇒ sender() ! BazMessage("Got " + bar) + // warning here: // "match may not be exhaustive. 
It would fail on the following input: FooMessage(_)" //#exhaustiveness-check - case FooMessage(_) => // avoid the warning in our build logs + case FooMessage(_) ⇒ // avoid the warning in our build logs //#exhaustiveness-check } } } -//#exhaustiveness-check \ No newline at end of file +//#exhaustiveness-check diff --git a/akka-docs/src/test/scala/docs/future/FutureDocSpec.scala b/akka-docs/src/test/scala/docs/future/FutureDocSpec.scala index 58e5c9861e..84120ed7d7 100644 --- a/akka-docs/src/test/scala/docs/future/FutureDocSpec.scala +++ b/akka-docs/src/test/scala/docs/future/FutureDocSpec.scala @@ -18,9 +18,9 @@ object FutureDocSpec { class MyActor extends Actor { def receive = { - case x: String => sender() ! x.toUpperCase - case x: Int if x < 0 => sender() ! Status.Failure(new ArithmeticException("Negative values not supported")) - case x: Int => sender() ! x + case x: String ⇒ sender() ! x.toUpperCase + case x: Int if x < 0 ⇒ sender() ! Status.Failure(new ArithmeticException("Negative values not supported")) + case x: Int ⇒ sender() ! x } } @@ -29,7 +29,7 @@ object FutureDocSpec { class OddActor extends Actor { var n = 1 def receive = { - case GetNext => + case GetNext ⇒ sender() ! n n += 2 } @@ -40,7 +40,7 @@ class FutureDocSpec extends AkkaSpec { import FutureDocSpec._ import system.dispatcher - val println: PartialFunction[Any, Unit] = { case _ => } + val println: PartialFunction[Any, Unit] = { case _ ⇒ } "demonstrate usage custom ExecutionContext" in { val yourExecutorServiceGoesHere = java.util.concurrent.Executors.newSingleThreadExecutor() @@ -112,7 +112,7 @@ class FutureDocSpec extends AkkaSpec { val f1 = Future { "Hello" + "World" } - val f2 = f1 map { x => + val f2 = f1 map { x ⇒ x.length } f2 foreach println @@ -128,8 +128,8 @@ class FutureDocSpec extends AkkaSpec { "Hello" + "World" } val f2 = Future.successful(3) - val f3 = f1 map { x => - f2 map { y => + val f3 = f1 map { x ⇒ + f2 map { y ⇒ x.length * y } } @@ -144,8 +144,8 @@ class FutureDocSpec extends AkkaSpec { "Hello" + "World" } val f2 = Future.successful(3) - val f3 = f1 flatMap { x => - f2 map { y => + val f3 = f1 flatMap { x ⇒ + f2 map { y ⇒ x.length * y } } @@ -164,7 +164,7 @@ class FutureDocSpec extends AkkaSpec { val failedFilter = future1.filter(_ % 2 == 1).recover { // When filter fails, it will have a java.util.NoSuchElementException - case m: NoSuchElementException => 0 + case m: NoSuchElementException ⇒ 0 } failedFilter foreach println @@ -178,9 +178,9 @@ class FutureDocSpec extends AkkaSpec { "demonstrate usage of for comprehension" in { //#for-comprehension val f = for { - a <- Future(10 / 2) // 10 / 2 = 5 - b <- Future(a + 1) // 5 + 1 = 6 - c <- Future(a - 1) // 5 - 1 = 4 + a ← Future(10 / 2) // 10 / 2 = 5 + b ← Future(a + 1) // 5 + 1 = 6 + c ← Future(a - 1) // 5 - 1 = 4 if c > 3 // Future.filter } yield b * c // 6 * 4 = 24 @@ -232,9 +232,9 @@ class FutureDocSpec extends AkkaSpec { val f2 = ask(actor2, msg2) val f3 = for { - a <- f1.mapTo[Int] - b <- f2.mapTo[Int] - c <- ask(actor3, (a + b)).mapTo[Int] + a ← f1.mapTo[Int] + b ← f2.mapTo[Int] + c ← ask(actor3, (a + b)).mapTo[Int] } yield c f3 foreach println @@ -262,7 +262,7 @@ class FutureDocSpec extends AkkaSpec { "demonstrate usage of sequence" in { //#sequence - val futureList = Future.sequence((1 to 100).toList.map(x => Future(x * 2 - 1))) + val futureList = Future.sequence((1 to 100).toList.map(x ⇒ Future(x * 2 - 1))) val oddSum = futureList.map(_.sum) oddSum foreach println //#sequence @@ -271,7 +271,7 @@ class FutureDocSpec extends AkkaSpec { 
"demonstrate usage of traverse" in { //#traverse - val futureList = Future.traverse((1 to 100).toList)(x => Future(x * 2 - 1)) + val futureList = Future.traverse((1 to 100).toList)(x ⇒ Future(x * 2 - 1)) val oddSum = futureList.map(_.sum) oddSum foreach println //#traverse @@ -281,7 +281,7 @@ class FutureDocSpec extends AkkaSpec { "demonstrate usage of fold" in { //#fold // Create a sequence of Futures - val futures = for (i <- 1 to 1000) yield Future(i * 2) + val futures = for (i ← 1 to 1000) yield Future(i * 2) val futureSum = Future.fold(futures)(0)(_ + _) futureSum foreach println //#fold @@ -291,7 +291,7 @@ class FutureDocSpec extends AkkaSpec { "demonstrate usage of reduce" in { //#reduce // Create a sequence of Futures - val futures = for (i <- 1 to 1000) yield Future(i * 2) + val futures = for (i ← 1 to 1000) yield Future(i * 2) val futureSum = Future.reduce(futures)(_ + _) futureSum foreach println //#reduce @@ -304,7 +304,7 @@ class FutureDocSpec extends AkkaSpec { val msg1 = -1 //#recover val future = akka.pattern.ask(actor, msg1) recover { - case e: ArithmeticException => 0 + case e: ArithmeticException ⇒ 0 } future foreach println //#recover @@ -317,8 +317,8 @@ class FutureDocSpec extends AkkaSpec { val msg1 = -1 //#try-recover val future = akka.pattern.ask(actor, msg1) recoverWith { - case e: ArithmeticException => Future.successful(0) - case foo: IllegalArgumentException => + case e: ArithmeticException ⇒ Future.successful(0) + case foo: IllegalArgumentException ⇒ Future.failed[Int](new IllegalStateException("All br0ken!")) } future foreach println @@ -330,7 +330,7 @@ class FutureDocSpec extends AkkaSpec { val future1 = Future { "foo" } val future2 = Future { "bar" } //#zip - val future3 = future1 zip future2 map { case (a, b) => a + " " + b } + val future3 = future1 zip future2 map { case (a, b) ⇒ a + " " + b } future3 foreach println //#zip Await.result(future3, 3 seconds) should be("foo bar") @@ -343,9 +343,9 @@ class FutureDocSpec extends AkkaSpec { def watchSomeTV(): Unit = () //#and-then val result = Future { loadPage(url) } andThen { - case Failure(exception) => log(exception) + case Failure(exception) ⇒ log(exception) } andThen { - case _ => watchSomeTV() + case _ ⇒ watchSomeTV() } result foreach println //#and-then @@ -368,8 +368,8 @@ class FutureDocSpec extends AkkaSpec { val future = Future { "foo" } //#onSuccess future onSuccess { - case "bar" => println("Got my bar alright!") - case x: String => println("Got some random string: " + x) + case "bar" ⇒ println("Got my bar alright!") + case x: String ⇒ println("Got some random string: " + x) } //#onSuccess Await.result(future, 3 seconds) should be("foo") @@ -378,9 +378,9 @@ class FutureDocSpec extends AkkaSpec { val future = Future.failed[String](new IllegalStateException("OHNOES")) //#onFailure future onFailure { - case ise: IllegalStateException if ise.getMessage == "OHNOES" => + case ise: IllegalStateException if ise.getMessage == "OHNOES" ⇒ //OHNOES! We are in deep trouble, do something! 
- case e: Exception => + case e: Exception ⇒ //Do something else } //#onFailure Await.result(future, 3 seconds) should be("foo") } "demonstrate usage of onComplete" in { val future = Future { "foo" } def doSomethingOnSuccess(r: String) = () def doSomethingOnFailure(t: Throwable) = () //#onComplete future onComplete { - case Success(result) => doSomethingOnSuccess(result) - case Failure(failure) => doSomethingOnFailure(failure) + case Success(result) ⇒ doSomethingOnSuccess(result) + case Failure(failure) ⇒ doSomethingOnFailure(failure) } //#onComplete Await.result(future, 3 seconds) should be("foo") @@ -436,7 +436,7 @@ class FutureDocSpec extends AkkaSpec { val f = Future("hello") def receive = { //#receive-omitted - case _ => + case _ ⇒ //#receive-omitted } } diff --git a/akka-docs/src/test/scala/docs/io/EchoServer.scala b/akka-docs/src/test/scala/docs/io/EchoServer.scala index 21b7c71923..4480a2a6ab 100644 --- a/akka-docs/src/test/scala/docs/io/EchoServer.scala +++ b/akka-docs/src/test/scala/docs/io/EchoServer.scala @@ -53,15 +53,15 @@ class EchoManager(handlerClass: Class[_]) extends Actor with ActorLogging { override def postRestart(thr: Throwable): Unit = context stop self def receive = { - case Bound(localAddress) => + case Bound(localAddress) ⇒ log.info("listening on port {}", localAddress.getPort) - case CommandFailed(Bind(_, local, _, _, _)) => + case CommandFailed(Bind(_, local, _, _, _)) ⇒ log.warning(s"cannot bind to [$local]") context stop self //#echo-manager - case Connected(remote, local) => + case Connected(remote, local) ⇒ log.info("received connection from {}", remote) val handler = context.actorOf(Props(handlerClass, sender(), remote)) sender() ! Register(handler, keepOpenOnPeerClosed = true) @@ -92,18 +92,18 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress) //#writing def writing: Receive = { - case Received(data) => + case Received(data) ⇒ connection ! Write(data, Ack(currentOffset)) buffer(data) - case Ack(ack) => + case Ack(ack) ⇒ acknowledge(ack) - case CommandFailed(Write(_, Ack(ack))) => + case CommandFailed(Write(_, Ack(ack))) ⇒ connection ! ResumeWriting context become buffering(ack) - case PeerClosed => + case PeerClosed ⇒ if (storage.isEmpty) context stop self else context become closing } @@ -115,11 +115,11 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress) var peerClosed = false { - case Received(data) => buffer(data) - case WritingResumed => writeFirst() - case PeerClosed => peerClosed = true - case Ack(ack) if ack < nack => acknowledge(ack) - case Ack(ack) => + case Received(data) ⇒ buffer(data) + case WritingResumed ⇒ writeFirst() + case PeerClosed ⇒ peerClosed = true + case Ack(ack) if ack < nack ⇒ acknowledge(ack) + case Ack(ack) ⇒ acknowledge(ack) if (storage.nonEmpty) { if (toAck > 0) { @@ -139,19 +139,19 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress) //#closing def closing: Receive = { - case CommandFailed(_: Write) => + case CommandFailed(_: Write) ⇒ connection ! ResumeWriting context.become({ - case WritingResumed => + case WritingResumed ⇒ writeAll() context.unbecome() - case ack: Int => acknowledge(ack) + case ack: Int ⇒ acknowledge(ack) }, discardOld = false) - case Ack(ack) => + case Ack(ack) ⇒ acknowledge(ack) if (storage.isEmpty) context stop self } @@ -214,7 +214,7 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress) } private def writeAll(): Unit = { - for ((data, i) <- storage.zipWithIndex) { + for ((data, i) ← storage.zipWithIndex) { connection ! 
Write(data, Ack(storageOffset + i)) } } @@ -235,17 +235,17 @@ class SimpleEchoHandler(connection: ActorRef, remote: InetSocketAddress) case object Ack extends Event def receive = { - case Received(data) => + case Received(data) ⇒ buffer(data) connection ! Write(data, Ack) context.become({ - case Received(data) => buffer(data) - case Ack => acknowledge() - case PeerClosed => closing = true + case Received(data) ⇒ buffer(data) + case Ack ⇒ acknowledge() + case PeerClosed ⇒ closing = true }, discardOld = false) - case PeerClosed => context stop self + case PeerClosed ⇒ context stop self } //#storage-omitted diff --git a/akka-docs/src/test/scala/docs/io/IODocSpec.scala b/akka-docs/src/test/scala/docs/io/IODocSpec.scala index b78806cd0a..a8e5651070 100644 --- a/akka-docs/src/test/scala/docs/io/IODocSpec.scala +++ b/akka-docs/src/test/scala/docs/io/IODocSpec.scala @@ -34,14 +34,14 @@ class Server extends Actor { IO(Tcp) ! Bind(self, new InetSocketAddress("localhost", 0)) def receive = { - case b @ Bound(localAddress) => + case b @ Bound(localAddress) ⇒ //#do-some-logging-or-setup context.parent ! b //#do-some-logging-or-setup - case CommandFailed(_: Bind) => context stop self + case CommandFailed(_: Bind) ⇒ context stop self - case c @ Connected(remote, local) => + case c @ Connected(remote, local) ⇒ //#server context.parent ! c //#server @@ -57,8 +57,8 @@ class Server extends Actor { class SimplisticHandler extends Actor { import Tcp._ def receive = { - case Received(data) => sender() ! Write(data) - case PeerClosed => context stop self + case Received(data) ⇒ sender() ! Write(data) + case PeerClosed ⇒ context stop self } } //#simplistic-handler @@ -77,25 +77,25 @@ class Client(remote: InetSocketAddress, listener: ActorRef) extends Actor { IO(Tcp) ! Connect(remote) def receive = { - case CommandFailed(_: Connect) => + case CommandFailed(_: Connect) ⇒ listener ! "connect failed" context stop self - case c @ Connected(remote, local) => + case c @ Connected(remote, local) ⇒ listener ! c val connection = sender() connection ! Register(self) context become { - case data: ByteString => + case data: ByteString ⇒ connection ! Write(data) - case CommandFailed(w: Write) => + case CommandFailed(w: Write) ⇒ // O/S buffer was full listener ! "write failed" - case Received(data) => + case Received(data) ⇒ listener ! data - case "close" => + case "close" ⇒ connection ! Close - case _: ConnectionClosed => + case _: ConnectionClosed ⇒ listener ! "connection closed" context stop self } @@ -108,7 +108,7 @@ class IODocSpec extends AkkaSpec { class Parent extends Actor { context.actorOf(Props[Server], "server") def receive = { - case msg => testActor forward msg + case msg ⇒ testActor forward msg } } diff --git a/akka-docs/src/test/scala/docs/io/ReadBackPressure.scala b/akka-docs/src/test/scala/docs/io/ReadBackPressure.scala index b043a9e03d..c2864b184e 100644 --- a/akka-docs/src/test/scala/docs/io/ReadBackPressure.scala +++ b/akka-docs/src/test/scala/docs/io/ReadBackPressure.scala @@ -26,7 +26,7 @@ object PullReadingExample { def receive = { //#pull-accepting - case Bound(localAddress) => + case Bound(localAddress) ⇒ // Accept connections one by one sender() ! ResumeAccepting(batchSize = 1) context.become(listening(sender())) @@ -36,7 +36,7 @@ object PullReadingExample { //#pull-accepting-cont def listening(listener: ActorRef): Receive = { - case Connected(remote, local) => + case Connected(remote, local) ⇒ val handler = context.actorOf(Props(classOf[PullEcho], sender())) sender() ! 
Register(handler, keepOpenOnPeerClosed = true) listener ! ResumeAccepting(batchSize = 1) @@ -53,8 +53,8 @@ object PullReadingExample { override def preStart: Unit = connection ! ResumeReading def receive = { - case Received(data) => connection ! Write(data, Ack) - case Ack => connection ! ResumeReading + case Received(data) ⇒ connection ! Write(data, Ack) + case Ack ⇒ connection ! ResumeReading } //#pull-reading-echo } diff --git a/akka-docs/src/test/scala/docs/io/ScalaUdpMulticast.scala b/akka-docs/src/test/scala/docs/io/ScalaUdpMulticast.scala index 086f857544..6d8c6915cc 100644 --- a/akka-docs/src/test/scala/docs/io/ScalaUdpMulticast.scala +++ b/akka-docs/src/test/scala/docs/io/ScalaUdpMulticast.scala @@ -38,10 +38,10 @@ class Listener(iface: String, group: String, port: Int, sink: ActorRef) extends //#bind def receive = { - case b @ Udp.Bound(to) => + case b @ Udp.Bound(to) ⇒ log.info("Bound to {}", to) sink ! (b) - case Udp.Received(data, remote) => + case Udp.Received(data, remote) ⇒ val msg = data.decodeString("utf-8") log.info("Received '{}' from {}", msg, remote) sink ! msg @@ -53,7 +53,7 @@ class Sender(iface: String, group: String, port: Int, msg: String) extends Actor IO(Udp) ! Udp.SimpleSender(List(Inet6ProtocolFamily())) def receive = { - case Udp.SimpleSenderReady => { + case Udp.SimpleSenderReady ⇒ { val remote = new InetSocketAddress(s"$group%$iface", port) log.info("Sending message to {}", remote) sender() ! Udp.Send(ByteString(msg), remote) diff --git a/akka-docs/src/test/scala/docs/io/ScalaUdpMulticastSpec.scala b/akka-docs/src/test/scala/docs/io/ScalaUdpMulticastSpec.scala index ec05d89b4d..2e4b5a900c 100644 --- a/akka-docs/src/test/scala/docs/io/ScalaUdpMulticastSpec.scala +++ b/akka-docs/src/test/scala/docs/io/ScalaUdpMulticastSpec.scala @@ -20,7 +20,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec") "listener" should { "send message back to sink" in { val ipv6ifaces = - NetworkInterface.getNetworkInterfaces.toSeq.filter(iface => + NetworkInterface.getNetworkInterfaces.toSeq.filter(iface ⇒ iface.supportsMulticast && iface.isUp && iface.getInetAddresses.exists(_.isInstanceOf[Inet6Address])) @@ -33,7 +33,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec") // on the platform (awsdl0 can't be used on OSX, docker[0-9] can't be used in a docker machine etc.) // therefore: try hard to find an interface that _does_ work, and only fail if there was any potentially // working interfaces but all failed - ipv6ifaces.exists { ipv6iface => + ipv6ifaces.exists { ipv6iface ⇒ // host assigned link local multicast address http://tools.ietf.org/html/rfc3307#section-4.3.2 // generate a random 32 bit multicast address with the high order bit set val randomAddress: String = (Random.nextInt().abs.toLong | (1L << 31)).toHexString.toUpperCase @@ -51,7 +51,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec") true } catch { - case _: AssertionError => + case _: AssertionError ⇒ system.log.info("Failed to run test on interface {}", ipv6iface.getDisplayName) false diff --git a/akka-docs/src/test/scala/docs/io/UdpDocSpec.scala b/akka-docs/src/test/scala/docs/io/UdpDocSpec.scala index e8c11587c9..9490191ff1 100644 --- a/akka-docs/src/test/scala/docs/io/UdpDocSpec.scala +++ b/akka-docs/src/test/scala/docs/io/UdpDocSpec.scala @@ -21,7 +21,7 @@ object ScalaUdpDocSpec { IO(Udp) ! 
Udp.SimpleSender def receive = { - case Udp.SimpleSenderReady => + case Udp.SimpleSenderReady ⇒ context.become(ready(sender())) //#sender sender() ! Udp.Send(ByteString("hello"), remote) @@ -29,7 +29,7 @@ object ScalaUdpDocSpec { } def ready(send: ActorRef): Receive = { - case msg: String => + case msg: String ⇒ send ! Udp.Send(ByteString(msg), remote) //#sender if (msg == "world") send ! PoisonPill @@ -44,7 +44,7 @@ object ScalaUdpDocSpec { IO(Udp) ! Udp.Bind(self, new InetSocketAddress("localhost", 0)) def receive = { - case Udp.Bound(local) => + case Udp.Bound(local) ⇒ //#listener nextActor forward local //#listener @@ -52,15 +52,15 @@ object ScalaUdpDocSpec { } def ready(socket: ActorRef): Receive = { - case Udp.Received(data, remote) => + case Udp.Received(data, remote) ⇒ val processed = // parse data etc., e.g. using PipelineStage //#listener data.utf8String //#listener socket ! Udp.Send(data, remote) // example server echoes back nextActor ! processed - case Udp.Unbind => socket ! Udp.Unbind - case Udp.Unbound => context.stop(self) + case Udp.Unbind ⇒ socket ! Udp.Unbind + case Udp.Unbound ⇒ context.stop(self) } } //#listener @@ -71,7 +71,7 @@ object ScalaUdpDocSpec { IO(UdpConnected) ! UdpConnected.Connect(self, remote) def receive = { - case UdpConnected.Connected => + case UdpConnected.Connected ⇒ context.become(ready(sender())) //#connected sender() ! UdpConnected.Send(ByteString("hello")) @@ -79,17 +79,17 @@ object ScalaUdpDocSpec { } def ready(connection: ActorRef): Receive = { - case UdpConnected.Received(data) => + case UdpConnected.Received(data) ⇒ // process data, send it on, etc. //#connected if (data.utf8String == "hello") connection ! UdpConnected.Send(ByteString("world")) //#connected - case msg: String => + case msg: String ⇒ connection ! UdpConnected.Send(ByteString(msg)) - case UdpConnected.Disconnect => + case UdpConnected.Disconnect ⇒ connection ! 
UdpConnected.Disconnect - case UdpConnected.Disconnected => context.stop(self) + case UdpConnected.Disconnected ⇒ context.stop(self) } } //#connected diff --git a/akka-docs/src/test/scala/docs/pattern/BackoffSupervisorDocSpec.scala b/akka-docs/src/test/scala/docs/pattern/BackoffSupervisorDocSpec.scala index 22b4fd74d0..7644276a9b 100644 --- a/akka-docs/src/test/scala/docs/pattern/BackoffSupervisorDocSpec.scala +++ b/akka-docs/src/test/scala/docs/pattern/BackoffSupervisorDocSpec.scala @@ -59,13 +59,13 @@ class BackoffSupervisorDocSpec { //#backoff-custom-stop val supervisor = BackoffSupervisor.props( Backoff.onStop( - childProps, - childName = "myEcho", - minBackoff = 3.seconds, - maxBackoff = 30.seconds, - randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly - ).withManualReset // the child must send BackoffSupervisor.Reset to its parent - .withDefaultStoppingStrategy // Stop at any Exception thrown + childProps, + childName = "myEcho", + minBackoff = 3.seconds, + maxBackoff = 30.seconds, + randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly + ).withManualReset // the child must send BackoffSupervisor.Reset to its parent + .withDefaultStoppingStrategy // Stop at any Exception thrown ) //#backoff-custom-stop @@ -89,8 +89,8 @@ class BackoffSupervisorDocSpec { ).withAutoReset(10.seconds) // reset if the child does not throw any errors within 10 seconds .withSupervisorStrategy( OneForOneStrategy() { - case _: MyException => SupervisorStrategy.Restart - case _ => SupervisorStrategy.Escalate + case _: MyException ⇒ SupervisorStrategy.Restart + case _ ⇒ SupervisorStrategy.Escalate })) //#backoff-custom-fail diff --git a/akka-docs/src/test/scala/docs/persistence/PersistenceDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/PersistenceDocSpec.scala index d076177ce6..8dea85266a 100644 --- a/akka-docs/src/test/scala/docs/persistence/PersistenceDocSpec.scala +++ b/akka-docs/src/test/scala/docs/persistence/PersistenceDocSpec.scala @@ -48,14 +48,14 @@ object PersistenceDocSpec { //#recovery-completed override def receiveRecover: Receive = { - case RecoveryCompleted => + case RecoveryCompleted ⇒ // perform init after recovery, before any other messages //... - case evt => //... + case evt ⇒ //... } override def receiveCommand: Receive = { - case msg => //... + case msg ⇒ //... } //#recovery-completed } @@ -84,10 +84,10 @@ object PersistenceDocSpec { //#persistence-id-override override def receiveRecover: Receive = { - case _ => + case _ ⇒ } override def receiveCommand: Receive = { - case _ => + case _ ⇒ } } } @@ -128,25 +128,25 @@ object PersistenceDocSpec { override def persistenceId: String = "persistence-id" override def receiveCommand: Receive = { - case s: String => persist(MsgSent(s))(updateState) - case Confirm(deliveryId) => persist(MsgConfirmed(deliveryId))(updateState) + case s: String ⇒ persist(MsgSent(s))(updateState) + case Confirm(deliveryId) ⇒ persist(MsgConfirmed(deliveryId))(updateState) } override def receiveRecover: Receive = { - case evt: Evt => updateState(evt) + case evt: Evt ⇒ updateState(evt) } def updateState(evt: Evt): Unit = evt match { - case MsgSent(s) => - deliver(destination)(deliveryId => Msg(deliveryId, s)) + case MsgSent(s) ⇒ + deliver(destination)(deliveryId ⇒ Msg(deliveryId, s)) - case MsgConfirmed(deliveryId) => confirmDelivery(deliveryId) + case MsgConfirmed(deliveryId) ⇒ confirmDelivery(deliveryId) } } class MyDestination extends Actor { def receive = { - case Msg(deliveryId, s) => + case Msg(deliveryId, s) ⇒ // ... 
sender() ! Confirm(deliveryId) } @@ -166,10 +166,10 @@ object PersistenceDocSpec { val snapShotInterval = 1000 override def receiveCommand: Receive = { - case SaveSnapshotSuccess(metadata) => // ... - case SaveSnapshotFailure(metadata, reason) => // ... - case cmd: String => - persist(s"evt-$cmd") { e => + case SaveSnapshotSuccess(metadata) ⇒ // ... + case SaveSnapshotFailure(metadata, reason) ⇒ // ... + case cmd: String ⇒ + persist(s"evt-$cmd") { e ⇒ updateState(e) if (lastSequenceNr % snapShotInterval == 0 && lastSequenceNr != 0) saveSnapshot(state) @@ -195,9 +195,9 @@ object PersistenceDocSpec { var state: Any = _ override def receiveRecover: Receive = { - case SnapshotOffer(metadata, offeredSnapshot) => state = offeredSnapshot - case RecoveryCompleted => - case event => // ... + case SnapshotOffer(metadata, offeredSnapshot) ⇒ state = offeredSnapshot + case RecoveryCompleted ⇒ + case event ⇒ // ... } //#snapshot-offer @@ -214,14 +214,14 @@ object PersistenceDocSpec { override def persistenceId = "my-stable-persistence-id" override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } override def receiveCommand: Receive = { - case c: String => { + case c: String ⇒ { sender() ! c - persistAsync(s"evt-$c-1") { e => sender() ! e } - persistAsync(s"evt-$c-2") { e => sender() ! e } + persistAsync(s"evt-$c-1") { e ⇒ sender() ! e } + persistAsync(s"evt-$c-2") { e ⇒ sender() ! e } } } } @@ -249,15 +249,15 @@ object PersistenceDocSpec { override def persistenceId = "my-stable-persistence-id" override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } override def receiveCommand: Receive = { - case c: String => { + case c: String ⇒ { sender() ! c - persistAsync(s"evt-$c-1") { e => sender() ! e } - persistAsync(s"evt-$c-2") { e => sender() ! e } - deferAsync(s"evt-$c-3") { e => sender() ! e } + persistAsync(s"evt-$c-1") { e ⇒ sender() ! e } + persistAsync(s"evt-$c-2") { e ⇒ sender() ! e } + deferAsync(s"evt-$c-3") { e ⇒ sender() ! e } } } } @@ -287,15 +287,15 @@ object PersistenceDocSpec { override def persistenceId = "my-stable-persistence-id" override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } override def receiveCommand: Receive = { - case c: String => { + case c: String ⇒ { sender() ! c - persist(s"evt-$c-1") { e => sender() ! e } - persist(s"evt-$c-2") { e => sender() ! e } - deferAsync(s"evt-$c-3") { e => sender() ! e } + persist(s"evt-$c-1") { e ⇒ sender() ! e } + persist(s"evt-$c-2") { e ⇒ sender() ! e } + deferAsync(s"evt-$c-3") { e ⇒ sender() ! e } } } } @@ -308,24 +308,24 @@ object PersistenceDocSpec { override def persistenceId = "my-stable-persistence-id" override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } //#nested-persist-persist override def receiveCommand: Receive = { - case c: String => + case c: String ⇒ sender() ! c - persist(s"$c-1-outer") { outer1 => + persist(s"$c-1-outer") { outer1 ⇒ sender() ! outer1 - persist(s"$c-1-inner") { inner1 => + persist(s"$c-1-inner") { inner1 ⇒ sender() ! inner1 } } - persist(s"$c-2-outer") { outer2 => + persist(s"$c-2-outer") { outer2 ⇒ sender() ! outer2 - persist(s"$c-2-inner") { inner2 => + persist(s"$c-2-inner") { inner2 ⇒ sender() ! 
inner2 } } @@ -356,20 +356,20 @@ object PersistenceDocSpec { override def persistenceId = "my-stable-persistence-id" override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } //#nested-persistAsync-persistAsync override def receiveCommand: Receive = { - case c: String => + case c: String ⇒ sender() ! c - persistAsync(c + "-outer-1") { outer => + persistAsync(c + "-outer-1") { outer ⇒ sender() ! outer - persistAsync(c + "-inner-1") { inner => sender() ! inner } + persistAsync(c + "-inner-1") { inner ⇒ sender() ! inner } } - persistAsync(c + "-outer-2") { outer => + persistAsync(c + "-outer-2") { outer ⇒ sender() ! outer - persistAsync(c + "-inner-2") { inner => sender() ! inner } + persistAsync(c + "-inner-2") { inner ⇒ sender() ! inner } } } //#nested-persistAsync-persistAsync @@ -408,15 +408,15 @@ object PersistenceDocSpec { override def persistenceId = "safe-actor" override def receiveCommand: Receive = { - case c: String => + case c: String ⇒ println(c) persist(s"handle-$c") { println(_) } - case Shutdown => + case Shutdown ⇒ context.stop(self) } override def receiveRecover: Receive = { - case _ => // handle recovery here + case _ ⇒ // handle recovery here } } //#safe-shutdown diff --git a/akka-docs/src/test/scala/docs/persistence/PersistenceEventAdapterDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/PersistenceEventAdapterDocSpec.scala index 1fd6288421..3d7724a01b 100644 --- a/akka-docs/src/test/scala/docs/persistence/PersistenceEventAdapterDocSpec.scala +++ b/akka-docs/src/test/scala/docs/persistence/PersistenceEventAdapterDocSpec.scala @@ -76,12 +76,12 @@ class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) { override def journalPluginId: String = "akka.persistence.journal.auto-json-store" override def receiveRecover: Receive = { - case RecoveryCompleted => // ignore... - case e => p.ref ! e + case RecoveryCompleted ⇒ // ignore... + case e ⇒ p.ref ! e } override def receiveCommand: Receive = { - case c => persist(c) { e => p.ref ! e } + case c ⇒ persist(c) { e ⇒ p.ref ! e } } }) @@ -108,12 +108,12 @@ class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) { override def journalPluginId: String = "akka.persistence.journal.manual-json-store" override def receiveRecover: Receive = { - case RecoveryCompleted => // ignore... - case e => p.ref ! e + case RecoveryCompleted ⇒ // ignore... + case e ⇒ p.ref ! e } override def receiveCommand: Receive = { - case c => persist(c) { e => p.ref ! e } + case c ⇒ persist(c) { e ⇒ p.ref ! 
e } } }) @@ -165,7 +165,7 @@ class MyAutoJsonEventAdapter(system: ExtendedActorSystem) extends EventAdapter { override def fromJournal(event: Any, manifest: String): EventSeq = EventSeq.single { event match { - case json: JsonElement => + case json: JsonElement ⇒ val clazz = system.dynamicAccess.getClassFor[Any](manifest).get gson.fromJson(json, clazz) } @@ -202,7 +202,7 @@ class MyManualJsonEventAdapter(system: ExtendedActorSystem) extends EventAdapter } override def fromJournal(event: Any, m: String): EventSeq = event match { - case json: JsonElement => + case json: JsonElement ⇒ val manifest = json.getAsJsonObject.get("_manifest").getAsString val clazz = system.dynamicAccess.getClassFor[Any](manifest).get @@ -214,14 +214,14 @@ class MyTaggingEventAdapter(system: ExtendedActorSystem) extends EventAdapter { override def manifest(event: Any): String = "" override def fromJournal(event: Any, manifest: String): EventSeq = event match { - case j: MyTaggingJournalModel => EventSeq.single(j) + case j: MyTaggingJournalModel ⇒ EventSeq.single(j) } override def toJournal(event: Any): Any = { event match { - case Person(_, age) if age >= 18 => MyTaggingJournalModel(event, tags = Set("adult")) - case Person(_, age) => MyTaggingJournalModel(event, tags = Set("minor")) - case _ => MyTaggingJournalModel(event, tags = Set.empty) + case Person(_, age) if age >= 18 ⇒ MyTaggingJournalModel(event, tags = Set("adult")) + case Person(_, age) ⇒ MyTaggingJournalModel(event, tags = Set("minor")) + case _ ⇒ MyTaggingJournalModel(event, tags = Set.empty) } } } diff --git a/akka-docs/src/test/scala/docs/persistence/PersistencePluginDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/PersistencePluginDocSpec.scala index 56a8a517f0..ec8b712337 100644 --- a/akka-docs/src/test/scala/docs/persistence/PersistencePluginDocSpec.scala +++ b/akka-docs/src/test/scala/docs/persistence/PersistencePluginDocSpec.scala @@ -129,7 +129,7 @@ object SharedLeveldbPluginDocSpec { } def receive = { - case ActorIdentity(1, Some(store)) => + case ActorIdentity(1, Some(store)) ⇒ SharedLeveldbJournal.setStore(store, context.system) } } @@ -161,7 +161,7 @@ class MyJournal extends AsyncWriteJournal { def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long): Future[Unit] = ??? def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long)( - replayCallback: (PersistentRepr) => Unit): Future[Unit] = ??? + replayCallback: (PersistentRepr) ⇒ Unit): Future[Unit] = ??? def asyncReadHighestSequenceNr( persistenceId: String, fromSequenceNr: Long): Future[Long] = ??? 
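Reviewer note: every hunk in this patch makes the same mechanical substitution, replacing the ASCII arrows => and <- with the Unicode glyphs ⇒ (U+21D2) and ← (U+2190), alongside a handful of trailing-whitespace, indentation, and end-of-file-newline fixes. On the Scala versions Akka targeted at the time, the Unicode glyphs were plain aliases for the ASCII forms, so none of these hunks change behavior. A minimal, self-contained sketch of the equivalence follows; the object and member names are illustrative, not taken from the patch:

object ArrowGlyphs {
  // Function types and pattern-match cases accept either arrow spelling.
  val asciiInc: Int => Int = { case n => n + 1 }
  val unicodeInc: Int ⇒ Int = { case n ⇒ n + 1 } // ⇒ is an alias for =>

  def main(args: Array[String]): Unit = {
    // For-comprehension generators likewise accept <- or ←.
    val doubled = for (i ← 1 to 3) yield i * 2
    println(asciiInc(1) == unicodeInc(1)) // true
    println(doubled) // Vector(2, 4, 6)
  }
}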
diff --git a/akka-docs/src/test/scala/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala index 91c125eb11..f71fdbedbb 100644 --- a/akka-docs/src/test/scala/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala +++ b/akka-docs/src/test/scala/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala @@ -53,10 +53,10 @@ class ProtobufReadOptional { sealed abstract class SeatType { def code: String } object SeatType { def fromString(s: String) = s match { - case Window.code => Window - case Aisle.code => Aisle - case Other.code => Other - case _ => Unknown + case Window.code ⇒ Window + case Aisle.code ⇒ Aisle + case Other.code ⇒ Other + case _ ⇒ Unknown } case object Window extends SeatType { override val code = "W" } case object Aisle extends SeatType { override val code = "A" } @@ -82,15 +82,15 @@ class ProtobufReadOptional { override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { - case SeatReservedManifest => + case SeatReservedManifest ⇒ // use generated protobuf serializer seatReserved(FlightAppModels.SeatReserved.parseFrom(bytes)) - case _ => + case _ ⇒ throw new NotSerializableException("Unable to handle manifest: " + manifest) } override def toBinary(o: AnyRef): Array[Byte] = o match { - case s: SeatReserved => + case s: SeatReserved ⇒ FlightAppModels.SeatReserved.newBuilder .setRow(s.row) .setLetter(s.letter) @@ -142,12 +142,12 @@ class RenamePlainJson { marshaller.toJson(event) override def fromJournal(event: Any, manifest: String): EventSeq = event match { - case json: JsObject => EventSeq(marshaller.fromJson(manifest match { - case V1 => rename(json, "code", "seatNr") - case V2 => json // pass-through - case unknown => throw new IllegalArgumentException(s"Unknown manifest: $unknown") + case json: JsObject ⇒ EventSeq(marshaller.fromJson(manifest match { + case V1 ⇒ rename(json, "code", "seatNr") + case V2 ⇒ json // pass-through + case unknown ⇒ throw new IllegalArgumentException(s"Unknown manifest: $unknown") })) - case _ => + case _ ⇒ val c = event.getClass throw new IllegalArgumentException("Can only work with JSON, was: %s".format(c)) } @@ -189,19 +189,19 @@ object SimplestCustomSerializer { // serialize the object override def toBinary(obj: AnyRef): Array[Byte] = obj match { - case p: Person => s"""${p.name}|${p.surname}""".getBytes(Utf8) - case _ => throw new IllegalArgumentException( + case p: Person ⇒ s"""${p.name}|${p.surname}""".getBytes(Utf8) + case _ ⇒ throw new IllegalArgumentException( s"Unable to serialize to bytes, clazz was: ${obj.getClass}!") } // deserialize the object, using the manifest to indicate which logic to apply override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { - case PersonManifest => + case PersonManifest ⇒ val nameAndSurname = new String(bytes, Utf8) val Array(name, surname) = nameAndSurname.split("[|]") Person(name, surname) - case _ => throw new NotSerializableException( + case _ ⇒ throw new NotSerializableException( s"Unable to deserialize from bytes, manifest was: $manifest! 
Bytes length: " + bytes.length) } @@ -248,13 +248,13 @@ class UserEventsAdapter extends EventAdapter { override def manifest(event: Any): String = "" override def fromJournal(event: Any, manifest: String): EventSeq = event match { - case UserDetailsChanged(null, address) => EventSeq(UserAddressChanged(address)) - case UserDetailsChanged(name, null) => EventSeq(UserNameChanged(name)) - case UserDetailsChanged(name, address) => + case UserDetailsChanged(null, address) ⇒ EventSeq(UserAddressChanged(address)) + case UserDetailsChanged(name, null) ⇒ EventSeq(UserNameChanged(name)) + case UserDetailsChanged(name, address) ⇒ EventSeq( UserNameChanged(name), UserAddressChanged(address)) - case event: V2 => EventSeq(event) + case event: V2 ⇒ EventSeq(event) } override def toJournal(event: Any): Any = event @@ -277,15 +277,15 @@ class RemovedEventsAwareSerializer extends SerializerWithStringManifest { override def manifest(o: AnyRef): String = o.getClass.getName override def toBinary(o: AnyRef): Array[Byte] = o match { - case _ => o.toString.getBytes(utf8) // example serialization + case _ ⇒ o.toString.getBytes(utf8) // example serialization } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { - case m if SkipEventManifestsEvents.contains(m) => + case m if SkipEventManifestsEvents.contains(m) ⇒ EventDeserializationSkipped - case other => new String(bytes, utf8) + case other ⇒ new String(bytes, utf8) } } //#string-serializer-skip-deleved-event-by-manifest @@ -296,8 +296,8 @@ class SkippedEventsAwareAdapter extends EventAdapter { override def toJournal(event: Any) = event override def fromJournal(event: Any, manifest: String) = event match { - case EventDeserializationSkipped => EventSeq.empty - case _ => EventSeq(event) + case EventDeserializationSkipped ⇒ EventSeq.empty + case _ ⇒ EventSeq(event) } } //#string-serializer-skip-deleved-event-by-manifest-adapter @@ -313,15 +313,15 @@ class RenamedEventAwareSerializer extends SerializerWithStringManifest { override def manifest(o: AnyRef): String = o.getClass.getName override def toBinary(o: AnyRef): Array[Byte] = o match { - case SamplePayload(data) => s"""$data""".getBytes(Utf8) + case SamplePayload(data) ⇒ s"""$data""".getBytes(Utf8) // previously also handled "old" events here. 
   }

   override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
-    case OldPayloadClassName => SamplePayload(new String(bytes, Utf8))
-    case MyPayloadClassName => SamplePayload(new String(bytes, Utf8))
-    case other => throw new NotSerializableException(s"unexpected manifest [$other]")
+    case OldPayloadClassName ⇒ SamplePayload(new String(bytes, Utf8))
+    case MyPayloadClassName ⇒ SamplePayload(new String(bytes, Utf8))
+    case other ⇒ throw new NotSerializableException(s"unexpected manifest [$other]")
   }
 }
 //#string-serializer-handle-rename
@@ -348,11 +348,11 @@ class DetachedModelsAdapter extends EventAdapter {
   override def manifest(event: Any): String = ""

   override def toJournal(event: Any): Any = event match {
-    case DomainModel.SeatBooked(code, customer) =>
+    case DomainModel.SeatBooked(code, customer) ⇒
       DataModel.SeatBooked(code, customer.name)
   }
   override def fromJournal(event: Any, manifest: String): EventSeq = event match {
-    case DataModel.SeatBooked(code, customerName) =>
+    case DataModel.SeatBooked(code, customerName) ⇒
       EventSeq(DomainModel.SeatBooked(code, DomainModel.Customer(customerName)))
   }
 }
@@ -374,9 +374,9 @@ class JsonDataModelAdapter extends EventAdapter {
     marshaller.toJson(event)

   override def fromJournal(event: Any, manifest: String): EventSeq = event match {
-    case json: JsObject =>
+    case json: JsObject ⇒
       EventSeq(marshaller.fromJson(json))
-    case _ =>
+    case _ ⇒
       throw new IllegalArgumentException(
         "Unable to fromJournal a non-JSON object! Was: " + event.getClass)
   }
diff --git a/akka-docs/src/test/scala/docs/persistence/PersistentActorExample.scala b/akka-docs/src/test/scala/docs/persistence/PersistentActorExample.scala
index 5ea21796e3..f6764c5d3d 100644
--- a/akka-docs/src/test/scala/docs/persistence/PersistentActorExample.scala
+++ b/akka-docs/src/test/scala/docs/persistence/PersistentActorExample.scala
@@ -25,20 +25,20 @@ class ExamplePersistentActor extends PersistentActor {
     state.size

   val receiveRecover: Receive = {
-    case evt: Evt => updateState(evt)
-    case SnapshotOffer(_, snapshot: ExampleState) => state = snapshot
+    case evt: Evt ⇒ updateState(evt)
+    case SnapshotOffer(_, snapshot: ExampleState) ⇒ state = snapshot
   }

   val snapShotInterval = 1000
   val receiveCommand: Receive = {
-    case Cmd(data) =>
-      persist(Evt(s"${data}-${numEvents}")) { event =>
+    case Cmd(data) ⇒
+      persist(Evt(s"${data}-${numEvents}")) { event ⇒
         updateState(event)
         context.system.eventStream.publish(event)
         if (lastSequenceNr % snapShotInterval == 0 && lastSequenceNr != 0)
           saveSnapshot(state)
       }
-    case "print" => println(state)
+    case "print" ⇒ println(state)
   }
 }
diff --git a/akka-docs/src/test/scala/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala
index a52e650097..6d39541482 100644
--- a/akka-docs/src/test/scala/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala
@@ -23,13 +23,13 @@ object LeveldbPersistenceQueryDocSpec {
   class MyTaggingEventAdapter extends WriteEventAdapter {
     val colors = Set("green", "black", "blue")
     override def toJournal(event: Any): Any = event match {
-      case s: String =>
-        var tags = colors.foldLeft(Set.empty[String]) { (acc, c) =>
+      case s: String ⇒
+        var tags = colors.foldLeft(Set.empty[String]) { (acc, c) ⇒
           if (s.contains(c)) acc + c else acc
         }
         if (tags.isEmpty) event
         else Tagged(event, tags)
-      case _ => event
+      case _ ⇒ event
     }

     override def manifest(event: Any): String = ""
diff --git a/akka-docs/src/test/scala/docs/persistence/query/MyEventsByTagPublisher.scala b/akka-docs/src/test/scala/docs/persistence/query/MyEventsByTagPublisher.scala
index 886438f299..e684ab5808 100644
--- a/akka-docs/src/test/scala/docs/persistence/query/MyEventsByTagPublisher.scala
+++ b/akka-docs/src/test/scala/docs/persistence/query/MyEventsByTagPublisher.scala
@@ -39,11 +39,11 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteD
   }

   def receive = {
-    case _: Request | Continue =>
+    case _: Request | Continue ⇒
       query()
       deliverBuf()

-    case Cancel =>
+    case Cancel ⇒
       context.stop(self)
   }
@@ -79,12 +79,12 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteD
       val serialization = SerializationExtension(context.system)

       buf = result.map {
-        case (id, bytes) =>
+        case (id, bytes) ⇒
           val p = serialization.deserialize(bytes, classOf[PersistentRepr]).get
           EventEnvelope(offset = Sequence(id), p.persistenceId, p.sequenceNr, p.payload)
       }
     } catch {
-      case e: Exception =>
+      case e: Exception ⇒
         onErrorThenStop(e)
     }
   }
diff --git a/akka-docs/src/test/scala/docs/persistence/query/PersistenceQueryDocSpec.scala b/akka-docs/src/test/scala/docs/persistence/query/PersistenceQueryDocSpec.scala
index 4efab08729..19823228d2 100644
--- a/akka-docs/src/test/scala/docs/persistence/query/PersistenceQueryDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/persistence/query/PersistenceQueryDocSpec.scala
@@ -70,8 +70,8 @@ object PersistenceQueryDocSpec {
         case Sequence(offsetValue) ⇒
           val props = MyEventsByTagPublisher.props(tag, offsetValue, refreshInterval)
           Source.actorPublisher[EventEnvelope](props)
-            .mapMaterializedValue(_ => NotUsed)
-        case NoOffset => eventsByTag(tag, Sequence(0L)) //recursive
+            .mapMaterializedValue(_ ⇒ NotUsed)
+        case NoOffset ⇒ eventsByTag(tag, Sequence(0L)) //recursive
         case _ ⇒
           throw new IllegalArgumentException("LevelDB does not support " + offset.getClass.getName + " offsets")
       }
@@ -166,7 +166,7 @@ object PersistenceQueryDocSpec {
     // Using an example (Reactive Streams) Database driver
     readJournal
       .eventsByPersistenceId("user-1337")
-      .map(envelope => envelope.event)
+      .map(envelope ⇒ envelope.event)
       .map(convertToReadSideTypes) // convert to datatype
       .grouped(20) // batch inserts into groups of 20
       .runWith(Sink.fromSubscriber(dbBatchWriter)) // write batches to read-side database
@@ -180,7 +180,7 @@ object PersistenceQueryDocSpec {
     var state: ComplexState = ComplexState()

     def receive = {
-      case m =>
+      case m ⇒
         state = updateState(state, m)
         if (state.readyToSave) store.save(Record(state))
     }
@@ -223,7 +223,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {

     // materialize stream, consuming events
     implicit val mat = ActorMaterializer()
-    source.runForeach { event => println("Event: " + event) }
+    source.runForeach { event ⇒ println("Event: " + event) }
     //#basic-usage

     //#all-persistence-ids-live
@@ -261,12 +261,12 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
       readJournal.byTagsWithMeta(Set("red", "blue"))

     query
-      .mapMaterializedValue { meta =>
+      .mapMaterializedValue { meta ⇒
         println(s"The query is: " +
           s"ordered deterministically: ${meta.deterministicOrder}, " +
           s"infinite: ${meta.infinite}")
       }
-      .map { event => println(s"Event payload: ${event.payload}") }
+      .map { event ⇒ println(s"Event payload: ${event.payload}") }
       .runWith(Sink.ignore)

     //#advanced-journal-query-usage
@@ -293,11 +293,11 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
     val writerProps = Props(classOf[TheOneWhoWritesToQueryJournal], "bid")
     val writer = system.actorOf(writerProps, "bid-projection-writer")

-    bidProjection.latestOffset.foreach { startFromOffset =>
+    bidProjection.latestOffset.foreach { startFromOffset ⇒
       readJournal
         .eventsByTag("bid", Sequence(startFromOffset))
-        .mapAsync(8) { envelope => (writer ? envelope.event).map(_ => envelope.offset) }
-        .mapAsync(1) { offset => bidProjection.saveProgress(offset) }
+        .mapAsync(8) { envelope ⇒ (writer ? envelope.event).map(_ ⇒ envelope.offset) }
+        .mapAsync(1) { offset ⇒ bidProjection.saveProgress(offset) }
         .runWith(Sink.ignore)
     }
     //#projection-into-different-store-actor-run
@@ -319,7 +319,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
     readJournal
       .eventsByTag("bid")
-      .mapAsync(1) { e => store.save(e) }
+      .mapAsync(1) { e ⇒ store.save(e) }
       .runWith(Sink.ignore)
     //#projection-into-different-store-simple
   }
diff --git a/akka-docs/src/test/scala/docs/remoting/RemoteDeploymentDocSpec.scala b/akka-docs/src/test/scala/docs/remoting/RemoteDeploymentDocSpec.scala
index e823eab60a..0223646a75 100644
--- a/akka-docs/src/test/scala/docs/remoting/RemoteDeploymentDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/remoting/RemoteDeploymentDocSpec.scala
@@ -13,7 +13,7 @@ import akka.remote.RemoteScope
 object RemoteDeploymentDocSpec {

   class SampleActor extends Actor {
-    def receive = { case _ => sender() ! self }
+    def receive = { case _ ⇒ sender() ! self }
   }
 }
diff --git a/akka-docs/src/test/scala/docs/routing/ConsistentHashingRouterDocSpec.scala b/akka-docs/src/test/scala/docs/routing/ConsistentHashingRouterDocSpec.scala
index c39bc903f5..c4b5f5df53 100644
--- a/akka-docs/src/test/scala/docs/routing/ConsistentHashingRouterDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/routing/ConsistentHashingRouterDocSpec.scala
@@ -18,9 +18,9 @@ object ConsistentHashingRouterDocSpec {
     var cache = Map.empty[String, String]

     def receive = {
-      case Entry(key, value) => cache += (key -> value)
-      case Get(key) => sender() ! cache.get(key)
-      case Evict(key) => cache -= key
+      case Entry(key, value) ⇒ cache += (key -> value)
+      case Get(key) ⇒ sender() ! cache.get(key)
+      case Evict(key) ⇒ cache -= key
     }
   }
@@ -50,7 +50,7 @@ class ConsistentHashingRouterDocSpec extends AkkaSpec with ImplicitSender {
     import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope

     def hashMapping: ConsistentHashMapping = {
-      case Evict(key) => key
+      case Evict(key) ⇒ key
     }

     val cache: ActorRef =
diff --git a/akka-docs/src/test/scala/docs/routing/CustomRouterDocSpec.scala b/akka-docs/src/test/scala/docs/routing/CustomRouterDocSpec.scala
index 72e19ad250..23a0a6c377 100644
--- a/akka-docs/src/test/scala/docs/routing/CustomRouterDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/routing/CustomRouterDocSpec.scala
@@ -50,7 +50,7 @@ akka.actor.deployment {
 class RedundancyRoutingLogic(nbrCopies: Int) extends RoutingLogic {
   val roundRobin = RoundRobinRoutingLogic()
   def select(message: Any, routees: immutable.IndexedSeq[Routee]): Routee = {
-    val targets = (1 to nbrCopies).map(_ => roundRobin.select(message, routees))
+    val targets = (1 to nbrCopies).map(_ ⇒ roundRobin.select(message, routees))
     SeveralRoutees(targets)
   }
 }
@@ -58,7 +58,7 @@ akka.actor.deployment {

 class Storage extends Actor {
   def receive = {
-    case x => sender() ! x
+    case x ⇒ sender() ! x
   }
 }
@@ -101,7 +101,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
     //#unit-test-logic
     val logic = new RedundancyRoutingLogic(nbrCopies = 3)

-    val routees = for (n <- 1 to 7) yield TestRoutee(n)
+    val routees = for (n ← 1 to 7) yield TestRoutee(n)

     val r1 = logic.select("msg", routees)
     r1.asInstanceOf[SeveralRoutees].routees should be(
@@ -120,9 +120,9 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
   "demonstrate usage of custom router" in {
     //#usage-1
-    for (n <- 1 to 10) system.actorOf(Props[Storage], "s" + n)
+    for (n ← 1 to 10) system.actorOf(Props[Storage], "s" + n)

-    val paths = for (n <- 1 to 10) yield ("/user/s" + n)
+    val paths = for (n ← 1 to 10) yield ("/user/s" + n)

     val redundancy1: ActorRef = system.actorOf(
       RedundancyGroup(paths, nbrCopies = 3).props(),
@@ -130,7 +130,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
     redundancy1 ! "important"
     //#usage-1

-    for (_ <- 1 to 3) expectMsg("important")
+    for (_ ← 1 to 3) expectMsg("important")

     //#usage-2
     val redundancy2: ActorRef = system.actorOf(
@@ -139,7 +139,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
     redundancy2 ! "very important"
     //#usage-2

-    for (_ <- 1 to 5) expectMsg("very important")
+    for (_ ← 1 to 5) expectMsg("very important")
   }
diff --git a/akka-docs/src/test/scala/docs/routing/RouterDocSpec.scala b/akka-docs/src/test/scala/docs/routing/RouterDocSpec.scala
index 866e76a6da..095411b97d 100644
--- a/akka-docs/src/test/scala/docs/routing/RouterDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/routing/RouterDocSpec.scala
@@ -305,9 +305,9 @@ router-dispatcher {}
   }

   def receive = {
-    case w: Work =>
+    case w: Work ⇒
       router.route(w, sender())
-    case Terminated(a) =>
+    case Terminated(a) ⇒
       router = router.removeRoutee(a)
       val r = context.actorOf(Props[Worker])
       context watch r
@@ -318,7 +318,7 @@ router-dispatcher {}

 class Worker extends Actor {
   def receive = {
-    case _ =>
+    case _ ⇒
   }
 }
@@ -331,7 +331,7 @@ router-dispatcher {}
   //#create-worker-actors

   def receive = {
-    case _ =>
+    case _ ⇒
   }
 }
@@ -396,7 +396,7 @@ router-dispatcher {}
       context.actorOf(BalancingPool(20).props(Props[Worker]), "router10b")
     //#balancing-pool-3
     import scala.collection.JavaConversions._
-    for (i <- 1 to 100) router10b ! i
+    for (i ← 1 to 100) router10b ! i
     val threads10b = Thread.getAllStackTraces.keySet.filter { _.getName contains "router10b" }
     val threads10bNr = threads10b.size
     require(threads10bNr == 5, s"Expected 5 threads for router10b, had $threads10bNr! Got: ${threads10b.map(_.getName)}")
@@ -518,14 +518,14 @@ router-dispatcher {}
   //#optimal-size-exploring-resize-pool

   def receive = {
-    case _ =>
+    case _ ⇒
   }
 }

 class Echo extends Actor {
   def receive = {
-    case m => sender() ! m
+    case m ⇒ sender() ! m
   }
 }
 }
diff --git a/akka-docs/src/test/scala/docs/serialization/SerializationDocSpec.scala b/akka-docs/src/test/scala/docs/serialization/SerializationDocSpec.scala
index debb929bfc..b139ba314d 100644
--- a/akka-docs/src/test/scala/docs/serialization/SerializationDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/serialization/SerializationDocSpec.scala
@@ -70,16 +70,16 @@ package docs.serialization {
     // Use `""` if manifest is not needed.
     def manifest(obj: AnyRef): String = obj match {
-      case _: Customer => CustomerManifest
-      case _: User => UserManifest
+      case _: Customer ⇒ CustomerManifest
+      case _: User ⇒ UserManifest
     }

     // "toBinary" serializes the given object to an Array of Bytes
     def toBinary(obj: AnyRef): Array[Byte] = {
       // Put the real code that serializes the object here
       obj match {
-        case Customer(name) => name.getBytes(UTF_8)
-        case User(name) => name.getBytes(UTF_8)
+        case Customer(name) ⇒ name.getBytes(UTF_8)
+        case User(name) ⇒ name.getBytes(UTF_8)
       }
     }
@@ -88,9 +88,9 @@ package docs.serialization {
     def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = {
       // Put the real code that deserializes here
       manifest match {
-        case CustomerManifest =>
+        case CustomerManifest ⇒
           Customer(new String(bytes, UTF_8))
-        case UserManifest =>
+        case UserManifest ⇒
           User(new String(bytes, UTF_8))
       }
     }
diff --git a/akka-docs/src/test/scala/docs/stream/ActorPublisherDocSpec.scala b/akka-docs/src/test/scala/docs/stream/ActorPublisherDocSpec.scala
index d2f31b2084..ec11c19cf4 100644
--- a/akka-docs/src/test/scala/docs/stream/ActorPublisherDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/ActorPublisherDocSpec.scala
@@ -29,9 +29,9 @@ object ActorPublisherDocSpec {
     var buf = Vector.empty[Job]

     def receive = {
-      case job: Job if buf.size == MaxBufferSize =>
+      case job: Job if buf.size == MaxBufferSize ⇒
         sender() ! JobDenied
-      case job: Job =>
+      case job: Job ⇒
         sender() ! JobAccepted
         if (buf.isEmpty && totalDemand > 0)
           onNext(job)
@@ -39,9 +39,9 @@ object ActorPublisherDocSpec {
           buf :+= job
           deliverBuf()
         }
-      case Request(_) =>
+      case Request(_) ⇒
         deliverBuf()
-      case Cancel =>
+      case Cancel ⇒
         context.stop(self)
     }
@@ -79,7 +79,7 @@ class ActorPublisherDocSpec extends AkkaSpec {
     val jobManagerSource = Source.actorPublisher[JobManager.Job](JobManager.props)
     val ref = Flow[JobManager.Job]
       .map(_.payload.toUpperCase)
-      .map { elem => println(elem); elem }
+      .map { elem ⇒ println(elem); elem }
       .to(Sink.ignore)
       .runWith(jobManagerSource)
diff --git a/akka-docs/src/test/scala/docs/stream/ActorSubscriberDocSpec.scala b/akka-docs/src/test/scala/docs/stream/ActorSubscriberDocSpec.scala
index ee56485f49..5b9b0bf803 100644
--- a/akka-docs/src/test/scala/docs/stream/ActorSubscriberDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/ActorSubscriberDocSpec.scala
@@ -48,17 +48,17 @@ object ActorSubscriberDocSpec {
     }

     def receive = {
-      case OnNext(Msg(id, replyTo)) =>
+      case OnNext(Msg(id, replyTo)) ⇒
         queue += (id -> replyTo)
         assert(queue.size <= MaxQueueSize, s"queued too many: ${queue.size}")
         router.route(Work(id), self)
-      case Reply(id) =>
+      case Reply(id) ⇒
         queue(id) ! Done(id)
         queue -= id
         if (canceled && queue.isEmpty) {
           context.stop(self)
         }
-      case OnComplete =>
+      case OnComplete ⇒
         if (queue.isEmpty) {
           context.stop(self)
         }
@@ -68,7 +68,7 @@ object ActorSubscriberDocSpec {
   class Worker extends Actor {
     import WorkerPool._
     def receive = {
-      case Work(id) =>
+      case Work(id) ⇒
         // ...
         sender() ! Reply(id)
     }
diff --git a/akka-docs/src/test/scala/docs/stream/BidiFlowDocSpec.scala b/akka-docs/src/test/scala/docs/stream/BidiFlowDocSpec.scala
index 5f3d10db22..99374476a4 100644
--- a/akka-docs/src/test/scala/docs/stream/BidiFlowDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/BidiFlowDocSpec.scala
@@ -25,8 +25,8 @@ object BidiFlowDocSpec {
     //#implementation-details-elided
     implicit val order = ByteOrder.LITTLE_ENDIAN
     msg match {
-      case Ping(id) => ByteString.newBuilder.putByte(1).putInt(id).result()
-      case Pong(id) => ByteString.newBuilder.putByte(2).putInt(id).result()
+      case Ping(id) ⇒ ByteString.newBuilder.putByte(1).putInt(id).result()
+      case Pong(id) ⇒ ByteString.newBuilder.putByte(2).putInt(id).result()
     }
     //#implementation-details-elided
   }
@@ -36,15 +36,15 @@ object BidiFlowDocSpec {
     implicit val order = ByteOrder.LITTLE_ENDIAN
     val it = bytes.iterator
     it.getByte match {
-      case 1 => Ping(it.getInt)
-      case 2 => Pong(it.getInt)
-      case other => throw new RuntimeException(s"parse error: expected 1|2 got $other")
+      case 1 ⇒ Ping(it.getInt)
+      case 2 ⇒ Pong(it.getInt)
+      case other ⇒ throw new RuntimeException(s"parse error: expected 1|2 got $other")
     }
     //#implementation-details-elided
   }
   //#codec-impl

-  val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b =>
+  val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
     // construct and add the top flow, going outbound
     val outbound = b.add(Flow[Message].map(toBytes))
     // construct and add the bottom flow, going inbound
@@ -58,7 +58,7 @@ object BidiFlowDocSpec {
   //#codec

   //#framing
-  val framing = BidiFlow.fromGraph(GraphDSL.create() { b =>
+  val framing = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
     implicit val order = ByteOrder.LITTLE_ENDIAN

     def addLengthHeader(bytes: ByteString) = {
@@ -135,12 +135,12 @@ object BidiFlowDocSpec {
   })
   //#framing

-  val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b =>
+  val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
     val f = Flow[ByteString].mapConcat(_.map(ByteString(_)))
     BidiShape.fromFlows(b.add(f), b.add(f))
   })

-  val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b =>
+  val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
     val f = Flow[ByteString].grouped(1000).map(_.fold(ByteString.empty)(_ ++ _))
     BidiShape.fromFlows(b.add(f), b.add(f))
   })
@@ -169,7 +169,7 @@ class BidiFlowDocSpec extends AkkaSpec {
     val stack = codec.atop(framing)

     // test it by plugging it into its own inverse and closing the right end
-    val pingpong = Flow[Message].collect { case Ping(id) => Pong(id) }
+    val pingpong = Flow[Message].collect { case Ping(id) ⇒ Pong(id) }
     val flow = stack.atop(stack.reversed).join(pingpong)
     val result = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
     Await.result(result, 1.second) should ===((0 to 9).map(Pong))
@@ -178,14 +178,14 @@ class BidiFlowDocSpec extends AkkaSpec {

   "work when chopped up" in {
     val stack = codec.atop(framing)
-    val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
+    val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
     val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
     Await.result(f, 1.second) should ===((0 to 9).map(Pong))
   }

   "work when accumulated" in {
     val stack = codec.atop(framing)
-    val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
+    val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
     val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
     Await.result(f, 1.second) should ===((0 to 9).map(Pong))
   }
diff --git a/akka-docs/src/test/scala/docs/stream/CompositionDocSpec.scala b/akka-docs/src/test/scala/docs/stream/CompositionDocSpec.scala
index 505700127a..815cdae557 100644
--- a/akka-docs/src/test/scala/docs/stream/CompositionDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/CompositionDocSpec.scala
@@ -168,7 +168,7 @@ class CompositionDocSpec extends AkkaSpec {
   "closed graph" in {
     //#embed-closed
     val closed1 = Source.single(0).to(Sink.foreach(println))
-    val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
+    val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
       val embeddedClosed: ClosedShape = builder.add(closed1)
       // …
       embeddedClosed
@@ -191,7 +191,7 @@ class CompositionDocSpec extends AkkaSpec {
     //#mat-combine-2

     // Materializes to NotUsed (orange)
-    val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i => ByteString(i.toString) }
+    val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i ⇒ ByteString(i.toString) }

     // Materializes to Future[OutgoingConnection] (yellow)
     val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] =
diff --git a/akka-docs/src/test/scala/docs/stream/FlowDocSpec.scala b/akka-docs/src/test/scala/docs/stream/FlowDocSpec.scala
index 8aa6e0d507..0ef75d4650 100644
--- a/akka-docs/src/test/scala/docs/stream/FlowDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/FlowDocSpec.scala
@@ -24,10 +24,10 @@ class FlowDocSpec extends AkkaSpec {
   "source is immutable" in {
     //#source-immutable
     val source = Source(1 to 10)
-    source.map(_ => 0) // has no effect on source, since it's immutable
+    source.map(_ ⇒ 0) // has no effect on source, since it's immutable
     source.runWith(Sink.fold(0)(_ + _)) // 55

-    val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended
+    val zeroes = source.map(_ ⇒ 0) // returns new Source[Int], with `map()` appended
     zeroes.runWith(Sink.fold(0)(_ + _)) // 0
     //#source-immutable
   }
@@ -78,12 +78,12 @@ class FlowDocSpec extends AkkaSpec {
     import scala.concurrent.duration._
     case object Tick

-    val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () => Tick)
+    val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () ⇒ Tick)

     val timerCancel: Cancellable = Sink.ignore.runWith(timer)
     timerCancel.cancel()

-    val timerMap = timer.map(tick => "tick")
+    val timerMap = timer.map(tick ⇒ "tick")
     // materialize the flow and retrieve the timers Cancellable
     val timerCancellable = Sink.ignore.runWith(timerMap)
     timerCancellable.cancel()
@@ -149,7 +149,7 @@ class FlowDocSpec extends AkkaSpec {
   "various ways of transforming materialized values" in {
     import scala.concurrent.duration._

-    val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder => tickSource =>
+    val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder ⇒ tickSource ⇒
       import GraphDSL.Implicits._
       val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
       tickSource ~> zip.in0
@@ -197,7 +197,7 @@ class FlowDocSpec extends AkkaSpec {
     // doubly nested pair, but we want to flatten it out
     val r11: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
       r9.mapMaterializedValue {
-        case ((promise, cancellable), future) =>
+        case ((promise, cancellable), future) ⇒
           (promise, cancellable, future)
       }
@@ -211,7 +211,7 @@ class FlowDocSpec extends AkkaSpec {
     // The result of r11 can be also achieved by using the Graph API
     val r12: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
-      RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder => (src, f, dst) =>
+      RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder ⇒ (src, f, dst) ⇒
         import GraphDSL.Implicits._
         src ~> f ~> dst
         ClosedShape
diff --git a/akka-docs/src/test/scala/docs/stream/FlowErrorDocSpec.scala b/akka-docs/src/test/scala/docs/stream/FlowErrorDocSpec.scala
index 24037157fb..b0fb11d1d0 100644
--- a/akka-docs/src/test/scala/docs/stream/FlowErrorDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/FlowErrorDocSpec.scala
@@ -32,8 +32,8 @@ class FlowErrorDocSpec extends AkkaSpec {
   "demonstrate resume stream" in {
     //#resume
     val decider: Supervision.Decider = {
-      case _: ArithmeticException => Supervision.Resume
-      case _ => Supervision.Stop
+      case _: ArithmeticException ⇒ Supervision.Resume
+      case _ ⇒ Supervision.Stop
     }
     implicit val materializer = ActorMaterializer(
       ActorMaterializerSettings(system).withSupervisionStrategy(decider))
@@ -50,11 +50,11 @@ class FlowErrorDocSpec extends AkkaSpec {
     //#resume-section
     implicit val materializer = ActorMaterializer()
     val decider: Supervision.Decider = {
-      case _: ArithmeticException => Supervision.Resume
-      case _ => Supervision.Stop
+      case _: ArithmeticException ⇒ Supervision.Resume
+      case _ ⇒ Supervision.Stop
     }
     val flow = Flow[Int]
-      .filter(100 / _ < 50).map(elem => 100 / (5 - elem))
+      .filter(100 / _ < 50).map(elem ⇒ 100 / (5 - elem))
       .withAttributes(ActorAttributes.supervisionStrategy(decider))
     val source = Source(0 to 5).via(flow)
@@ -70,11 +70,11 @@ class FlowErrorDocSpec extends AkkaSpec {
     //#restart-section
     implicit val materializer = ActorMaterializer()
     val decider: Supervision.Decider = {
-      case _: IllegalArgumentException => Supervision.Restart
-      case _ => Supervision.Stop
+      case _: IllegalArgumentException ⇒ Supervision.Restart
+      case _ ⇒ Supervision.Stop
     }
     val flow = Flow[Int]
-      .scan(0) { (acc, elem) =>
+      .scan(0) { (acc, elem) ⇒
         if (elem < 0) throw new IllegalArgumentException("negative not allowed")
         else acc + elem
       }
@@ -92,11 +92,11 @@ class FlowErrorDocSpec extends AkkaSpec {
   "demonstrate recover" in {
     implicit val materializer = ActorMaterializer()
     //#recover
-    Source(0 to 6).map(n =>
+    Source(0 to 6).map(n ⇒
       if (n < 5) n.toString
       else throw new RuntimeException("Boom!")
     ).recover {
-      case _: RuntimeException => "stream truncated"
+      case _: RuntimeException ⇒ "stream truncated"
     }.runForeach(println)
     //#recover
@@ -118,11 +118,11 @@ stream truncated
     //#recoverWithRetries
     val planB = Source(List("five", "six", "seven", "eight"))

-    Source(0 to 10).map(n =>
+    Source(0 to 10).map(n ⇒
       if (n < 5) n.toString
       else throw new RuntimeException("Boom!")
     ).recoverWithRetries(attempts = 1, {
-      case _: RuntimeException => planB
+      case _: RuntimeException ⇒ planB
     }).runForeach(println)
     //#recoverWithRetries
diff --git a/akka-docs/src/test/scala/docs/stream/FlowParallelismDocSpec.scala b/akka-docs/src/test/scala/docs/stream/FlowParallelismDocSpec.scala
index 2c4e6daec2..afc18055bb 100644
--- a/akka-docs/src/test/scala/docs/stream/FlowParallelismDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/FlowParallelismDocSpec.scala
@@ -37,9 +37,9 @@ class FlowParallelismDocSpec extends AkkaSpec {
   "Demonstrate parallel processing" in {
     //#parallelism
     val fryingPan: Flow[ScoopOfBatter, Pancake, NotUsed] =
-      Flow[ScoopOfBatter].map { batter => Pancake() }
+      Flow[ScoopOfBatter].map { batter ⇒ Pancake() }

-    val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
+    val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
       val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
       val mergePancakes = builder.add(Merge[Pancake](2))
@@ -60,7 +60,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
   "Demonstrate parallelized pipelines" in {
     //#parallel-pipeline
     val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] =
-      Flow.fromGraph(GraphDSL.create() { implicit builder =>
+      Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
         val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
         val mergePancakes = builder.add(Merge[Pancake](2))
@@ -78,7 +78,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
   "Demonstrate pipelined parallel processing" in {
     //#pipelined-parallel
     val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] =
-      Flow.fromGraph(GraphDSL.create() { implicit builder =>
+      Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
         val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
         val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2))
@@ -91,7 +91,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
       })

     val pancakeChefs2: Flow[HalfCookedPancake, Pancake, NotUsed] =
-      Flow.fromGraph(GraphDSL.create() { implicit builder =>
+      Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
         val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2))
         val mergePancakes = builder.add(Merge[Pancake](2))
diff --git a/akka-docs/src/test/scala/docs/stream/GraphCyclesSpec.scala b/akka-docs/src/test/scala/docs/stream/GraphCyclesSpec.scala
index e99adf5a9d..d4124f950f 100644
--- a/akka-docs/src/test/scala/docs/stream/GraphCyclesSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/GraphCyclesSpec.scala
@@ -9,7 +9,7 @@ class GraphCyclesSpec extends AkkaSpec {
   implicit val materializer = ActorMaterializer()

   "Cycle demonstration" must {
-    val source = Source.fromIterator(() => Iterator.from(0))
+    val source = Source.fromIterator(() ⇒ Iterator.from(0))

     "include a deadlocked cycle" in {
diff --git a/akka-docs/src/test/scala/docs/stream/GraphDSLDocSpec.scala b/akka-docs/src/test/scala/docs/stream/GraphDSLDocSpec.scala
index b0e2ff6ad5..b2e6d808ce 100644
--- a/akka-docs/src/test/scala/docs/stream/GraphDSLDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/GraphDSLDocSpec.scala
@@ -46,7 +46,7 @@ class GraphDSLDocSpec extends AkkaSpec {
   "flow connection errors" in {
     intercept[IllegalStateException] {
       //#simple-graph
-      RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
+      RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
         import GraphDSL.Implicits._
         val source1 = Source(1 to 10)
         val source2 = Source(1 to 10)
@@ -124,7 +124,7 @@ class GraphDSLDocSpec extends AkkaSpec {
     worker: Flow[In, Out, Any],
     workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = {

-    GraphDSL.create() { implicit b =>
+    GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._

       val priorityMerge = b.add(MergePreferred[In](1))
@@ -136,7 +136,7 @@ class GraphDSLDocSpec extends AkkaSpec {

       // Wire up each of the outputs of the balancer to a worker flow
       // then merge them back
-      for (i <- 0 until workerCount)
+      for (i ← 0 until workerCount)
         balance.out(i) ~> worker ~> resultsMerge.in(i)

       // We now expose the input ports of the priorityMerge and the output
@@ -159,7 +159,7 @@ class GraphDSLDocSpec extends AkkaSpec {
     val worker1 = Flow[String].map("step 1 " + _)
     val worker2 = Flow[String].map("step 2 " + _)

-    RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
+    RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._

       val priorityPool1 = b.add(PriorityWorkerPool(worker1, 4))
@@ -194,7 +194,7 @@ class GraphDSLDocSpec extends AkkaSpec {
   "access to materialized value" in {
     //#graph-dsl-matvalue
     import GraphDSL.Implicits._
-    val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
+    val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
       FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
     })
     //#graph-dsl-matvalue
@@ -204,7 +204,7 @@ class GraphDSLDocSpec extends AkkaSpec {
     //#graph-dsl-matvalue-cycle
     import GraphDSL.Implicits._
     // This cannot produce any value:
-    val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
+    val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
       // - Fold cannot complete until its upstream mapAsync completes
       // - mapAsync cannot complete until the materialized Future produced by
       //   fold completes
diff --git a/akka-docs/src/test/scala/docs/stream/GraphStageDocSpec.scala b/akka-docs/src/test/scala/docs/stream/GraphStageDocSpec.scala
index 875792e534..13a78bc0e1 100644
--- a/akka-docs/src/test/scala/docs/stream/GraphStageDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/GraphStageDocSpec.scala
@@ -115,7 +115,7 @@ class GraphStageDocSpec extends AkkaSpec {
   }

   //#one-to-one
-  class Map[A, B](f: A => B) extends GraphStage[FlowShape[A, B]] {
+  class Map[A, B](f: A ⇒ B) extends GraphStage[FlowShape[A, B]] {

     val in = Inlet[A]("Map.in")
     val out = Outlet[B]("Map.out")
@@ -145,13 +145,13 @@ class GraphStageDocSpec extends AkkaSpec {

     val result = Source(Vector("one", "two", "three"))
       .via(stringLength)
-      .runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
+      .runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)

     Await.result(result, 3.seconds) should ===(Seq(3, 3, 5))
   }

   //#many-to-one
-  class Filter[A](p: A => Boolean) extends GraphStage[FlowShape[A, A]] {
+  class Filter[A](p: A ⇒ Boolean) extends GraphStage[FlowShape[A, A]] {

     val in = Inlet[A]("Filter.in")
     val out = Outlet[A]("Filter.out")
@@ -184,7 +184,7 @@ class GraphStageDocSpec extends AkkaSpec {

     val result = Source(Vector(1, 2, 3, 4, 5, 6))
       .via(evenFilter)
-      .runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
+      .runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)

     Await.result(result, 3.seconds) should ===(Seq(2, 4, 6))
   }
@@ -237,7 +237,7 @@ class GraphStageDocSpec extends AkkaSpec {

     val result = Source(Vector(1, 2, 3))
       .via(duplicator)
-      .runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
+      .runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)

     Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
   }
@@ -277,14 +277,14 @@ class GraphStageDocSpec extends AkkaSpec {

     val result = Source(Vector(1, 2, 3))
       .via(duplicator)
-      .runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
+      .runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)

     Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
   }

   "Demonstrate chaining of graph stages" in {
-    val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) => acc :+ n)
+    val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) ⇒ acc :+ n)

     //#graph-stage-chain
     val resultFuture = Source(1 to 5)
@@ -314,7 +314,7 @@ class GraphStageDocSpec extends AkkaSpec {
       new GraphStageLogic(shape) {

         override def preStart(): Unit = {
-          val callback = getAsyncCallback[Unit] { (_) =>
+          val callback = getAsyncCallback[Unit] { (_) ⇒
             completeStage()
           }
           switch.foreach(callback.invoke)
@@ -401,7 +401,7 @@ class GraphStageDocSpec extends AkkaSpec {
       Source(Vector(1, 2, 3))
         .via(new TimedGate[Int](2.second))
         .takeWithin(250.millis)
-        .runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
+        .runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)

     Await.result(result, 3.seconds) should ===(Seq(1))
   }
@@ -526,7 +526,7 @@ class GraphStageDocSpec extends AkkaSpec {
     // tests:
     val result1 = Source(Vector(1, 2, 3))
       .via(new TwoBuffer)
-      .runFold(Vector.empty[Int])((acc, n) => acc :+ n)
+      .runFold(Vector.empty[Int])((acc, n) ⇒ acc :+ n)

     Await.result(result1, 3.seconds) should ===(Vector(1, 2, 3))
diff --git a/akka-docs/src/test/scala/docs/stream/HubsDocSpec.scala b/akka-docs/src/test/scala/docs/stream/HubsDocSpec.scala
index 892676e68c..78cca87565 100644
--- a/akka-docs/src/test/scala/docs/stream/HubsDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/HubsDocSpec.scala
@@ -60,8 +60,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       val fromProducer: Source[String, NotUsed] = runnableGraph.run()

       // Print out messages from the producer in two independent consumers
-      fromProducer.runForeach(msg => println("consumer1: " + msg))
-      fromProducer.runForeach(msg => println("consumer2: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer1: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer2: " + msg))
       //#broadcast-hub
     }
@@ -109,7 +109,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       //#partition-hub
       // A simple producer that publishes a new "message-" every second
       val producer = Source.tick(1.second, 1.second, "message")
-        .zipWith(Source(1 to 100))((a, b) => s"$a-$b")
+        .zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")

       // Attach a PartitionHub Sink to the producer. This will materialize to a
       // corresponding Source.
@@ -117,7 +117,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       // value to the left is used)
       val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
         producer.toMat(PartitionHub.sink(
-          (size, elem) => math.abs(elem.hashCode) % size,
+          (size, elem) ⇒ math.abs(elem.hashCode) % size,
           startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)

       // By running/materializing the producer, we get back a Source, which
@@ -125,8 +125,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       val fromProducer: Source[String, NotUsed] = runnableGraph.run()

       // Print out messages from the producer in two independent consumers
-      fromProducer.runForeach(msg => println("consumer1: " + msg))
-      fromProducer.runForeach(msg => println("consumer2: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer1: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer2: " + msg))
       //#partition-hub
     }
@@ -134,14 +134,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       //#partition-hub-stateful
       // A simple producer that publishes a new "message-" every second
       val producer = Source.tick(1.second, 1.second, "message")
-        .zipWith(Source(1 to 100))((a, b) => s"$a-$b")
+        .zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")

       // New instance of the partitioner function and its state is created
       // for each materialization of the PartitionHub.
       def roundRobin(): (PartitionHub.ConsumerInfo, String) ⇒ Long = {
         var i = -1L

-        (info, elem) => {
+        (info, elem) ⇒ {
           i += 1
           info.consumerIdByIdx((i % info.size).toInt)
         }
       }
@@ -153,7 +153,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       // value to the left is used)
       val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
         producer.toMat(PartitionHub.statefulSink(
-          () => roundRobin(),
+          () ⇒ roundRobin(),
           startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)

       // By running/materializing the producer, we get back a Source, which
@@ -161,8 +161,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       val fromProducer: Source[String, NotUsed] = runnableGraph.run()

       // Print out messages from the producer in two independent consumers
-      fromProducer.runForeach(msg => println("consumer1: " + msg))
-      fromProducer.runForeach(msg => println("consumer2: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer1: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer2: " + msg))
       //#partition-hub-stateful
     }
@@ -174,14 +174,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
       // Note that this is a moving target since the elements are consumed concurrently.
       val runnableGraph: RunnableGraph[Source[Int, NotUsed]] =
         producer.toMat(PartitionHub.statefulSink(
-          () => (info, elem) ⇒ info.consumerIds.minBy(id ⇒ info.queueSize(id)),
+          () ⇒ (info, elem) ⇒ info.consumerIds.minBy(id ⇒ info.queueSize(id)),
           startAfterNrOfConsumers = 2, bufferSize = 16))(Keep.right)

       val fromProducer: Source[Int, NotUsed] = runnableGraph.run()

-      fromProducer.runForeach(msg => println("consumer1: " + msg))
+      fromProducer.runForeach(msg ⇒ println("consumer1: " + msg))
       fromProducer.throttle(10, 100.millis, 10, ThrottleMode.Shaping)
-        .runForeach(msg => println("consumer2: " + msg))
+        .runForeach(msg ⇒ println("consumer2: " + msg))
       //#partition-hub-fastest
     }
diff --git a/akka-docs/src/test/scala/docs/stream/IntegrationDocSpec.scala b/akka-docs/src/test/scala/docs/stream/IntegrationDocSpec.scala
index 108115b3d1..75506a3a3a 100644
--- a/akka-docs/src/test/scala/docs/stream/IntegrationDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/IntegrationDocSpec.scala
@@ -92,7 +92,7 @@ object IntegrationDocSpec {

   class DatabaseService(probe: ActorRef) extends Actor {
     override def receive = {
-      case Save(tweet: Tweet) =>
+      case Save(tweet: Tweet) ⇒
         probe ! tweet.author.handle
         sender() ! SaveDone
     }
@@ -123,7 +123,7 @@ object IntegrationDocSpec {
   //#ask-actor
   class Translator extends Actor {
     def receive = {
-      case word: String =>
+      case word: String ⇒
         // ... process message
         val reply = word.toUpperCase
         sender() ! reply // reply to the ask
@@ -148,7 +148,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
       Source(List("hello", "hi"))

     words
-      .mapAsync(parallelism = 5)(elem => (ref ? elem).mapTo[String])
+      .mapAsync(parallelism = 5)(elem ⇒ (ref ? elem).mapTo[String])
       // continue processing of the replies from the actor
       .map(_.toLowerCase)
       .runWith(Sink.ignore)
@@ -170,14 +170,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
     //#email-addresses-mapAsync
     val emailAddresses: Source[String, NotUsed] =
       authors
-        .mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
-        .collect { case Some(emailAddress) => emailAddress }
+        .mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
+        .collect { case Some(emailAddress) ⇒ emailAddress }
     //#email-addresses-mapAsync

     //#send-emails
     val sendEmails: RunnableGraph[NotUsed] =
       emailAddresses
-        .mapAsync(4)(address => {
+        .mapAsync(4)(address ⇒ {
           emailServer.send(
             Email(to = address, title = "Akka", body = "I like your tweet"))
         })
@@ -206,7 +206,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {

     val emailAddresses: Source[String, NotUsed] =
       authors.via(
-        Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
+        Flow[Author].mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
           .withAttributes(supervisionStrategy(resumingDecider)))
     //#email-addresses-mapAsync-supervision
   }
@@ -222,12 +222,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {

     val emailAddresses: Source[String, NotUsed] =
       authors
-        .mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle))
-        .collect { case Some(emailAddress) => emailAddress }
+        .mapAsyncUnordered(4)(author ⇒ addressSystem.lookupEmail(author.handle))
+        .collect { case Some(emailAddress) ⇒ emailAddress }

     val sendEmails: RunnableGraph[NotUsed] =
       emailAddresses
-        .mapAsyncUnordered(4)(address => {
+        .mapAsyncUnordered(4)(address ⇒ {
           emailServer.send(
             Email(to = address, title = "Akka", body = "I like your tweet"))
         })
@@ -254,15 +254,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
     val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)

     val phoneNumbers =
-      authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
-        .collect { case Some(phoneNo) => phoneNo }
+      authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
+        .collect { case Some(phoneNo) ⇒ phoneNo }

     //#blocking-mapAsync
     val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")

     val sendTextMessages: RunnableGraph[NotUsed] =
       phoneNumbers
-        .mapAsync(4)(phoneNo => {
+        .mapAsync(4)(phoneNo ⇒ {
           Future {
             smsServer.send(
               TextMessage(to = phoneNo, body = "I like your tweet"))
@@ -291,12 +291,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
     val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)

     val phoneNumbers =
-      authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
-        .collect { case Some(phoneNo) => phoneNo }
+      authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
+        .collect { case Some(phoneNo) ⇒ phoneNo }

     //#blocking-map
     val send = Flow[String]
-      .map { phoneNo =>
+      .map { phoneNo ⇒
         smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet"))
       }
       .withAttributes(ActorAttributes.dispatcher("blocking-dispatcher"))
@@ -327,7 +327,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
     implicit val timeout = Timeout(3.seconds)
     val saveTweets: RunnableGraph[NotUsed] =
       akkaTweets
-        .mapAsync(4)(tweet => database ? Save(tweet))
+        .mapAsync(4)(tweet ⇒ database ? Save(tweet))
         .to(Sink.ignore)
     //#save-tweets
@@ -357,9 +357,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
       ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))

     Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
-      .map(elem => { println(s"before: $elem"); elem })
+      .map(elem ⇒ { println(s"before: $elem"); elem })
       .mapAsync(4)(service.convert)
-      .runForeach(elem => println(s"after: $elem"))
+      .runForeach(elem ⇒ println(s"after: $elem"))
     //#sometimes-slow-mapAsync

     probe.expectMsg("after: A")
@@ -389,9 +389,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
       ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))

     Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
-      .map(elem => { println(s"before: $elem"); elem })
+      .map(elem ⇒ { println(s"before: $elem"); elem })
       .mapAsyncUnordered(4)(service.convert)
-      .runForeach(elem => println(s"after: $elem"))
+      .runForeach(elem ⇒ println(s"after: $elem"))
     //#sometimes-slow-mapAsyncUnordered

     probe.receiveN(10).toSet should be(Set(
diff --git a/akka-docs/src/test/scala/docs/stream/MigrationsScala.scala b/akka-docs/src/test/scala/docs/stream/MigrationsScala.scala
index cb33af1ca2..c62c22a1f9 100644
--- a/akka-docs/src/test/scala/docs/stream/MigrationsScala.scala
+++ b/akka-docs/src/test/scala/docs/stream/MigrationsScala.scala
@@ -15,7 +15,7 @@ class MigrationsScala extends AkkaSpec {
       Flow[Int].expand(Iterator.continually(_))
       //#expand-continually
       //#expand-state
-      Flow[Int].expand(i => {
+      Flow[Int].expand(i ⇒ {
         var state = 0
         Iterator.continually({
           state += 1
diff --git a/akka-docs/src/test/scala/docs/stream/QuickStartDocSpec.scala b/akka-docs/src/test/scala/docs/stream/QuickStartDocSpec.scala
index c83dc45e20..b8c34413c0 100644
--- a/akka-docs/src/test/scala/docs/stream/QuickStartDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/QuickStartDocSpec.scala
@@ -42,15 +42,15 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
     //#create-source

     //#run-source
-    source.runForeach(i => println(i))(materializer)
+    source.runForeach(i ⇒ println(i))(materializer)
     //#run-source

     //#transform-source
-    val factorials = source.scan(BigInt(1))((acc, next) => acc * next)
+    val factorials = source.scan(BigInt(1))((acc, next) ⇒ acc * next)

     val result: Future[IOResult] =
       factorials
-        .map(num => ByteString(s"$num\n"))
+        .map(num ⇒ ByteString(s"$num\n"))
         .runWith(FileIO.toPath(Paths.get("factorials.txt")))
     //#transform-source
@@ -60,7 +60,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
     //#add-streams
     factorials
-      .zipWith(Source(0 to 100))((num, idx) => s"$idx! = $num")
+      .zipWith(Source(0 to 100))((num, idx) ⇒ s"$idx! = $num")
       .throttle(1, 1.second, 1, ThrottleMode.shaping)
       //#add-streams
       .take(3)
@@ -69,10 +69,10 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
     //#add-streams

     //#run-source-and-terminate
-    val done: Future[Done] = source.runForeach(i => println(i))(materializer)
+    val done: Future[Done] = source.runForeach(i ⇒ println(i))(materializer)

     implicit val ec = system.dispatcher
-    done.onComplete(_ => system.terminate())
+    done.onComplete(_ ⇒ system.terminate())
     //#run-source-and-terminate

     done.futureValue
@@ -81,7 +81,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
   //#transform-sink
   def lineSink(filename: String): Sink[String, Future[IOResult]] =
     Flow[String]
-      .map(s => ByteString(s + "\n"))
+      .map(s ⇒ ByteString(s + "\n"))
       .toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)
   //#transform-sink
diff --git a/akka-docs/src/test/scala/docs/stream/RateTransformationDocSpec.scala b/akka-docs/src/test/scala/docs/stream/RateTransformationDocSpec.scala
index e0f82df6ed..7c9372ffa6 100644
--- a/akka-docs/src/test/scala/docs/stream/RateTransformationDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/RateTransformationDocSpec.scala
@@ -25,15 +25,15 @@ class RateTransformationDocSpec extends AkkaSpec {
     //#conflate-summarize
     val statsFlow = Flow[Double]
       .conflateWithSeed(Seq(_))(_ :+ _)
-      .map { s =>
+      .map { s ⇒
         val μ = s.sum / s.size
-        val se = s.map(x => pow(x - μ, 2))
+        val se = s.map(x ⇒ pow(x - μ, 2))
         val σ = sqrt(se.sum / se.size)
         (σ, μ, s.size)
       }
     //#conflate-summarize

-    val fut = Source.fromIterator(() => Iterator.continually(Random.nextGaussian))
+    val fut = Source.fromIterator(() ⇒ Iterator.continually(Random.nextGaussian))
       .via(statsFlow)
       .grouped(10)
       .runWith(Sink.head)
@@ -46,8 +46,8 @@ class RateTransformationDocSpec extends AkkaSpec {
     val p = 0.01
     val sampleFlow = Flow[Double]
       .conflateWithSeed(Seq(_)) {
-        case (acc, elem) if Random.nextDouble < p => acc :+ elem
-        case (acc, _) => acc
+        case (acc, elem) if Random.nextDouble < p ⇒ acc :+ elem
+        case (acc, _) ⇒ acc
       }
       .mapConcat(identity)
     //#conflate-sample
@@ -81,11 +81,11 @@ class RateTransformationDocSpec extends AkkaSpec {
   "expand should track drift" in {
     //#expand-drift
     val driftFlow = Flow[Double]
-      .expand(i => Iterator.from(0).map(i -> _))
+      .expand(i ⇒ Iterator.from(0).map(i -> _))
     //#expand-drift
     val latch = TestLatch(2)
     val realDriftFlow = Flow[Double]
-      .expand(d => { latch.countDown(); Iterator.from(0).map(d -> _) })
+      .expand(d ⇒ { latch.countDown(); Iterator.from(0).map(d -> _) })

     val (pub, sub) = TestSource.probe[Double]
       .via(realDriftFlow)
diff --git a/akka-docs/src/test/scala/docs/stream/ReactiveStreamsDocSpec.scala b/akka-docs/src/test/scala/docs/stream/ReactiveStreamsDocSpec.scala
index 71733e37dd..674ea511bb 100644
--- a/akka-docs/src/test/scala/docs/stream/ReactiveStreamsDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/ReactiveStreamsDocSpec.scala
@@ -141,7 +141,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
     // An example Processor factory
     def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run()

-    val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() => createProcessor)
+    val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() ⇒ createProcessor)
     //#use-processor
   }
diff --git a/akka-docs/src/test/scala/docs/stream/RestartDocSpec.scala b/akka-docs/src/test/scala/docs/stream/RestartDocSpec.scala
index 9c75af6117..71fe4ee24d 100644
--- a/akka-docs/src/test/scala/docs/stream/RestartDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/RestartDocSpec.scala
@@ -38,7 +38,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
       minBackoff = 3.seconds,
       maxBackoff = 30.seconds,
       randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
-    ) { () =>
+    ) { () ⇒
       // Create a source from a future of a source
       Source.fromFutureSource {
         // Make a single request with akka-http
@@ -54,7 +54,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
     //#with-kill-switch
     val killSwitch = restartSource
       .viaMat(KillSwitches.single)(Keep.right)
-      .toMat(Sink.foreach(event => println(s"Got event: $event")))(Keep.left)
+      .toMat(Sink.foreach(event ⇒ println(s"Got event: $event")))(Keep.left)
       .run()

     doSomethingElse()
@@ -64,4 +64,4 @@
   }
 }
-}
\ No newline at end of file
+}
diff --git a/akka-docs/src/test/scala/docs/stream/StreamBuffersRateSpec.scala b/akka-docs/src/test/scala/docs/stream/StreamBuffersRateSpec.scala
index eee8237169..8839f31386 100644
--- a/akka-docs/src/test/scala/docs/stream/StreamBuffersRateSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/StreamBuffersRateSpec.scala
@@ -12,9 +12,9 @@ class StreamBuffersRateSpec extends AkkaSpec {
     def println(s: Any) = ()
     //#pipelining
     Source(1 to 3)
-      .map { i => println(s"A: $i"); i }.async
-      .map { i => println(s"B: $i"); i }.async
-      .map { i => println(s"C: $i"); i }.async
+      .map { i ⇒ println(s"A: $i"); i }.async
+      .map { i ⇒ println(s"B: $i"); i }.async
+      .map { i ⇒ println(s"C: $i"); i }.async
       .runWith(Sink.ignore)
     //#pipelining
   }
@@ -40,16 +40,16 @@ class StreamBuffersRateSpec extends AkkaSpec {
     import scala.concurrent.duration._
     case class Tick()

-    RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
+    RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._

       // this is the asynchronous stage in this graph
-      val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count).async)
+      val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) ⇒ count).async)

       Source.tick(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0

       Source.tick(initialDelay = 1.second, interval = 1.second, "message!")
-        .conflateWithSeed(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1
+        .conflateWithSeed(seed = (_) ⇒ 1)((count, _) ⇒ count + 1) ~> zipper.in1

       zipper.out ~> Sink.foreach(println)
       ClosedShape
diff --git a/akka-docs/src/test/scala/docs/stream/StreamPartialGraphDSLDocSpec.scala b/akka-docs/src/test/scala/docs/stream/StreamPartialGraphDSLDocSpec.scala
index fad5510af7..2755079a96 100644
--- a/akka-docs/src/test/scala/docs/stream/StreamPartialGraphDSLDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/StreamPartialGraphDSLDocSpec.scala
@@ -19,7 +19,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {

   "build with open ports" in {
     //#simple-partial-graph-dsl
-    val pickMaxOfThree = GraphDSL.create() { implicit b =>
+    val pickMaxOfThree = GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._

       val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
@@ -31,7 +31,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {

     val resultSink = Sink.head[Int]

-    val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b => sink =>
+    val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b ⇒ sink ⇒
       import GraphDSL.Implicits._

       // importing the partial graph will return its shape (inlets & outlets)
@@ -51,12 +51,12 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {

   "build source from partial graph" in {
     //#source-from-partial-graph-dsl
-    val pairs = Source.fromGraph(GraphDSL.create() { implicit b =>
+    val pairs = Source.fromGraph(GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._

       // prepare graph elements
       val zip = b.add(Zip[Int, Int]())
-      def ints = Source.fromIterator(() => Iterator.from(1))
+      def ints = Source.fromIterator(() ⇒ Iterator.from(1))

       // connect the graph
       ints.filter(_ % 2 != 0) ~> zip.in0
@@ -74,7 +74,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
   "build flow from partial graph" in {
     //#flow-from-partial-graph-dsl
     val pairUpWithToString =
-      Flow.fromGraph(GraphDSL.create() { implicit b =>
+      Flow.fromGraph(GraphDSL.create() { implicit b ⇒
         import GraphDSL.Implicits._

         // prepare graph elements
@@ -116,7 +116,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
     val actorRef: ActorRef = testActor
     //#sink-combine
     val sendRmotely = Sink.actorRef(actorRef, "Done")
-    val localProcessing = Sink.foreach[Int](_ => /* do something usefull */ ())
+    val localProcessing = Sink.foreach[Int](_ ⇒ /* do something usefull */ ())

     val sink = Sink.combine(sendRmotely, localProcessing)(Broadcast[Int](_))
diff --git a/akka-docs/src/test/scala/docs/stream/StreamTestKitDocSpec.scala b/akka-docs/src/test/scala/docs/stream/StreamTestKitDocSpec.scala
index 645ecdec26..3cb014ed16 100644
--- a/akka-docs/src/test/scala/docs/stream/StreamTestKitDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/StreamTestKitDocSpec.scala
@@ -137,7 +137,7 @@ class StreamTestKitDocSpec extends AkkaSpec {
   "test source and a sink" in {
     import system.dispatcher
     //#test-source-and-sink
-    val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep =>
+    val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep ⇒
       pattern.after(10.millis * sleep, using = system.scheduler)(Future.successful(sleep))
     }
diff --git a/akka-docs/src/test/scala/docs/stream/TwitterStreamQuickstartDocSpec.scala b/akka-docs/src/test/scala/docs/stream/TwitterStreamQuickstartDocSpec.scala
index b245b6baa3..a64e0bb810 100644
--- a/akka-docs/src/test/scala/docs/stream/TwitterStreamQuickstartDocSpec.scala
+++ b/akka-docs/src/test/scala/docs/stream/TwitterStreamQuickstartDocSpec.scala
@@ -30,7 +30,7 @@ object TwitterStreamQuickstartDocSpec {
   final case class Tweet(author: Author, timestamp: Long, body: String) {
     def hashtags: Set[Hashtag] =
       body.split(" ").collect {
-        case t if t.startsWith("#") => Hashtag(t.replaceAll("[^#\\w]", ""))
+        case t if t.startsWith("#") ⇒ Hashtag(t.replaceAll("[^#\\w]", ""))
       }.toSet
   }
@@ -98,7 +98,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
   trait Example3 {
     //#authors-collect
     val authors: Source[Author, NotUsed] =
-      tweets.collect { case t if t.hashtags.contains(akkaTag) => t.author }
+      tweets.collect { case t if t.hashtags.contains(akkaTag) ⇒ t.author }
     //#authors-collect
   }
@@ -184,8 +184,8 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
     //#backpressure-by-readline
     val completion: Future[Done] =
       Source(1 to 10)
-        .map(i => { println(s"map => $i"); i })
-        .runForeach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }
+        .map(i ⇒ { println(s"map => $i"); i })
+        .runForeach { i ⇒ readLine(s"Element = $i; continue reading? [press enter]\n") }

     Await.ready(completion, 1.minute)
     //#backpressure-by-readline
@@ -194,7 +194,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {

   "count elements on finite stream" in {
     //#tweets-fold-count
-    val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ => 1)
+    val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ ⇒ 1)

     val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
@@ -205,12 +205,12 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {

     val sum: Future[Int] = counterGraph.run()

-    sum.foreach(c => println(s"Total tweets processed: $c"))
+    sum.foreach(c ⇒ println(s"Total tweets processed: $c"))
     //#tweets-fold-count

     new AnyRef {
       //#tweets-fold-count-oneline
-      val sum: Future[Int] = tweets.map(t => 1).runWith(sumSink)
+      val sum: Future[Int] = tweets.map(t ⇒ 1).runWith(sumSink)
       //#tweets-fold-count-oneline
     }
   }
@@ -223,7 +223,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
     val counterRunnableGraph: RunnableGraph[Future[Int]] =
       tweetsInMinuteFromNow
         .filter(_.hashtags contains akkaTag)
-        .map(t => 1)
+        .map(t ⇒ 1)
         .toMat(sumSink)(Keep.right)

     // materialize the stream once in the morning
@@ -235,7 +235,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {

     val sum: Future[Int] = counterRunnableGraph.run()

-    sum.map { c => println(s"Total tweets processed: $c") }
+    sum.map { c ⇒ println(s"Total tweets processed: $c") }
   }
 }
diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeDroppyBroadcast.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeDroppyBroadcast.scala
index 72cf648c2e..cf92198e48 100644
--- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeDroppyBroadcast.scala
+++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeDroppyBroadcast.scala
@@ -23,7 +23,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
       val mySink3 = Sink.fromSubscriber(sub3)

       //#droppy-bcast
-      val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b => (sink1, sink2, sink3) =>
+      val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b ⇒ (sink1, sink2, sink3) ⇒
         import GraphDSL.Implicits._

         val bcast = b.add(Broadcast[Int](3))
@@ -39,7 +39,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
       graph.run()

       sub3.request(100)
-      for (i <- 1 to 100) {
+      for (i ← 1 to 100) {
         pub.sendNext(i)
         sub3.expectNext(i)
       }
@@ -49,7 +49,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
       sub1.expectSubscription().request(10)
       sub2.expectSubscription().request(10)

-      for (i <- 91 to 100) {
+      for (i ← 91 to 100) {
         sub1.expectNext(i)
         sub2.expectNext(i)
       }
diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeGlobalRateLimit.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeGlobalRateLimit.scala
index 89615b5a17..4b5f3d1cfd 100644
--- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeGlobalRateLimit.scala
+++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeGlobalRateLimit.scala
@@ -45,19 +45,19 @@ class RecipeGlobalRateLimit extends RecipeSpec {
     override def receive: Receive = open

     val open: Receive = {
-      case ReplenishTokens =>
+      case ReplenishTokens ⇒
        permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
-      case WantToPass =>
+      case WantToPass ⇒
        permitTokens -= 1
        sender() ! MayPass
        if (permitTokens == 0) context.become(closed)
     }

     val closed: Receive = {
-      case ReplenishTokens =>
+      case ReplenishTokens ⇒
        permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
        releaseWaiting()
-      case WantToPass =>
+      case WantToPass ⇒
        waitQueue = waitQueue.enqueue(sender())
     }
@@ -82,11 +82,11 @@ class RecipeGlobalRateLimit extends RecipeSpec {
   def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, NotUsed] = {
     import akka.pattern.ask
     import akka.util.Timeout
-    Flow[T].mapAsync(4)((element: T) => {
+    Flow[T].mapAsync(4)((element: T) ⇒ {
       import system.dispatcher
       implicit val triggerTimeout = Timeout(maxAllowedWait)
       val limiterTriggerFuture = limiter ? Limiter.WantToPass
-      limiterTriggerFuture.map((_) => element)
+      limiterTriggerFuture.map((_) ⇒ element)
     })
   }
@@ -95,12 +95,12 @@ class RecipeGlobalRateLimit extends RecipeSpec {
     // Use a large period and emulate the timer by hand instead
     val limiter = system.actorOf(Limiter.props(2, 100.days, 1), "limiter")

-    val source1 = Source.fromIterator(() => Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
-    val source2 = Source.fromIterator(() => Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))
+    val source1 = Source.fromIterator(() ⇒ Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
+    val source2 = Source.fromIterator(() ⇒ Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))

     val probe = TestSubscriber.manualProbe[String]()

-    RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
+    RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
       import GraphDSL.Implicits._
       val merge = b.add(Merge[String](2))
       source1 ~> merge ~> Sink.fromSubscriber(probe)
@@ -119,7 +119,7 @@ class RecipeGlobalRateLimit extends RecipeSpec {
     probe.expectNoMsg(500.millis)

     var resultSet = Set.empty[String]
-    for (_ <- 1 to 100) {
      limiter ! 
Limiter.ReplenishTokens resultSet += probe.expectNext() } diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeKeepAlive.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeKeepAlive.scala index 532628a421..dd24998144 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeKeepAlive.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeKeepAlive.scala @@ -15,7 +15,7 @@ class RecipeKeepAlive extends RecipeSpec { //#inject-keepalive import scala.concurrent.duration._ val injectKeepAlive: Flow[ByteString, ByteString, NotUsed] = - Flow[ByteString].keepAlive(1.second, () => keepaliveMessage) + Flow[ByteString].keepAlive(1.second, () ⇒ keepaliveMessage) //#inject-keepalive // No need to test, this is a built-in stage with proper tests diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeLoggingElements.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeLoggingElements.scala index 581d323ba6..7f262377e6 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeLoggingElements.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeLoggingElements.scala @@ -16,7 +16,7 @@ class RecipeLoggingElements extends RecipeSpec { val mySource = Source(List("1", "2", "3")) //#println-debug - val loggedSource = mySource.map { elem => println(elem); elem } + val loggedSource = mySource.map { elem ⇒ println(elem); elem } //#println-debug loggedSource.runWith(Sink.ignore) diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeManualTrigger.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeManualTrigger.scala index 3bb4cfa2a8..c30f0cffc2 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeManualTrigger.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeManualTrigger.scala @@ -18,12 +18,12 @@ class RecipeManualTrigger extends RecipeSpec { val sink = Sink.fromSubscriber(sub) //#manually-triggered-stream - val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder => + val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒ import GraphDSL.Implicits._ val zip = builder.add(Zip[Message, Trigger]()) elements ~> zip.in0 triggerSource ~> zip.in1 - zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) => msg } ~> sink + zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) ⇒ msg } ~> sink ClosedShape }) //#manually-triggered-stream @@ -57,9 +57,9 @@ class RecipeManualTrigger extends RecipeSpec { val sink = Sink.fromSubscriber(sub) //#manually-triggered-stream-zipwith - val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder => + val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒ import GraphDSL.Implicits._ - val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) => msg)) + val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) ⇒ msg)) elements ~> zip.in0 triggerSource ~> zip.in1 diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMissedTicks.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMissedTicks.scala index 28c384dfbc..a8c12ecd9e 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMissedTicks.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMissedTicks.scala @@ -21,13 +21,13 @@ class RecipeMissedTicks extends RecipeSpec { //#missed-ticks val missedTicks: Flow[Tick, Int, NotUsed] = - Flow[Tick].conflateWithSeed(seed = (_) => 0)( - (missedTicks, tick) => missedTicks + 1) + Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)( + (missedTicks, tick) ⇒ 
missedTicks + 1) //#missed-ticks val latch = TestLatch(3) val realMissedTicks: Flow[Tick, Int, NotUsed] = - Flow[Tick].conflateWithSeed(seed = (_) => 0)( - (missedTicks, tick) => { latch.countDown(); missedTicks + 1 }) + Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)( + (missedTicks, tick) ⇒ { latch.countDown(); missedTicks + 1 }) tickStream.via(realMissedTicks).to(sink).run() diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMultiGroupBy.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMultiGroupBy.scala index 76f1dc667d..9551c60d86 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMultiGroupBy.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeMultiGroupBy.scala @@ -16,15 +16,15 @@ class RecipeMultiGroupBy extends RecipeSpec { case class Topic(name: String) val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e")) - val extractTopics = { msg: Message => + val extractTopics = { msg: Message ⇒ if (msg.startsWith("1")) List(Topic("1")) else List(Topic("1"), Topic("2")) } //#multi-groupby - val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics + val topicMapper: (Message) ⇒ immutable.Seq[Topic] = extractTopics - val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message => + val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message ⇒ val topicsForMessage = topicMapper(msg) // Create a (Msg, Topic) pair for each of the topics // the message belongs to @@ -33,7 +33,7 @@ class RecipeMultiGroupBy extends RecipeSpec { val multiGroups = messageAndTopic .groupBy(2, _._2).map { - case (msg, topic) => + case (msg, topic) ⇒ // do what needs to be done //#multi-groupby (msg, topic) @@ -44,7 +44,7 @@ class RecipeMultiGroupBy extends RecipeSpec { val result = multiGroups .grouped(10) .mergeSubstreams - .map(g => g.head._2.name + g.map(_._1).mkString("[", ", ", "]")) + .map(g ⇒ g.head._2.name + g.map(_._1).mkString("[", ", ", "]")) .limit(10) .runWith(Sink.seq) diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeReduceByKey.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeReduceByKey.scala index 908dd7d797..73efc873f5 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeReduceByKey.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeReduceByKey.scala @@ -24,7 +24,7 @@ class RecipeReduceByKey extends RecipeSpec { //transform each element to pair with number of words in it .map(_ -> 1) // add counting logic to the streams - .reduce((l, r) => (l._1, l._2 + r._2)) + .reduce((l, r) ⇒ (l._1, l._2 + r._2)) // get a stream of word counts .mergeSubstreams //#word-count @@ -45,21 +45,21 @@ class RecipeReduceByKey extends RecipeSpec { //#reduce-by-key-general def reduceByKey[In, K, Out]( maximumGroupSize: Int, - groupKey: (In) => K, - map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = { + groupKey: (In) ⇒ K, + map: (In) ⇒ Out)(reduce: (Out, Out) ⇒ Out): Flow[In, (K, Out), NotUsed] = { Flow[In] .groupBy[K](maximumGroupSize, groupKey) - .map(e => groupKey(e) -> map(e)) - .reduce((l, r) => l._1 -> reduce(l._2, r._2)) + .map(e ⇒ groupKey(e) -> map(e)) + .reduce((l, r) ⇒ l._1 -> reduce(l._2, r._2)) .mergeSubstreams } val wordCounts = words.via( reduceByKey( MaximumDistinctWords, - groupKey = (word: String) => word, - map = (word: String) => 1)((left: Int, right: Int) => left + right)) + groupKey = (word: String) ⇒ word, + map = (word: String) ⇒ 1)((left: Int, right: Int) ⇒ left + right)) //#reduce-by-key-general 
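// Editor's note: a minimal usage sketch (not part of the original patch) of the
// reduceByKey flow defined above, assuming the surrounding spec's imports and its
// MaximumDistinctWords limit are in scope; all other names are illustrative only.
// It keeps the longest word per initial letter, assuming non-empty words.
val longestPerInitial: Flow[String, (Char, String), NotUsed] =
  reduceByKey(
    maximumGroupSize = MaximumDistinctWords,
    groupKey = (word: String) ⇒ word.head,
    map = (word: String) ⇒ word)((left: String, right: String) ⇒ if (left.length >= right.length) left else right)
// Source(List("apple", "avocado", "banana")).via(longestPerInitial).runWith(Sink.seq)
// would yield ('a', "avocado") and ('b', "banana"), in some order.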
Await.result(wordCounts.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set( diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeSimpleDrop.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeSimpleDrop.scala index 600c7e8088..35d4f41db6 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeSimpleDrop.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeSimpleDrop.scala @@ -15,11 +15,11 @@ class RecipeSimpleDrop extends RecipeSpec { //#simple-drop val droppyStream: Flow[Message, Message, NotUsed] = - Flow[Message].conflate((lastMessage, newMessage) => newMessage) + Flow[Message].conflate((lastMessage, newMessage) ⇒ newMessage) //#simple-drop val latch = TestLatch(2) val realDroppyStream = - Flow[Message].conflate((lastMessage, newMessage) => { latch.countDown(); newMessage }) + Flow[Message].conflate((lastMessage, newMessage) ⇒ { latch.countDown(); newMessage }) val pub = TestPublisher.probe[Message]() val sub = TestSubscriber.manualProbe[Message]() diff --git a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeWorkerPool.scala b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeWorkerPool.scala index 4ccb3d2f1a..3dd1c50b02 100644 --- a/akka-docs/src/test/scala/docs/stream/cookbook/RecipeWorkerPool.scala +++ b/akka-docs/src/test/scala/docs/stream/cookbook/RecipeWorkerPool.scala @@ -22,11 +22,11 @@ class RecipeWorkerPool extends RecipeSpec { def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = { import GraphDSL.Implicits._ - Flow.fromGraph(GraphDSL.create() { implicit b => + Flow.fromGraph(GraphDSL.create() { implicit b ⇒ val balancer = b.add(Balance[In](workerCount, waitForAllDownstreams = true)) val merge = b.add(Merge[Out](workerCount)) - for (_ <- 1 to workerCount) { + for (_ ← 1 to workerCount) { // for each worker, add an edge from the balancer to the worker, then wire // it to the merge element balancer ~> worker.async ~> merge diff --git a/akka-docs/src/test/scala/docs/stream/io/StreamTcpDocSpec.scala b/akka-docs/src/test/scala/docs/stream/io/StreamTcpDocSpec.scala index 943e5781db..fd27ebcb23 100644 --- a/akka-docs/src/test/scala/docs/stream/io/StreamTcpDocSpec.scala +++ b/akka-docs/src/test/scala/docs/stream/io/StreamTcpDocSpec.scala @@ -29,9 +29,9 @@ class StreamTcpDocSpec extends AkkaSpec { val binding: Future[ServerBinding] = Tcp().bind("127.0.0.1", 8888).to(Sink.ignore).run() - binding.map { b => + binding.map { b ⇒ b.unbind() onComplete { - case _ => // ... + case _ ⇒ // ... } } //#echo-server-simple-bind @@ -43,7 +43,7 @@ class StreamTcpDocSpec extends AkkaSpec { val connections: Source[IncomingConnection, Future[ServerBinding]] = Tcp().bind(host, port) - connections runForeach { connection => + connections runForeach { connection ⇒ println(s"New connection from: ${connection.remoteAddress}") val echo = Flow[ByteString] @@ -69,7 +69,7 @@ class StreamTcpDocSpec extends AkkaSpec { import akka.stream.scaladsl.Framing //#welcome-banner-chat-server - connections.runForeach { connection => + connections.runForeach { connection ⇒ // server logic, parses incoming commands val commandParser = Flow[String].takeWhile(_ != "BYE").map(_ + "!") @@ -85,7 +85,7 @@ class StreamTcpDocSpec extends AkkaSpec { allowTruncation = true)) .map(_.utf8String) //#welcome-banner-chat-server - .map { command => serverProbe.ref ! command; command } + .map { command ⇒ serverProbe.ref ! 
command; command } //#welcome-banner-chat-server .via(commandParser) // merge in the initial banner after parser @@ -102,8 +102,8 @@ class StreamTcpDocSpec extends AkkaSpec { val input = new AtomicReference("Hello world" :: "What a lovely day" :: Nil) def readLine(prompt: String): String = { input.get() match { - case all @ cmd :: tail if input.compareAndSet(all, tail) => cmd - case _ => "q" + case all @ cmd :: tail if input.compareAndSet(all, tail) ⇒ cmd + case _ ⇒ "q" } } @@ -120,7 +120,7 @@ class StreamTcpDocSpec extends AkkaSpec { val replParser = Flow[String].takeWhile(_ != "q") .concat(Source.single("BYE")) - .map(elem => ByteString(s"$elem\n")) + .map(elem ⇒ ByteString(s"$elem\n")) val repl = Flow[ByteString] .via(Framing.delimiter( @@ -128,8 +128,8 @@ class StreamTcpDocSpec extends AkkaSpec { maximumFrameLength = 256, allowTruncation = true)) .map(_.utf8String) - .map(text => println("Server: " + text)) - .map(_ => readLine("> ")) + .map(text ⇒ println("Server: " + text)) + .map(_ ⇒ readLine("> ")) .via(replParser) connection.join(repl).run() diff --git a/akka-docs/src/test/scala/docs/testkit/ParentChildSpec.scala b/akka-docs/src/test/scala/docs/testkit/ParentChildSpec.scala index fa87dec678..7573c0fa9d 100644 --- a/akka-docs/src/test/scala/docs/testkit/ParentChildSpec.scala +++ b/akka-docs/src/test/scala/docs/testkit/ParentChildSpec.scala @@ -24,14 +24,14 @@ class Parent extends Actor { var ponged = false def receive = { - case "pingit" => child ! "ping" - case "pong" => ponged = true + case "pingit" ⇒ child ! "ping" + case "pong" ⇒ ponged = true } } class Child extends Actor { def receive = { - case "ping" => context.parent ! "pong" + case "ping" ⇒ context.parent ! "pong" } } //#test-example @@ -39,7 +39,7 @@ class Child extends Actor { //#test-dependentchild class DependentChild(parent: ActorRef) extends Actor { def receive = { - case "ping" => parent ! "pong" + case "ping" ⇒ parent ! "pong" } } //#test-dependentchild @@ -50,18 +50,18 @@ class DependentParent(childProps: Props) extends Actor { var ponged = false def receive = { - case "pingit" => child ! "ping" - case "pong" => ponged = true + case "pingit" ⇒ child ! "ping" + case "pong" ⇒ ponged = true } } -class GenericDependentParent(childMaker: ActorRefFactory => ActorRef) extends Actor { +class GenericDependentParent(childMaker: ActorRefFactory ⇒ ActorRef) extends Actor { val child = childMaker(context) var ponged = false def receive = { - case "pingit" => child ! "ping" - case "pong" => ponged = true + case "pingit" ⇒ child ! "ping" + case "pong" ⇒ ponged = true } } //#test-dependentparent @@ -72,7 +72,7 @@ class GenericDependentParent(childMaker: ActorRefFactory => ActorRef) extends Ac class MockedChild extends Actor { def receive = { - case "ping" => sender ! "pong" + case "ping" ⇒ sender ! 
"pong" } } @@ -106,7 +106,7 @@ class ParentChildSpec extends WordSpec with Matchers with TestKitBase with Befor "be tested with a child probe" in { val probe = TestProbe() //#child-maker-test - val maker = (_: ActorRefFactory) => probe.ref + val maker = (_: ActorRefFactory) ⇒ probe.ref val parent = system.actorOf(Props(classOf[GenericDependentParent], maker)) //#child-maker-test probe.send(parent, "pingit") @@ -115,7 +115,7 @@ class ParentChildSpec extends WordSpec with Matchers with TestKitBase with Befor "demonstrate production version of child creator" in { //#child-maker-prod - val maker = (f: ActorRefFactory) => f.actorOf(Props[Child]) + val maker = (f: ActorRefFactory) ⇒ f.actorOf(Props[Child]) val parent = system.actorOf(Props(classOf[GenericDependentParent], maker)) //#child-maker-prod } @@ -139,8 +139,8 @@ class ParentChildSpec extends WordSpec with Matchers with TestKitBase with Befor val parent = system.actorOf(Props(new Actor { val child = context.actorOf(Props[Child], "child") def receive = { - case x if sender == child => proxy.ref forward x - case x => child forward x + case x if sender == child ⇒ proxy.ref forward x + case x ⇒ child forward x } })) diff --git a/akka-docs/src/test/scala/docs/testkit/TestKitUsageSpec.scala b/akka-docs/src/test/scala/docs/testkit/TestKitUsageSpec.scala index 896b41e649..bb8a8f5385 100644 --- a/akka-docs/src/test/scala/docs/testkit/TestKitUsageSpec.scala +++ b/akka-docs/src/test/scala/docs/testkit/TestKitUsageSpec.scala @@ -78,7 +78,7 @@ class TestKitUsageSpec filterRef ! 1 receiveWhile(500 millis) { - case msg: String => messages = msg +: messages + case msg: String ⇒ messages = msg +: messages } } messages.length should be(3) @@ -89,12 +89,12 @@ class TestKitUsageSpec "receive an interesting message at some point " in { within(500 millis) { ignoreMsg { - case msg: String => msg != "something" + case msg: String ⇒ msg != "something" } seqRef ! "something" expectMsg("something") ignoreMsg { - case msg: String => msg == "1" + case msg: String ⇒ msg == "1" } expectNoMsg ignoreNoMsg @@ -116,7 +116,7 @@ object TestKitUsageSpec { */ class ForwardingActor(next: ActorRef) extends Actor { def receive = { - case msg => next ! msg + case msg ⇒ next ! msg } } @@ -125,8 +125,8 @@ object TestKitUsageSpec { */ class FilteringActor(next: ActorRef) extends Actor { def receive = { - case msg: String => next ! msg - case _ => None + case msg: String ⇒ next ! msg + case _ ⇒ None } } @@ -139,7 +139,7 @@ object TestKitUsageSpec { class SequencingActor(next: ActorRef, head: immutable.Seq[String], tail: immutable.Seq[String]) extends Actor { def receive = { - case msg => { + case msg ⇒ { head foreach { next ! _ } next ! msg tail foreach { next ! _ } diff --git a/akka-docs/src/test/scala/docs/testkit/TestkitDocSpec.scala b/akka-docs/src/test/scala/docs/testkit/TestkitDocSpec.scala index dfd4b76d8d..7e76289657 100644 --- a/akka-docs/src/test/scala/docs/testkit/TestkitDocSpec.scala +++ b/akka-docs/src/test/scala/docs/testkit/TestkitDocSpec.scala @@ -25,18 +25,18 @@ object TestKitDocSpec { class MyActor extends Actor { def receive = { - case Say42 => sender() ! 42 - case "some work" => sender() ! "some result" + case Say42 ⇒ sender() ! 42 + case "some work" ⇒ sender() ! 
"some result" } } class TestFsmActor extends Actor with FSM[Int, String] { startWith(1, "") when(1) { - case Event("go", _) => goto(2) using "go" + case Event("go", _) ⇒ goto(2) using "go" } when(2) { - case Event("back", _) => goto(1) using "back" + case Event("back", _) ⇒ goto(1) using "back" } } @@ -45,10 +45,10 @@ object TestKitDocSpec { var dest1: ActorRef = _ var dest2: ActorRef = _ def receive = { - case (d1: ActorRef, d2: ActorRef) => + case (d1: ActorRef, d2: ActorRef) ⇒ dest1 = d1 dest2 = d2 - case x => + case x ⇒ dest1 ! x dest2 ! x } @@ -61,13 +61,13 @@ object TestKitDocSpec { //#test-probe-forward-actors class Source(target: ActorRef) extends Actor { def receive = { - case "start" => target ! "work" + case "start" ⇒ target ! "work" } } class Destination extends Actor { def receive = { - case x => // Do something.. + case x ⇒ // Do something.. } } @@ -77,10 +77,10 @@ object TestKitDocSpec { //#logging-receive import akka.event.LoggingReceive def receive = LoggingReceive { - case msg => // Do something ... + case msg ⇒ // Do something ... } def otherState: Receive = LoggingReceive.withLabel("other") { - case msg => // Do something else ... + case msg ⇒ // Do something else ... } //#logging-receive } @@ -182,7 +182,7 @@ class TestKitDocSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { val actorRef = TestActorRef(new Actor { def receive = { - case "hello" => throw new IllegalArgumentException("boom") + case "hello" ⇒ throw new IllegalArgumentException("boom") } }) intercept[IllegalArgumentException] { actorRef.receive("hello") } @@ -230,7 +230,7 @@ class TestKitDocSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { val probe = new TestProbe(system) { def expectUpdate(x: Int) = { expectMsgPF() { - case Update(id, _) if id == x => true + case Update(id, _) if id == x ⇒ true } sender() ! "ACK" } @@ -321,7 +321,7 @@ class TestKitDocSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { //#put-your-test-code-here val probe = TestProbe() probe.send(testActor, "hello") - try expectMsg("hello") catch { case NonFatal(e) => system.terminate(); throw e } + try expectMsg("hello") catch { case NonFatal(e) ⇒ system.terminate(); throw e } //#put-your-test-code-here shutdown(system) diff --git a/akka-docs/src/test/scala/tutorial_1/ActorHierarchyExperiments.scala b/akka-docs/src/test/scala/tutorial_1/ActorHierarchyExperiments.scala index 5ae63e1228..3cf680c6aa 100644 --- a/akka-docs/src/test/scala/tutorial_1/ActorHierarchyExperiments.scala +++ b/akka-docs/src/test/scala/tutorial_1/ActorHierarchyExperiments.scala @@ -9,7 +9,7 @@ import scala.io.StdIn class PrintMyActorRefActor extends Actor { override def receive: Receive = { - case "printit" => + case "printit" ⇒ val secondRef = context.actorOf(Props.empty, "second-actor") println(s"Second: $secondRef") } @@ -27,7 +27,7 @@ class StartStopActor1 extends Actor { override def postStop(): Unit = println("first stopped") override def receive: Receive = { - case "stop" => context.stop(self) + case "stop" ⇒ context.stop(self) } } @@ -46,7 +46,7 @@ class SupervisingActor extends Actor { val child = context.actorOf(Props[SupervisedActor], "supervised-actor") override def receive: Receive = { - case "failChild" => child ! "fail" + case "failChild" ⇒ child ! 
"fail" } } @@ -55,7 +55,7 @@ class SupervisedActor extends Actor { override def postStop(): Unit = println("supervised actor stopped") override def receive: Receive = { - case "fail" => + case "fail" ⇒ println("supervised actor fails now") throw new Exception("I failed!") } diff --git a/akka-docs/src/test/scala/tutorial_3/Device.scala b/akka-docs/src/test/scala/tutorial_3/Device.scala index 577406002f..7de6f4c7d9 100644 --- a/akka-docs/src/test/scala/tutorial_3/Device.scala +++ b/akka-docs/src/test/scala/tutorial_3/Device.scala @@ -24,12 +24,12 @@ class Device(groupId: String, deviceId: String) extends Actor with ActorLogging override def postStop(): Unit = log.info("Device actor {}-{} stopped", groupId, deviceId) override def receive: Receive = { - case RecordTemperature(id, value) => + case RecordTemperature(id, value) ⇒ log.info("Recorded temperature reading {} with {}", value, id) lastTemperatureReading = Some(value) sender() ! TemperatureRecorded(id) - case ReadTemperature(id) => + case ReadTemperature(id) ⇒ sender() ! RespondTemperature(id, lastTemperatureReading) } } diff --git a/akka-docs/src/test/scala/tutorial_3/DeviceInProgress.scala b/akka-docs/src/test/scala/tutorial_3/DeviceInProgress.scala index 3cc2377935..cd88c94a58 100644 --- a/akka-docs/src/test/scala/tutorial_3/DeviceInProgress.scala +++ b/akka-docs/src/test/scala/tutorial_3/DeviceInProgress.scala @@ -34,7 +34,7 @@ object DeviceInProgress2 { override def postStop(): Unit = log.info("Device actor {}-{} stopped", groupId, deviceId) override def receive: Receive = { - case ReadTemperature(id) => + case ReadTemperature(id) ⇒ sender() ! RespondTemperature(id, lastTemperatureReading) } diff --git a/akka-docs/src/test/scala/tutorial_4/Device.scala b/akka-docs/src/test/scala/tutorial_4/Device.scala index 640920f91f..892e509fc1 100644 --- a/akka-docs/src/test/scala/tutorial_4/Device.scala +++ b/akka-docs/src/test/scala/tutorial_4/Device.scala @@ -26,21 +26,21 @@ class Device(groupId: String, deviceId: String) extends Actor with ActorLogging override def postStop(): Unit = log.info("Device actor {}-{} stopped", groupId, deviceId) override def receive: Receive = { - case DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) => + case DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) ⇒ sender() ! DeviceManager.DeviceRegistered - case DeviceManager.RequestTrackDevice(groupId, deviceId) => + case DeviceManager.RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}-{}.This actor is responsible for {}-{}.", groupId, deviceId, this.groupId, this.deviceId ) - case RecordTemperature(id, value) => + case RecordTemperature(id, value) ⇒ log.info("Recorded temperature reading {} with {}", value, id) lastTemperatureReading = Some(value) sender() ! TemperatureRecorded(id) - case ReadTemperature(id) => + case ReadTemperature(id) ⇒ sender() ! 
RespondTemperature(id, lastTemperatureReading) } } diff --git a/akka-docs/src/test/scala/tutorial_4/DeviceGroup.scala b/akka-docs/src/test/scala/tutorial_4/DeviceGroup.scala index 1b4498bee0..15e66a2780 100644 --- a/akka-docs/src/test/scala/tutorial_4/DeviceGroup.scala +++ b/akka-docs/src/test/scala/tutorial_4/DeviceGroup.scala @@ -34,11 +34,11 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { override def postStop(): Unit = log.info("DeviceGroup {} stopped", groupId) override def receive: Receive = { - case trackMsg @ RequestTrackDevice(`groupId`, _) => + case trackMsg @ RequestTrackDevice(`groupId`, _) ⇒ deviceIdToActor.get(trackMsg.deviceId) match { - case Some(deviceActor) => + case Some(deviceActor) ⇒ deviceActor forward trackMsg - case None => + case None ⇒ log.info("Creating device actor for {}", trackMsg.deviceId) val deviceActor = context.actorOf(Device.props(groupId, trackMsg.deviceId), s"device-${trackMsg.deviceId}") //#device-group-register @@ -49,7 +49,7 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { deviceActor forward trackMsg } - case RequestTrackDevice(groupId, deviceId) => + case RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}. This actor is responsible for {}.", groupId, this.groupId @@ -57,11 +57,11 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { //#device-group-register //#device-group-remove - case RequestDeviceList(requestId) => + case RequestDeviceList(requestId) ⇒ sender() ! ReplyDeviceList(requestId, deviceIdToActor.keySet) //#device-group-remove - case Terminated(deviceActor) => + case Terminated(deviceActor) ⇒ val deviceId = actorToDeviceId(deviceActor) log.info("Device actor for {} has been terminated", deviceId) actorToDeviceId -= deviceActor diff --git a/akka-docs/src/test/scala/tutorial_4/DeviceManager.scala b/akka-docs/src/test/scala/tutorial_4/DeviceManager.scala index ab8b0852e6..3624aabd9e 100644 --- a/akka-docs/src/test/scala/tutorial_4/DeviceManager.scala +++ b/akka-docs/src/test/scala/tutorial_4/DeviceManager.scala @@ -26,11 +26,11 @@ class DeviceManager extends Actor with ActorLogging { override def postStop(): Unit = log.info("DeviceManager stopped") override def receive = { - case trackMsg @ RequestTrackDevice(groupId, _) => + case trackMsg @ RequestTrackDevice(groupId, _) ⇒ groupIdToActor.get(groupId) match { - case Some(ref) => + case Some(ref) ⇒ ref forward trackMsg - case None => + case None ⇒ log.info("Creating device group actor for {}", groupId) val groupActor = context.actorOf(DeviceGroup.props(groupId), "group-" + groupId) context.watch(groupActor) @@ -39,7 +39,7 @@ class DeviceManager extends Actor with ActorLogging { actorToGroupId += groupActor -> groupId } - case Terminated(groupActor) => + case Terminated(groupActor) ⇒ val groupId = actorToGroupId(groupActor) log.info("Device group actor for {} has been terminated", groupId) actorToGroupId -= groupActor diff --git a/akka-docs/src/test/scala/tutorial_5/Device.scala b/akka-docs/src/test/scala/tutorial_5/Device.scala index d41c7aef97..af323af655 100644 --- a/akka-docs/src/test/scala/tutorial_5/Device.scala +++ b/akka-docs/src/test/scala/tutorial_5/Device.scala @@ -26,21 +26,21 @@ class Device(groupId: String, deviceId: String) extends Actor with ActorLogging override def postStop(): Unit = log.info("Device actor {}-{} stopped", groupId, deviceId) override def receive: Receive = { - case DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) => + case 
DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) ⇒ sender() ! DeviceManager.DeviceRegistered - case DeviceManager.RequestTrackDevice(groupId, deviceId) => + case DeviceManager.RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}-{}.This actor is responsible for {}-{}.", groupId, deviceId, this.groupId, this.deviceId ) - case RecordTemperature(id, value) => + case RecordTemperature(id, value) ⇒ log.info("Recorded temperature reading {} with {}", value, id) lastTemperatureReading = Some(value) sender() ! TemperatureRecorded(id) - case ReadTemperature(id) => + case ReadTemperature(id) ⇒ sender() ! RespondTemperature(id, lastTemperatureReading) } } diff --git a/akka-docs/src/test/scala/tutorial_5/DeviceGroup.scala b/akka-docs/src/test/scala/tutorial_5/DeviceGroup.scala index 02ea60141c..151ee221fe 100644 --- a/akka-docs/src/test/scala/tutorial_5/DeviceGroup.scala +++ b/akka-docs/src/test/scala/tutorial_5/DeviceGroup.scala @@ -39,11 +39,11 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { override def receive: Receive = { //#query-added - case trackMsg @ RequestTrackDevice(`groupId`, _) => + case trackMsg @ RequestTrackDevice(`groupId`, _) ⇒ deviceIdToActor.get(trackMsg.deviceId) match { - case Some(ref) => + case Some(ref) ⇒ ref forward trackMsg - case None => + case None ⇒ log.info("Creating device actor for {}", trackMsg.deviceId) val deviceActor = context.actorOf(Device.props(groupId, trackMsg.deviceId), "device-" + trackMsg.deviceId) context.watch(deviceActor) @@ -52,16 +52,16 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { actorToDeviceId += deviceActor -> trackMsg.deviceId } - case RequestTrackDevice(groupId, deviceId) => + case RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}. This actor is responsible for {}.", groupId, this.groupId ) - case RequestDeviceList(requestId) => + case RequestDeviceList(requestId) ⇒ sender() ! ReplyDeviceList(requestId, deviceIdToActor.keySet) - case Terminated(deviceActor) => + case Terminated(deviceActor) ⇒ val deviceId = actorToDeviceId(deviceActor) log.info("Device actor for {} has been terminated", deviceId) actorToDeviceId -= deviceActor @@ -70,7 +70,7 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { //#query-added // ... other cases omitted - case RequestAllTemperatures(requestId) => + case RequestAllTemperatures(requestId) ⇒ context.actorOf(DeviceGroupQuery.props( actorToDeviceId = actorToDeviceId, requestId = requestId, diff --git a/akka-docs/src/test/scala/tutorial_5/DeviceGroupQuery.scala b/akka-docs/src/test/scala/tutorial_5/DeviceGroupQuery.scala index 7ca1f4ccc8..4bc1ad452e 100644 --- a/akka-docs/src/test/scala/tutorial_5/DeviceGroupQuery.scala +++ b/akka-docs/src/test/scala/tutorial_5/DeviceGroupQuery.scala @@ -33,7 +33,7 @@ class DeviceGroupQuery( val queryTimeoutTimer = context.system.scheduler.scheduleOnce(timeout, self, CollectionTimeout) override def preStart(): Unit = { - actorToDeviceId.keysIterator.foreach { deviceActor => + actorToDeviceId.keysIterator.foreach { deviceActor ⇒ context.watch(deviceActor) deviceActor ! 
Device.ReadTemperature(0) } @@ -55,20 +55,20 @@ class DeviceGroupQuery( repliesSoFar: Map[String, DeviceGroup.TemperatureReading], stillWaiting: Set[ActorRef] ): Receive = { - case Device.RespondTemperature(0, valueOption) => + case Device.RespondTemperature(0, valueOption) ⇒ val deviceActor = sender() val reading = valueOption match { - case Some(value) => DeviceGroup.Temperature(value) - case None => DeviceGroup.TemperatureNotAvailable + case Some(value) ⇒ DeviceGroup.Temperature(value) + case None ⇒ DeviceGroup.TemperatureNotAvailable } receivedResponse(deviceActor, reading, stillWaiting, repliesSoFar) - case Terminated(deviceActor) => + case Terminated(deviceActor) ⇒ receivedResponse(deviceActor, DeviceGroup.DeviceNotAvailable, stillWaiting, repliesSoFar) - case CollectionTimeout => + case CollectionTimeout ⇒ val timedOutReplies = - stillWaiting.map { deviceActor => + stillWaiting.map { deviceActor ⇒ val deviceId = actorToDeviceId(deviceActor) deviceId -> DeviceGroup.DeviceTimedOut } diff --git a/akka-docs/src/test/scala/tutorial_5/DeviceManager.scala b/akka-docs/src/test/scala/tutorial_5/DeviceManager.scala index 66023d5595..9d24ace6d4 100644 --- a/akka-docs/src/test/scala/tutorial_5/DeviceManager.scala +++ b/akka-docs/src/test/scala/tutorial_5/DeviceManager.scala @@ -23,11 +23,11 @@ class DeviceManager extends Actor with ActorLogging { override def postStop(): Unit = log.info("DeviceManager stopped") override def receive = { - case trackMsg @ RequestTrackDevice(groupId, _) => + case trackMsg @ RequestTrackDevice(groupId, _) ⇒ groupIdToActor.get(groupId) match { - case Some(ref) => + case Some(ref) ⇒ ref forward trackMsg - case None => + case None ⇒ log.info("Creating device group actor for {}", groupId) val groupActor = context.actorOf(DeviceGroup.props(groupId), "group-" + groupId) context.watch(groupActor) @@ -36,7 +36,7 @@ class DeviceManager extends Actor with ActorLogging { actorToGroupId += groupActor -> groupId } - case Terminated(groupActor) => + case Terminated(groupActor) ⇒ val groupId = actorToGroupId(groupActor) log.info("Device group actor for {} has been terminated", groupId) actorToGroupId -= groupActor diff --git a/akka-docs/src/test/scala/tutorial_6/Device.scala b/akka-docs/src/test/scala/tutorial_6/Device.scala index 2e0db3cd30..0f275a8993 100644 --- a/akka-docs/src/test/scala/tutorial_6/Device.scala +++ b/akka-docs/src/test/scala/tutorial_6/Device.scala @@ -26,21 +26,21 @@ class Device(groupId: String, deviceId: String) extends Actor with ActorLogging override def postStop(): Unit = log.info("Device actor {}-{} stopped", groupId, deviceId) override def receive: Receive = { - case DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) => + case DeviceManager.RequestTrackDevice(`groupId`, `deviceId`) ⇒ sender() ! DeviceManager.DeviceRegistered - case DeviceManager.RequestTrackDevice(groupId, deviceId) => + case DeviceManager.RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}-{}.This actor is responsible for {}-{}.", groupId, deviceId, this.groupId, this.deviceId ) - case RecordTemperature(id, value) => + case RecordTemperature(id, value) ⇒ log.info("Recorded temperature reading {} with {}", value, id) lastTemperatureReading = Some(value) sender() ! TemperatureRecorded(id) - case ReadTemperature(id) => + case ReadTemperature(id) ⇒ sender() ! 
RespondTemperature(id, lastTemperatureReading) } } diff --git a/akka-docs/src/test/scala/tutorial_6/DeviceGroup.scala b/akka-docs/src/test/scala/tutorial_6/DeviceGroup.scala index 30f2ac7f6e..f99e764000 100644 --- a/akka-docs/src/test/scala/tutorial_6/DeviceGroup.scala +++ b/akka-docs/src/test/scala/tutorial_6/DeviceGroup.scala @@ -36,11 +36,11 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { override def receive: Receive = { // Note the backticks - case trackMsg @ RequestTrackDevice(`groupId`, _) => + case trackMsg @ RequestTrackDevice(`groupId`, _) ⇒ deviceIdToActor.get(trackMsg.deviceId) match { - case Some(ref) => + case Some(ref) ⇒ ref forward trackMsg - case None => + case None ⇒ log.info("Creating device actor for {}", trackMsg.deviceId) val deviceActor = context.actorOf(Device.props(groupId, trackMsg.deviceId), "device-" + trackMsg.deviceId) context.watch(deviceActor) @@ -49,22 +49,22 @@ class DeviceGroup(groupId: String) extends Actor with ActorLogging { actorToDeviceId += deviceActor -> trackMsg.deviceId } - case RequestTrackDevice(groupId, deviceId) => + case RequestTrackDevice(groupId, deviceId) ⇒ log.warning( "Ignoring TrackDevice request for {}. This actor is responsible for {}.", groupId, this.groupId ) - case RequestDeviceList(requestId) => + case RequestDeviceList(requestId) ⇒ sender() ! ReplyDeviceList(requestId, deviceIdToActor.keySet) - case Terminated(deviceActor) => + case Terminated(deviceActor) ⇒ val deviceId = actorToDeviceId(deviceActor) log.info("Device actor for {} has been terminated", deviceId) actorToDeviceId -= deviceActor deviceIdToActor -= deviceId - case RequestAllTemperatures(requestId) => + case RequestAllTemperatures(requestId) ⇒ context.actorOf(DeviceGroupQuery.props( actorToDeviceId = actorToDeviceId, requestId = requestId, diff --git a/akka-docs/src/test/scala/tutorial_6/DeviceGroupQuery.scala b/akka-docs/src/test/scala/tutorial_6/DeviceGroupQuery.scala index 5bf011acbc..266648d2aa 100644 --- a/akka-docs/src/test/scala/tutorial_6/DeviceGroupQuery.scala +++ b/akka-docs/src/test/scala/tutorial_6/DeviceGroupQuery.scala @@ -33,7 +33,7 @@ class DeviceGroupQuery( val queryTimeoutTimer = context.system.scheduler.scheduleOnce(timeout, self, CollectionTimeout) override def preStart(): Unit = { - actorToDeviceId.keysIterator.foreach { deviceActor => + actorToDeviceId.keysIterator.foreach { deviceActor ⇒ context.watch(deviceActor) deviceActor ! 
Device.ReadTemperature(0) } @@ -54,22 +54,22 @@ class DeviceGroupQuery( repliesSoFar: Map[String, DeviceGroup.TemperatureReading], stillWaiting: Set[ActorRef] ): Receive = { - case Device.RespondTemperature(0, valueOption) => + case Device.RespondTemperature(0, valueOption) ⇒ val deviceActor = sender() val reading = valueOption match { - case Some(value) => DeviceGroup.Temperature(value) - case None => DeviceGroup.TemperatureNotAvailable + case Some(value) ⇒ DeviceGroup.Temperature(value) + case None ⇒ DeviceGroup.TemperatureNotAvailable } receivedResponse(deviceActor, reading, stillWaiting, repliesSoFar) - case Terminated(deviceActor) => + case Terminated(deviceActor) ⇒ if (stillWaiting.contains(deviceActor)) receivedResponse(deviceActor, DeviceGroup.DeviceNotAvailable, stillWaiting, repliesSoFar) // else ignore - case CollectionTimeout => + case CollectionTimeout ⇒ val timedOutReplies = - stillWaiting.map { deviceActor => + stillWaiting.map { deviceActor ⇒ val deviceId = actorToDeviceId(deviceActor) deviceId -> DeviceGroup.DeviceTimedOut } diff --git a/akka-docs/src/test/scala/tutorial_6/DeviceManager.scala b/akka-docs/src/test/scala/tutorial_6/DeviceManager.scala index fa0ce9edae..ea9942f752 100644 --- a/akka-docs/src/test/scala/tutorial_6/DeviceManager.scala +++ b/akka-docs/src/test/scala/tutorial_6/DeviceManager.scala @@ -23,11 +23,11 @@ class DeviceManager extends Actor with ActorLogging { override def postStop(): Unit = log.info("DeviceManager stopped") override def receive = { - case trackMsg @ RequestTrackDevice(groupId, _) => + case trackMsg @ RequestTrackDevice(groupId, _) ⇒ groupIdToActor.get(groupId) match { - case Some(ref) => + case Some(ref) ⇒ ref forward trackMsg - case None => + case None ⇒ log.info("Creating device group actor for {}", groupId) val groupActor = context.actorOf(DeviceGroup.props(groupId), "group-" + groupId) context.watch(groupActor) @@ -36,7 +36,7 @@ class DeviceManager extends Actor with ActorLogging { actorToGroupId += groupActor -> groupId } - case Terminated(groupActor) => + case Terminated(groupActor) ⇒ val groupId = actorToGroupId(groupActor) log.info("Device group actor for {} has been terminated", groupId) actorToGroupId -= groupActor diff --git a/akka-osgi/src/test/scala/docs/osgi/Activator.scala b/akka-osgi/src/test/scala/docs/osgi/Activator.scala index 4f432452c3..d2859cc380 100644 --- a/akka-osgi/src/test/scala/docs/osgi/Activator.scala +++ b/akka-osgi/src/test/scala/docs/osgi/Activator.scala @@ -22,4 +22,4 @@ class Activator extends ActorSystemActivator { } } -//#Activator \ No newline at end of file +//#Activator diff --git a/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala b/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala index 998696e693..db5991db04 100644 --- a/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala +++ b/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala @@ -12,4 +12,4 @@ class LocalSnapshotStoreSpec extends SnapshotStoreSpec( akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.local" akka.persistence.snapshot-store.local.dir = "target/snapshots" """)) - with PluginCleanup \ No newline at end of file + with PluginCleanup diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 6b2e3010ef..488f6a0800 100644 --- 
a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -190,16 +190,16 @@ private[akka] class RemoteActorRefProvider( val internals = Internals( remoteDaemon = { - val d = new RemoteSystemDaemon( - system, - local.rootPath / "remote", - rootGuardian, - remotingTerminator, - _log, - untrustedMode = remoteSettings.UntrustedMode) - local.registerExtraNames(Map(("remote", d))) - d - }, + val d = new RemoteSystemDaemon( + system, + local.rootPath / "remote", + rootGuardian, + remotingTerminator, + _log, + untrustedMode = remoteSettings.UntrustedMode) + local.registerExtraNames(Map(("remote", d))) + d + }, serialization = SerializationExtension(system), transport = if (remoteSettings.Artery.Enabled) new ArteryTransport(system, this) else new Remoting(system, this)) diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala index 2fe57d6a09..06a9426ca7 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala @@ -41,4 +41,4 @@ class AES128CounterSecureRNG extends java.security.SecureRandomSpi { * @return the seed bytes. */ override protected def engineGenerateSeed(numBytes: Int): Array[Byte] = entropySource.generateSeed(numBytes) -} \ No newline at end of file +} diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala index 37b2be391d..d4b6afcf85 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala @@ -41,4 +41,4 @@ class AES256CounterSecureRNG extends java.security.SecureRandomSpi { * @return the seed bytes. 
*/ override protected def engineGenerateSeed(numBytes: Int): Array[Byte] = entropySource.generateSeed(numBytes) -} \ No newline at end of file +} diff --git a/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala index 20d03ac94d..0df9009edb 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala @@ -179,9 +179,9 @@ private[akka] final class ArteryMessageSerializer(val system: ExtendedActorSyste SystemMessageDelivery.SystemMessageEnvelope( serialization.deserialize( - protoEnv.getMessage.toByteArray, - protoEnv.getSerializerId, - if (protoEnv.hasMessageManifest) protoEnv.getMessageManifest.toStringUtf8 else "").get, + protoEnv.getMessage.toByteArray, + protoEnv.getSerializerId, + if (protoEnv.hasMessageManifest) protoEnv.getMessageManifest.toStringUtf8 else "").get, protoEnv.getSeqNo, deserializeUniqueAddress(protoEnv.getAckReplyTo)) } diff --git a/akka-remote/src/main/scala/akka/remote/serialization/MiscMessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/MiscMessageSerializer.scala index c78bbd9164..0065dd455c 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/MiscMessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/MiscMessageSerializer.scala @@ -436,8 +436,8 @@ class MiscMessageSerializer(val system: ExtendedActorSystem) extends SerializerW BroadcastPool( nrOfInstances = bp.getNrOfInstances, resizer = - if (bp.hasResizer) Some(payloadSupport.deserializePayload(bp.getResizer).asInstanceOf[Resizer]) - else None, + if (bp.hasResizer) Some(payloadSupport.deserializePayload(bp.getResizer).asInstanceOf[Resizer]) + else None, routerDispatcher = if (bp.hasRouterDispatcher) bp.getRouterDispatcher else Dispatchers.DefaultDispatcherId, usePoolDispatcher = bp.getUsePoolDispatcher ) @@ -448,8 +448,8 @@ class MiscMessageSerializer(val system: ExtendedActorSystem) extends SerializerW RandomPool( nrOfInstances = rp.getNrOfInstances, resizer = - if (rp.hasResizer) Some(payloadSupport.deserializePayload(rp.getResizer).asInstanceOf[Resizer]) - else None, + if (rp.hasResizer) Some(payloadSupport.deserializePayload(rp.getResizer).asInstanceOf[Resizer]) + else None, routerDispatcher = if (rp.hasRouterDispatcher) rp.getRouterDispatcher else Dispatchers.DefaultDispatcherId, usePoolDispatcher = rp.getUsePoolDispatcher ) @@ -460,8 +460,8 @@ class MiscMessageSerializer(val system: ExtendedActorSystem) extends SerializerW RoundRobinPool( nrOfInstances = rp.getNrOfInstances, resizer = - if (rp.hasResizer) Some(payloadSupport.deserializePayload(rp.getResizer).asInstanceOf[Resizer]) - else None, + if (rp.hasResizer) Some(payloadSupport.deserializePayload(rp.getResizer).asInstanceOf[Resizer]) + else None, routerDispatcher = if (rp.hasRouterDispatcher) rp.getRouterDispatcher else Dispatchers.DefaultDispatcherId, usePoolDispatcher = rp.getUsePoolDispatcher ) @@ -472,8 +472,8 @@ class MiscMessageSerializer(val system: ExtendedActorSystem) extends SerializerW ScatterGatherFirstCompletedPool( nrOfInstances = sgp.getGeneric.getNrOfInstances, resizer = - if (sgp.getGeneric.hasResizer) Some(payloadSupport.deserializePayload(sgp.getGeneric.getResizer).asInstanceOf[Resizer]) - else None, + if (sgp.getGeneric.hasResizer) Some(payloadSupport.deserializePayload(sgp.getGeneric.getResizer).asInstanceOf[Resizer]) + else 
None, within = deserializeFiniteDuration(sgp.getWithin), routerDispatcher = if (sgp.getGeneric.hasRouterDispatcher) sgp.getGeneric.getRouterDispatcher @@ -486,8 +486,8 @@ class MiscMessageSerializer(val system: ExtendedActorSystem) extends SerializerW TailChoppingPool( nrOfInstances = tcp.getGeneric.getNrOfInstances, resizer = - if (tcp.getGeneric.hasResizer) Some(payloadSupport.deserializePayload(tcp.getGeneric.getResizer).asInstanceOf[Resizer]) - else None, + if (tcp.getGeneric.hasResizer) Some(payloadSupport.deserializePayload(tcp.getGeneric.getResizer).asInstanceOf[Resizer]) + else None, routerDispatcher = if (tcp.getGeneric.hasRouterDispatcher) tcp.getGeneric.getRouterDispatcher else Dispatchers.DefaultDispatcherId, usePoolDispatcher = tcp.getGeneric.getUsePoolDispatcher, within = deserializeFiniteDuration(tcp.getWithin), diff --git a/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala index ebc0fbe673..ea49118de2 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala @@ -141,12 +141,12 @@ class TestTransport( */ val writeBehavior = new SwitchableLoggedBehavior[(TestAssociationHandle, ByteString), Boolean]( defaultBehavior = { - defaultWrite _ - }, + defaultWrite _ + }, logCallback = { - case (handle, payload) ⇒ - registry.logActivity(WriteAttempt(handle.localAddress, handle.remoteAddress, payload)) - }) + case (handle, payload) ⇒ + registry.logActivity(WriteAttempt(handle.localAddress, handle.remoteAddress, payload)) + }) /** * The [[akka.remote.transport.TestTransport.SwitchableLoggedBehavior]] for the disassociate() method on handles. All @@ -154,12 +154,12 @@ class TestTransport( */ val disassociateBehavior = new SwitchableLoggedBehavior[TestAssociationHandle, Unit]( defaultBehavior = { - defaultDisassociate _ - }, + defaultDisassociate _ + }, logCallback = { - (handle) ⇒ - registry.logActivity(DisassociateAttempt(handle.localAddress, handle.remoteAddress)) - }) + (handle) ⇒ + registry.logActivity(DisassociateAttempt(handle.localAddress, handle.remoteAddress)) + }) private[akka] def write(handle: TestAssociationHandle, payload: ByteString): Boolean = Await.result(writeBehavior((handle, payload)), 3.seconds) diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala index 421db500d3..5616e90b5c 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala @@ -207,7 +207,7 @@ private[netty] abstract class ServerHandler( listener ⇒ val remoteAddress = NettyTransport.addressFromSocketAddress(remoteSocketAddress, transport.schemeIdentifier, transport.system.name, hostName = None, port = None).getOrElse( - throw new NettyTransportException(s"Unknown inbound remote address type [${remoteSocketAddress.getClass.getName}]")) + throw new NettyTransportException(s"Unknown inbound remote address type [${remoteSocketAddress.getClass.getName}]")) init(channel, remoteSocketAddress, remoteAddress, msg) { listener notify InboundAssociation(_) } } } diff --git a/akka-remote/src/test/scala/akka/remote/RemoteActorMailboxSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteActorMailboxSpec.scala index 9c98860c44..0470b4b4fe 100644 --- 
a/akka-remote/src/test/scala/akka/remote/RemoteActorMailboxSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteActorMailboxSpec.scala @@ -7,4 +7,4 @@ class RemoteActorMailboxSpec extends ActorMailboxSpec( ConfigFactory.parseString("""akka.actor.provider = remote"""). withFallback(ActorMailboxSpec.mailboxConf)) { -} \ No newline at end of file +} diff --git a/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala index 4bb4566877..118058eed6 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala @@ -15,7 +15,7 @@ class RemoteConsistentHashingRouterSpec extends AkkaSpec(""" "ConsistentHashingGroup" must { "use same hash ring indepenent of self address" in { - // simulating running router on two different nodes (a1, a2) with target routees on 3 other nodes (s1, s2, s3) + // simulating running router on two different nodes (a1, a2) with target routees on 3 other nodes (s1, s2, s3) val a1 = Address("akka.tcp", "Sys", "client1", 2552) val a2 = Address("akka.tcp", "Sys", "client2", 2552) val s1 = ActorSelectionRoutee(system.actorSelection("akka.tcp://Sys@server1:2552/user/a/b")) diff --git a/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala index 0ab356ebac..c31c944932 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala @@ -62,7 +62,7 @@ akka { }).withDeploy(Deploy.local)) expectMsg(20.seconds, ref) - // we don't expect real quarantine when the UID is unknown, i.e. QuarantinedEvent is not published + // we don't expect real quarantine when the UID is unknown, i.e. QuarantinedEvent is not published probe.expectNoMsg(3.seconds) // The following verifies ticket #3870, i.e. make sure that re-delivery of Watch message is stopped. // It was observed as periodic logging of "address is now gated" when the gate was lifted. 
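// Editor's note: a minimal sketch (not part of the original patch) of the
// death-watch pattern the spec above exercises; the Watcher class and the
// println are illustrative assumptions, not code taken from the test.
import akka.actor.{ Actor, ActorRef, Terminated }

class Watcher(subject: ActorRef) extends Actor {
  // Register interest: a Terminated message is delivered when the watched actor
  // stops, or when a remote peer is deemed permanently unreachable (quarantined).
  context.watch(subject)

  def receive = {
    case Terminated(ref) ⇒
      println(s"watched actor $ref terminated")
      context.stop(self)
  }
}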
diff --git a/akka-remote/src/test/scala/akka/remote/artery/ArteryMultiNodeSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/ArteryMultiNodeSpec.scala index bf547fa957..68ad68c57e 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/ArteryMultiNodeSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/ArteryMultiNodeSpec.scala @@ -41,8 +41,8 @@ abstract class ArteryMultiNodeSpec(config: Config) extends AkkaSpec(config.withF ArterySpecSupport.newFlightRecorderConfig.withFallback(extraConfig.fold( localSystem.settings.config )( - str ⇒ ConfigFactory.parseString(str).withFallback(localSystem.settings.config) - )) + str ⇒ ConfigFactory.parseString(str).withFallback(localSystem.settings.config) + )) val remoteSystem = ActorSystem(name.getOrElse(nextGeneratedSystemName), config) remoteSystems = remoteSystems :+ remoteSystem diff --git a/akka-remote/src/test/scala/akka/remote/artery/compress/HeavyHittersSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/compress/HeavyHittersSpec.scala index 6eeb496c20..42496e6437 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/compress/HeavyHittersSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/compress/HeavyHittersSpec.scala @@ -117,8 +117,8 @@ class HeavyHittersSpec extends WordSpecLike with Matchers { val hitters = new TopHeavyHitters[String](2) hitters.update("A", 1) should ===(true) hitters.update("B", 2) should ===(true) - hitters.update("C", 3) should ===(true) // A was dropped now - hitters.update("A", 10) should ===(true) // TODO this is technically unexpected, we have already compressed A... + hitters.update("C", 3) should ===(true) // A was dropped now + hitters.update("A", 10) should ===(true) // TODO this is technically unexpected, we have already compressed A... 
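// Editor's note (clarifying the TODO above): once "A" has been evicted from the
// top-2 set by the heavier "C", the later update("A", 10) re-admits it and
// reports true again, even though "A" was already compressed the first time.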
} "allow updating entries that have lower weight than the least known weight if there is capacity anyway" in { diff --git a/akka-remote/src/test/scala/akka/remote/transport/GenericTransportSpec.scala b/akka-remote/src/test/scala/akka/remote/transport/GenericTransportSpec.scala index 61da926b3f..7ff1e445dc 100644 --- a/akka-remote/src/test/scala/akka/remote/transport/GenericTransportSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/transport/GenericTransportSpec.scala @@ -161,4 +161,4 @@ abstract class GenericTransportSpec(withAkkaProtocol: Boolean = false) } } -} \ No newline at end of file +} diff --git a/akka-stream-tests-tck/src/test/scala-jdk9-only/akka/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala b/akka-stream-tests-tck/src/test/scala-jdk9-only/akka/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala index f2a290e45c..87f898ccd8 100644 --- a/akka-stream-tests-tck/src/test/scala-jdk9-only/akka/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala-jdk9-only/akka/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala @@ -3,7 +3,7 @@ */ package akka.stream.tck -import java.util.concurrent.{ Flow => JavaFlow } +import java.util.concurrent.{ Flow ⇒ JavaFlow } import akka.NotUsed import akka.stream.scaladsl.{ Flow, JavaFlowSupport, Sink, Source } @@ -14,9 +14,8 @@ class IterablePublisherViaJavaFlowPublisherTest extends AkkaPublisherVerificatio override def createPublisher(elements: Long): Publisher[Int] = { val sourceViaJavaFlowPublisher: JavaFlow.Publisher[Int] = Source(iterable(elements)) .runWith(JavaFlowSupport.Sink.asPublisher(fanout = false)) - - val javaFlowPublisherIntoAkkaSource: Source[Int, NotUsed] = + val javaFlowPublisherIntoAkkaSource: Source[Int, NotUsed] = JavaFlowSupport.Source.fromPublisher(sourceViaJavaFlowPublisher) javaFlowPublisherIntoAkkaSource diff --git a/akka-stream-tests/src/test/scala-jdk9-only/akka/stream/scaladsl/FlowPublisherSinkSpec.scala b/akka-stream-tests/src/test/scala-jdk9-only/akka/stream/scaladsl/FlowPublisherSinkSpec.scala index 1b4285349a..816ecbc3b1 100644 --- a/akka-stream-tests/src/test/scala-jdk9-only/akka/stream/scaladsl/FlowPublisherSinkSpec.scala +++ b/akka-stream-tests/src/test/scala-jdk9-only/akka/stream/scaladsl/FlowPublisherSinkSpec.scala @@ -25,7 +25,7 @@ class FlowPublisherSinkSpec extends StreamSpec { "be able to use Publisher in materialized value transformation" in { val f = Source(1 to 3).runWith( - JavaFlowSupport.Sink.asPublisher[Int](false).mapMaterializedValue { p ⇒ + JavaFlowSupport.Sink.asPublisher[Int](false).mapMaterializedValue { p ⇒ JavaFlowSupport.Source.fromPublisher(p).runFold(0)(_ + _) }) diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/compression/CompressionTestingTools.scala b/akka-stream-tests/src/test/scala/akka/stream/io/compression/CompressionTestingTools.scala index 9ace6259af..30e71e6048 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/compression/CompressionTestingTools.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/compression/CompressionTestingTools.scala @@ -40,4 +40,4 @@ object CompressionTestingTools { rec(throwable) } } -} \ No newline at end of file +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FailedSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FailedSourceSpec.scala index ddf3ac082a..b7254b3c6a 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FailedSourceSpec.scala +++ 
b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FailedSourceSpec.scala @@ -32,4 +32,4 @@ class FailedSourceSpec extends StreamSpec with DefaultTimeout { } } -} \ No newline at end of file +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala index f2b07dbf42..08ef4a56a5 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala @@ -139,7 +139,7 @@ class FlowMapAsyncSpec extends StreamSpec { case OnNext("A") ⇒ () // is fine case OnNext("B") ⇒ () // is fine case OnError(ex) if ex.getMessage == "Boom at C" && !gotErrorAlready ⇒ - gotErrorAlready = true // fine, error can over-take elements + gotErrorAlready = true // fine, error can over-take elements } probe.request(100) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/MaybeSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/MaybeSourceSpec.scala index 6d9cd2b927..d6a0a140cd 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/MaybeSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/MaybeSourceSpec.scala @@ -98,4 +98,4 @@ class MaybeSourceSpec extends StreamSpec with DefaultTimeout { } } -} \ No newline at end of file +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala index c354a908eb..bcdd23da51 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala @@ -167,11 +167,11 @@ class SourceSpec extends StreamSpec with DefaultTimeout { EventFilter[RuntimeException](message = "expected", occurrences = 1) intercept whenReady( Source.unfold((0, 1)) { - case (a, _) if a > 10000000 ⇒ throw t - case (a, b) ⇒ Some((b, a + b) → a) - }.runFold(List.empty[Int]) { case (xs, x) ⇒ x :: xs }.failed) { - _ should be theSameInstanceAs (t) - } + case (a, _) if a > 10000000 ⇒ throw t + case (a, b) ⇒ Some((b, a + b) → a) + }.runFold(List.empty[Int]) { case (xs, x) ⇒ x :: xs }.failed) { + _ should be theSameInstanceAs (t) + } } "generate a finite fibonacci sequence asynchronously" in { diff --git a/akka-stream/src/main/scala-jdk9-only/akka/stream/impl/JavaFlowAndRsConverters.scala b/akka-stream/src/main/scala-jdk9-only/akka/stream/impl/JavaFlowAndRsConverters.scala index 032dee7187..5a13ebbd4f 100644 --- a/akka-stream/src/main/scala-jdk9-only/akka/stream/impl/JavaFlowAndRsConverters.scala +++ b/akka-stream/src/main/scala-jdk9-only/akka/stream/impl/JavaFlowAndRsConverters.scala @@ -7,17 +7,17 @@ package akka.stream.impl import java.util.concurrent.Flow import akka.annotation.InternalApi -import org.{ reactivestreams => rs } +import org.{ reactivestreams ⇒ rs } import JavaFlowAndRsConverters.Implicits._ /** * INTERNAL API: Provides converters between Reactive Streams (reactive-streams.org) and their Java 9+ counter-parts, * defined in `java.util.concurrent.Flow.*`. This API is internal because Reactive Streams will ship with such - * adapters itself at some point, and we'd not want to duplicate that effort for users to be confused about which ones - * to use. 
These adapters are used internally by Akka Streams to convert between the standards but you should not touch + * adapters itself at some point, and we'd not want to duplicate that effort for users to be confused about which ones + * to use. These adapters are used internally by Akka Streams to convert between the standards but you should not touch * them directly - use the `JavaFlowSupport` classes instead. - * - * Please note that either of these types are designed for *inter-op* and usually should not be used directly + * + * Please note that both of these types are designed for *inter-op* and usually should not be used directly * in applications. The intended use case is for shared libraries, like database drivers or similar to provide * the inter-operable types, such that other libraries can co-operate with them directly, if that is your use case * and you're using the j.u.c.Flow types, use the [[akka.stream.scaladsl.JavaFlowSupport]] sources/sinks/flows instead. @@ -67,43 +67,43 @@ private[akka] object JavaFlowAndRsConverters { case delegate ⇒ new RsPublisherToJavaFlowAdapter(delegate) // adapt, it is a real Publisher } final def asRs[T](p: Flow.Publisher[T]): rs.Publisher[T] = p match { - case null ⇒ null // null remains null - case adapter: RsPublisherToJavaFlowAdapter[T] ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new JavaFlowPublisherToRsAdapter[T](p) - } + case null ⇒ null // null remains null + case adapter: RsPublisherToJavaFlowAdapter[T] ⇒ adapter.delegate // unwrap adapter instead of wrapping again + case _ ⇒ new JavaFlowPublisherToRsAdapter[T](p) + } - final def asJava[T](s: rs.Subscription): Flow.Subscription = s match { - case null ⇒ null // null remains null + final def asJava[T](s: rs.Subscription): Flow.Subscription = s match { + case null ⇒ null // null remains null case adapter: JavaFlowSubscriptionToRsAdapter ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new RsSubscriptionToJavaFlowAdapter(s) + case _ ⇒ new RsSubscriptionToJavaFlowAdapter(s) } final def asRs[T](s: Flow.Subscription): rs.Subscription = s match { - case null ⇒ null // null remains null + case null ⇒ null // null remains null case adapter: RsSubscriptionToJavaFlowAdapter ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new JavaFlowSubscriptionToRsAdapter(s) + case _ ⇒ new JavaFlowSubscriptionToRsAdapter(s) } - - final def asJava[T](s: rs.Subscriber[T]): Flow.Subscriber[T] = + + final def asJava[T](s: rs.Subscriber[T]): Flow.Subscriber[T] = s match { - case null ⇒ null // null remains null + case null ⇒ null // null remains null case adapter: JavaFlowSubscriberToRsAdapter[T] ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new RsSubscriberToJavaFlowAdapter[T](s) + case _ ⇒ new RsSubscriberToJavaFlowAdapter[T](s) } final def asRs[T](s: Flow.Subscriber[T]): rs.Subscriber[T] = s match { - case null ⇒ null // null remains null + case null ⇒ null // null remains null case adapter: RsSubscriberToJavaFlowAdapter[T] ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new JavaFlowSubscriberToRsAdapter[T](s) + case _ ⇒ new JavaFlowSubscriberToRsAdapter[T](s) } final def asJava[T, R](p: rs.Processor[T, R]): Flow.Processor[T, R] = p match { - case null ⇒ null // null remains null - case adapter: JavaFlowProcessorToRsAdapter[T, R] ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new RsProcessorToJavaFlowAdapter[T, R](p) - } + case null ⇒ null // null remains
null + case adapter: JavaFlowProcessorToRsAdapter[T, R] ⇒ adapter.delegate // unwrap adapter instead of wrapping again + case _ ⇒ new RsProcessorToJavaFlowAdapter[T, R](p) + } final def asRs[T, R](p: Flow.Processor[T, R]): rs.Processor[T, R] = p match { case null ⇒ null // null remains null case adapter: RsProcessorToJavaFlowAdapter[T, R] ⇒ adapter.delegate // unwrap adapter instead of wrapping again - case _ => new JavaFlowProcessorToRsAdapter[T, R](p) + case _ ⇒ new JavaFlowProcessorToRsAdapter[T, R](p) } } diff --git a/akka-stream/src/main/scala/akka/stream/UniformFanOutShape.scala b/akka-stream/src/main/scala/akka/stream/UniformFanOutShape.scala index f791be282e..76e0bb2634 100644 --- a/akka-stream/src/main/scala/akka/stream/UniformFanOutShape.scala +++ b/akka-stream/src/main/scala/akka/stream/UniformFanOutShape.scala @@ -31,4 +31,4 @@ class UniformFanOutShape[-I, +O](n: Int, _init: FanOutShape.Init[I @uncheckedVar // cannot deprecate a lazy val because of genjavadoc problem https://github.com/typesafehub/genjavadoc/issues/85 private lazy val _outArray: Array[Outlet[O @uncheckedVariance]] = outlets.toArray def out(n: Int): Outlet[O @uncheckedVariance] = outlets(n) -} \ No newline at end of file +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/FailedSource.scala b/akka-stream/src/main/scala/akka/stream/impl/FailedSource.scala index 8872d6c22a..2ec88128c1 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FailedSource.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FailedSource.scala @@ -28,4 +28,4 @@ import akka.stream.stage.{ GraphStage, GraphStageLogic, OutHandler } } override def toString = s"FailedSource(${failure.getClass.getName})" -} \ No newline at end of file +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala b/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala index fe4fdb4a23..17b6393103 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala @@ -81,4 +81,4 @@ import scala.util.Try } override def toString = "MaybeSource" -} \ No newline at end of file +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala b/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala index 58db7cbfd8..b1f1849019 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala @@ -753,8 +753,8 @@ import scala.collection.immutable.Map.Map1 applyIslandAndAttributes( beforeBuilder.concat( composite - .assign(out, inOffset - composite.offsetOfModule(out)) - .traversal).concat(traversalSoFar)), + .assign(out, inOffset - composite.offsetOfModule(out)) + .traversal).concat(traversalSoFar)), pendingBuilder = OptionVal.None, beforeBuilder = EmptyTraversal) case OptionVal.None ⇒ copy( diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala index 29db60dadb..d3c7b2378c 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala @@ -171,4 +171,4 @@ private[akka] final class FileSource(path: Path, chunkSize: Int, startPosition: override def withAttributes(attr: Attributes): SourceModule[ByteString, Future[IOResult]] = new InputStreamSource(createInputStream, chunkSize, attr, amendShape(attr)) -} \ No newline at end of file +} diff --git 
a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala index 8551f8f691..f09d6b3d9c 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala @@ -391,7 +391,7 @@ private[stream] object ConnectionSourceStage { Flow[ByteString] ) val fromNetTimeout: BidiFlow[ByteString, ByteString, ByteString, ByteString, NotUsed] = - toNetTimeout.reversed // now the bottom flow transforms the exception, the top one doesn't (since that one is "fromNet") + toNetTimeout.reversed // now the bottom flow transforms the exception, the top one doesn't (since that one is "fromNet") fromNetTimeout atop BidiFlow.bidirectionalIdleTimeout[ByteString, ByteString](idleTimeout) atop toNetTimeout } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Restart.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Restart.scala index 66d0b0c5e8..42d36bcf53 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Restart.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Restart.scala @@ -122,4 +122,4 @@ object RestartFlow { flowFactory.create().asScala }.asJava } -} \ No newline at end of file +} diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActors.scala b/akka-testkit/src/main/scala/akka/testkit/TestActors.scala index 05d8747d6a..a04acbffb8 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestActors.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestActors.scala @@ -24,7 +24,7 @@ object TestActors { */ class BlackholeActor extends Actor { override def receive = { - case _ ⇒ // ignore... + case _ ⇒ // ignore... } } diff --git a/akka-typed/src/main/scala/akka/typed/cluster/internal/MiscMessageSerializer.scala b/akka-typed/src/main/scala/akka/typed/cluster/internal/MiscMessageSerializer.scala index 703f3c3af5..df40150b3c 100644 --- a/akka-typed/src/main/scala/akka/typed/cluster/internal/MiscMessageSerializer.scala +++ b/akka-typed/src/main/scala/akka/typed/cluster/internal/MiscMessageSerializer.scala @@ -29,4 +29,4 @@ class MiscMessageSerializer(val system: akka.actor.ExtendedActorSystem) extends case "a" ⇒ resolver.resolveActorRef(new String(bytes, StandardCharsets.UTF_8)) } -} \ No newline at end of file +} diff --git a/akka-typed/src/main/scala/akka/typed/internal/BehaviorImpl.scala b/akka-typed/src/main/scala/akka/typed/internal/BehaviorImpl.scala index 2e0d9d4812..2cad783a62 100644 --- a/akka-typed/src/main/scala/akka/typed/internal/BehaviorImpl.scala +++ b/akka-typed/src/main/scala/akka/typed/internal/BehaviorImpl.scala @@ -81,13 +81,13 @@ import scala.annotation.tailrec behavior: Behavior[T]): Behavior[T] = { intercept[T, T]( beforeMessage = (ctx, msg) ⇒ { - onMessage(ctx, msg) - msg - }, + onMessage(ctx, msg) + msg + }, beforeSignal = (ctx, sig) ⇒ { - onSignal(ctx, sig) - true - }, + onSignal(ctx, sig) + true + }, afterMessage = (ctx, msg, b) ⇒ b, // TODO optimize by using more ConstantFun afterSignal = (ctx, sig, b) ⇒ b, behavior)(ClassTag(classOf[Any])) diff --git a/akka-typed/src/main/scala/akka/typed/internal/DispatchersImpl.scala b/akka-typed/src/main/scala/akka/typed/internal/DispatchersImpl.scala index 67f4d9d357..6f2ec355be 100644 --- a/akka-typed/src/main/scala/akka/typed/internal/DispatchersImpl.scala +++ b/akka-typed/src/main/scala/akka/typed/internal/DispatchersImpl.scala @@ -189,7 +189,7 @@ class DispatcherConfigurator(config: Config, prerequisites: d.DispatcherPrerequi private val instance = 
ExecutionContexts.fromExecutorService( configureExecutor().createExecutorServiceFactory(config.getString("id"), prerequisites.threadFactory) - .createExecutorService) + .createExecutorService) /** * Returns the same dispatcher instance for each invocation diff --git a/akka-typed/src/main/scala/akka/typed/internal/ExtensionsImpl.scala b/akka-typed/src/main/scala/akka/typed/internal/ExtensionsImpl.scala index 1f69dc096e..6c7c3e1dba 100644 --- a/akka-typed/src/main/scala/akka/typed/internal/ExtensionsImpl.scala +++ b/akka-typed/src/main/scala/akka/typed/internal/ExtensionsImpl.scala @@ -117,4 +117,4 @@ trait ExtensionsImpl extends Extensions { self: ActorSystem[_] ⇒ case t: Throwable ⇒ throw t //Initialization failed, throw same again case other ⇒ other.asInstanceOf[T] //could be a T or null, in which case we return the null as T } -} \ No newline at end of file +} diff --git a/akka-typed/src/main/scala/akka/typed/internal/TimerSchedulerImpl.scala b/akka-typed/src/main/scala/akka/typed/internal/TimerSchedulerImpl.scala index b5147fdf5b..fb5551fbd8 100644 --- a/akka-typed/src/main/scala/akka/typed/internal/TimerSchedulerImpl.scala +++ b/akka-typed/src/main/scala/akka/typed/internal/TimerSchedulerImpl.scala @@ -132,12 +132,12 @@ import scala.reflect.ClassTag BehaviorImpl.intercept[T, TimerMsg]( beforeMessage = interceptTimerMsg, beforeSignal = (ctx, sig) ⇒ { - sig match { - case PreRestart | PostStop ⇒ cancelAll() - case _ ⇒ // unhandled - } - true - }, + sig match { + case PreRestart | PostStop ⇒ cancelAll() + case _ ⇒ // unhandled + } + true + }, afterMessage = (ctx, msg, b) ⇒ b, // TODO optimize by using more ConstantFun afterSignal = (ctx, sig, b) ⇒ b, behavior)(ClassTag(classOf[TimerSchedulerImpl.TimerMsg])) diff --git a/akka-typed/src/main/scala/akka/typed/javadsl/Ask.scala b/akka-typed/src/main/scala/akka/typed/javadsl/Ask.scala index d615b072b3..006b460fae 100644 --- a/akka-typed/src/main/scala/akka/typed/javadsl/Ask.scala +++ b/akka-typed/src/main/scala/akka/typed/javadsl/Ask.scala @@ -11,4 +11,4 @@ import akka.japi.function.Function object AskPattern { def ask[T, U](actor: ActorRef[T], message: Function[ActorRef[U], T], timeout: Timeout, scheduler: Scheduler): CompletionStage[U] = FutureConverters.toJava[U](actor.?(message.apply)(timeout, scheduler)) -} \ No newline at end of file +} diff --git a/akka-typed/src/main/scala/akka/typed/javadsl/BehaviorBuilder.scala b/akka-typed/src/main/scala/akka/typed/javadsl/BehaviorBuilder.scala index 3c2d22d79c..5f398f285a 100644 --- a/akka-typed/src/main/scala/akka/typed/javadsl/BehaviorBuilder.scala +++ b/akka-typed/src/main/scala/akka/typed/javadsl/BehaviorBuilder.scala @@ -265,4 +265,4 @@ private class BuiltBehavior[T]( unhandled[T] } -} \ No newline at end of file +} diff --git a/akka-typed/src/main/scala/akka/typed/javadsl/ReceiveBuilder.scala b/akka-typed/src/main/scala/akka/typed/javadsl/ReceiveBuilder.scala index 7a3beee314..8da78e9a90 100644 --- a/akka-typed/src/main/scala/akka/typed/javadsl/ReceiveBuilder.scala +++ b/akka-typed/src/main/scala/akka/typed/javadsl/ReceiveBuilder.scala @@ -166,4 +166,4 @@ private class BuiltReceive[T]( Actor.unhandled } -} \ No newline at end of file +} diff --git a/akka-typed/src/main/scala/akka/typed/scaladsl/adapter/AdapterExtension.scala b/akka-typed/src/main/scala/akka/typed/scaladsl/adapter/AdapterExtension.scala index 955d651183..2c54f0265e 100644 --- a/akka-typed/src/main/scala/akka/typed/scaladsl/adapter/AdapterExtension.scala +++ 
b/akka-typed/src/main/scala/akka/typed/scaladsl/adapter/AdapterExtension.scala @@ -20,4 +20,4 @@ import akka.typed.internal.adapter.ActorSystemAdapter */ @InternalApi object AdapterExtension extends akka.actor.ExtensionId[AdapterExtension] { def createExtension(sys: ExtendedActorSystem): AdapterExtension = new AdapterExtension(sys) -} \ No newline at end of file +} diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index 8a064186ac..1b664e9b05 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -4,7 +4,7 @@ package akka -import java.io.{FileInputStream, InputStreamReader} +import java.io.{ FileInputStream, InputStreamReader } import java.util.Properties import akka.TestExtras.JUnitFileReporting @@ -21,31 +21,26 @@ object AkkaBuild { val parallelExecutionByDefault = false // TODO: enable this once we're sure it does not break things lazy val buildSettings = Dependencies.Versions ++ Seq( - organization := "com.typesafe.akka", - version := "2.5-SNAPSHOT" - ) + organization := "com.typesafe.akka", + version := "2.5-SNAPSHOT") lazy val rootSettings = parentSettings ++ Release.settings ++ UnidocRoot.akkaSettings ++ + Formatting.formatSettings ++ Protobuf.settings ++ Seq( - parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean - ) + parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean) val dontPublishSettings = Seq( publishSigned := (), publish := (), publishArtifact in Compile := false, - whitesourceIgnore := true - ) + whitesourceIgnore := true) val dontPublishDocsSettings = Seq( - sources in doc in Compile := List() - ) - + sources in doc in Compile := List()) lazy val parentSettings = Seq( - publishArtifact := false - ) ++ dontPublishSettings + publishArtifact := false) ++ dontPublishSettings lazy val mayChangeSettings = Seq( description := """|This module of Akka is marked as @@ -58,33 +53,31 @@ object AkkaBuild { |refine and simplify based on your feedback. Additionally |such a module may be dropped in major releases |without prior deprecation. 
- |""".stripMargin - ) + |""".stripMargin) val (mavenLocalResolver, mavenLocalResolverSettings) = System.getProperty("akka.build.M2Dir") match { - case null => (Resolver.mavenLocal, Seq.empty) - case path => + case null ⇒ (Resolver.mavenLocal, Seq.empty) + case path ⇒ // Maven resolver settings val resolver = Resolver.file("user-publish-m2-local", new File(path)) (resolver, Seq( - otherResolvers := resolver:: publishTo.value.toList, - publishM2Configuration := Classpaths.publishConfig(packagedArtifacts.value, None, resolverName = resolver.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value, overwrite = true) - )) + otherResolvers := resolver :: publishTo.value.toList, + publishM2Configuration := Classpaths.publishConfig(packagedArtifacts.value, None, resolverName = resolver.name, checksums = checksums.in(publishM2).value, logging = ivyLoggingLevel.value, overwrite = true))) } lazy val resolverSettings = { // should we be allowed to use artifacts published to the local maven repository - if(System.getProperty("akka.build.useLocalMavenResolver", "false").toBoolean) + if (System.getProperty("akka.build.useLocalMavenResolver", "false").toBoolean) Seq(resolvers += mavenLocalResolver) else Seq.empty } ++ { // should we be allowed to use artifacts from sonatype snapshots - if(System.getProperty("akka.build.useSnapshotSonatypeResolver", "false").toBoolean) + if (System.getProperty("akka.build.useSnapshotSonatypeResolver", "false").toBoolean) Seq(resolvers += Resolver.sonatypeRepo("snapshots")) else Seq.empty } ++ Seq( - pomIncludeRepository := (_ => false) // do not leak internal repositories during staging + pomIncludeRepository := (_ ⇒ false) // do not leak internal repositories during staging ) private def allWarnings: Boolean = System.getProperty("akka.allwarnings", "false").toBoolean @@ -92,28 +85,28 @@ object AkkaBuild { lazy val defaultSettings = resolverSettings ++ TestExtras.Filter.settings ++ Protobuf.settings ++ Seq( - // compile options - scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"), - scalacOptions in Compile ++= (if (allWarnings) Seq("-deprecation") else Nil), - scalacOptions in Test := (scalacOptions in Test).value.filterNot(opt => - opt == "-Xlog-reflective-calls" || opt.contains("genjavadoc")), - // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings - javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"), - javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil), - javacOptions in doc ++= Seq(), - incOptions := incOptions.value.withNameHashing(true), + // compile options + scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"), + scalacOptions in Compile ++= (if (allWarnings) Seq("-deprecation") else Nil), + scalacOptions in Test := (scalacOptions in Test).value.filterNot(opt ⇒ + opt == "-Xlog-reflective-calls" || opt.contains("genjavadoc")), + // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings + javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"), + javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil), + javacOptions in doc ++= Seq(), + incOptions := incOptions.value.withNameHashing(true), - crossVersion := CrossVersion.binary, + crossVersion := 
CrossVersion.binary, - ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet, + ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet, - licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), - homepage := Some(url("http://akka.io/")), + licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))), + homepage := Some(url("http://akka.io/")), - apiURL := Some(url(s"http://doc.akka.io/api/akka/${version.value}")), + apiURL := Some(url(s"http://doc.akka.io/api/akka/${version.value}")), - initialCommands := - """|import language.postfixOps + initialCommands := + """|import language.postfixOps |import akka.actor._ |import ActorDSL._ |import scala.concurrent._ @@ -129,87 +122,82 @@ object AkkaBuild { |implicit val timeout = Timeout(5 seconds) |""".stripMargin, - /** - * Test settings - */ - fork in Test := true, + /** + * Test settings + */ + fork in Test := true, - // default JVM config for tests - javaOptions in Test ++= { - val defaults = Seq( - // ## core memory settings - "-XX:+UseG1GC", - // most tests actually don't really use _that_ much memory (>1g usually) - // twice used (and then some) keeps G1GC happy - very few or to no full gcs - "-Xms3g", "-Xmx3g", - // increase stack size (todo why?) - "-Xss2m", + // default JVM config for tests + javaOptions in Test ++= { + val defaults = Seq( + // ## core memory settings + "-XX:+UseG1GC", + // most tests actually don't really use _that_ much memory (>1g usually) + // twice used (and then some) keeps G1GC happy - very few to no full gcs + "-Xms3g", "-Xmx3g", + // increase stack size (todo why?) + "-Xss2m", - // ## extra memory/gc tuning - // this breaks jstat, but could avoid costly syncs to disc see http://www.evanjones.ca/jvm-mmap-pause.html - "-XX:+PerfDisableSharedMem", - // tell G1GC that we would be really happy if all GC pauses could be kept below this as higher would - // likely start causing test failures in timing tests - "-XX:MaxGCPauseMillis=300", - // nio direct memory limit for artery/aeron (probably) - "-XX:MaxDirectMemorySize=256m", + // ## extra memory/gc tuning + // this breaks jstat, but could avoid costly syncs to disc, see http://www.evanjones.ca/jvm-mmap-pause.html + "-XX:+PerfDisableSharedMem", + // tell G1GC that we would be really happy if all GC pauses could be kept below this as higher would + // likely start causing test failures in timing tests + "-XX:MaxGCPauseMillis=300", + // nio direct memory limit for artery/aeron (probably) + "-XX:MaxDirectMemorySize=256m", - // faster random source - "-Djava.security.egd=file:/dev/./urandom" - ) + // faster random source + "-Djava.security.egd=file:/dev/./urandom") - if (sys.props.contains("akka.ci-server")) - defaults ++ Seq("-XX:+PrintGCTimeStamps", "-XX:+PrintGCDetails") - else - defaults - }, + if (sys.props.contains("akka.ci-server")) + defaults ++ Seq("-XX:+PrintGCTimeStamps", "-XX:+PrintGCDetails") + else + defaults + }, + // all system properties passed to sbt prefixed with "akka." will be passed on to the forked jvms as is + javaOptions in Test := { + val base = (javaOptions in Test).value + val akkaSysProps: Seq[String] = + sys.props.filter(_._1.startsWith("akka")) + .map { case (key, value) ⇒ s"-D$key=$value" }(breakOut) - // all system properties passed to sbt prefixed with "akka."
will be passed on to the forked jvms as is - javaOptions in Test := { - val base = (javaOptions in Test).value - val akkaSysProps: Seq[String] = - sys.props.filter(_._1.startsWith("akka")) - .map { case (key, value) => s"-D$key=$value" }(breakOut) + base ++ akkaSysProps + }, - base ++ akkaSysProps - }, + // with forked tests the working directory is set to each module's home directory + // rather than the Akka root, some tests depend on Akka root being working dir, so reset + testGrouping in Test := { + val original: Seq[Tests.Group] = (testGrouping in Test).value - // with forked tests the working directory is set to each module's home directory - // rather than the Akka root, some tests depend on Akka root being working dir, so reset - testGrouping in Test := { - val original: Seq[Tests.Group] = (testGrouping in Test).value - - original.map { group => - group.runPolicy match { - case Tests.SubProcess(forkOptions) => - group.copy(runPolicy = Tests.SubProcess(forkOptions.copy( - workingDirectory = Some(new File(System.getProperty("user.dir"))) - ))) - case _ => group + original.map { group ⇒ + group.runPolicy match { + case Tests.SubProcess(forkOptions) ⇒ + group.copy(runPolicy = Tests.SubProcess(forkOptions.copy( + workingDirectory = Some(new File(System.getProperty("user.dir")))))) + case _ ⇒ group + } } - } - }, + }, - parallelExecution in Test := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean, - logBuffered in Test := System.getProperty("akka.logBufferedTests", "false").toBoolean, + parallelExecution in Test := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean, + logBuffered in Test := System.getProperty("akka.logBufferedTests", "false").toBoolean, - // show full stack traces and test case durations - testOptions in Test += Tests.Argument("-oDF"), + // show full stack traces and test case durations + testOptions in Test += Tests.Argument("-oDF"), - // -v Log "test run started" / "test started" / "test run finished" events on log level "info" instead of "debug". - // -a Show stack traces and exception class name for AssertionErrors. - testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a") - ) ++ - mavenLocalResolverSettings ++ - JUnitFileReporting.settings ++ - docLintingSettings + // -v Log "test run started" / "test started" / "test run finished" events on log level "info" instead of "debug". + // -a Show stack traces and exception class name for AssertionErrors. 
+ testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a")) ++ + mavenLocalResolverSettings ++ + JUnitFileReporting.settings ++ + docLintingSettings lazy val docLintingSettings = Seq( - javacOptions in compile ++= Seq("-Xdoclint:none"), - javacOptions in test ++= Seq("-Xdoclint:none"), - javacOptions in doc ++= Seq("-Xdoclint:none") - ) + javacOptions in compile ++= Seq("-Xdoclint:none"), + javacOptions in test ++= Seq("-Xdoclint:none"), + javacOptions in doc ++= Seq("-Xdoclint:none")) def loadSystemProperties(fileName: String): Unit = { import scala.collection.JavaConverters._ @@ -224,5 +212,5 @@ object AkkaBuild { } } - def majorMinor(version: String): Option[String] ="""\d+\.\d+""".r.findFirstIn(version) + def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version) } diff --git a/project/CliOptions.scala b/project/CliOptions.scala index c80fae8f34..ccd0d37526 100644 --- a/project/CliOptions.scala +++ b/project/CliOptions.scala @@ -11,7 +11,7 @@ object CliOption { def apply[T](path: String, default: T)(implicit ev: CliOptionParser[T]): CliOption[T] = ev.parse(path, default) implicit class BooleanCliOption(cliOption: CliOption[Boolean]) { - def ifTrue[A](a: => A): Option[A] = if (cliOption.get) Some(a) else None + def ifTrue[A](a: ⇒ A): Option[A] = if (cliOption.get) Some(a) else None } trait CliOptionParser[T] { diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 6621da59b8..8a616288fe 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -25,90 +25,88 @@ object Dependencies { scalaStmVersion := sys.props.get("akka.build.scalaStmVersion").getOrElse("0.8"), scalaCheckVersion := sys.props.get("akka.build.scalaCheckVersion").getOrElse( CrossVersion.partialVersion(scalaVersion.value) match { - case Some((2, n)) if n >= 12 => "1.13.5" // does not work for 2.11 - case _ => "1.13.2" - } - ), + case Some((2, n)) if n >= 12 ⇒ "1.13.5" // does not work for 2.11 + case _ ⇒ "1.13.2" + }), scalaTestVersion := "3.0.4", java8CompatVersion := { CrossVersion.partialVersion(scalaVersion.value) match { - case Some((2, n)) if n >= 12 => "0.8.0" - case _ => "0.7.0" + case Some((2, n)) if n >= 12 ⇒ "0.8.0" + case _ ⇒ "0.7.0" } - } - ) + }) object Compile { // Compile - val camelCore = "org.apache.camel" % "camel-core" % "2.17.7" exclude("org.slf4j", "slf4j-api") // ApacheV2 + val camelCore = "org.apache.camel" % "camel-core" % "2.17.7" exclude ("org.slf4j", "slf4j-api") // ApacheV2 // when updating config version, update links ActorSystem ScalaDoc to link to the updated version - val config = "com.typesafe" % "config" % "1.3.1" // ApacheV2 - val netty = "io.netty" % "netty" % "3.10.6.Final" // ApacheV2 - val scalaStm = Def.setting { "org.scala-stm" %% "scala-stm" % scalaStmVersion.value } // Modified BSD (Scala) + val config = "com.typesafe" % "config" % "1.3.1" // ApacheV2 + val netty = "io.netty" % "netty" % "3.10.6.Final" // ApacheV2 + val scalaStm = Def.setting { "org.scala-stm" %% "scala-stm" % scalaStmVersion.value } // Modified BSD (Scala) - val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion // Scala License - val scalaReflect = ScalaVersionDependentModuleID.versioned("org.scala-lang" % "scala-reflect" % _) // Scala License + val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion // Scala License + val scalaReflect = ScalaVersionDependentModuleID.versioned("org.scala-lang" % "scala-reflect" % _) // Scala License - val slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion // MIT + val 
slf4jApi = "org.slf4j" % "slf4j-api" % slf4jVersion // MIT // mirrored in OSGi sample https://github.com/akka/akka-samples/tree/master/akka-sample-osgi-dining-hakkers - val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2 - val osgiCompendium= "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2 + val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2 + val osgiCompendium = "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2 - val sigar = "org.fusesource" % "sigar" % "1.6.4" // ApacheV2 + val sigar = "org.fusesource" % "sigar" % "1.6.4" // ApacheV2 // reactive streams - val reactiveStreams = "org.reactivestreams" % "reactive-streams" % "1.0.1" // CC0 + val reactiveStreams = "org.reactivestreams" % "reactive-streams" % "1.0.1" // CC0 // ssl-config - val sslConfigCore = "com.typesafe" %% "ssl-config-core" % sslConfigVersion // ApacheV2 + val sslConfigCore = "com.typesafe" %% "ssl-config-core" % sslConfigVersion // ApacheV2 - val lmdb = "org.lmdbjava" % "lmdbjava" % "0.6.0" // ApacheV2, OpenLDAP Public License + val lmdb = "org.lmdbjava" % "lmdbjava" % "0.6.0" // ApacheV2, OpenLDAP Public License // For akka-http-testkit-java - val junit = "junit" % "junit" % junitVersion // Common Public License 1.0 + val junit = "junit" % "junit" % junitVersion // Common Public License 1.0 // For Java 8 Conversions - val java8Compat = Def.setting {"org.scala-lang.modules" %% "scala-java8-compat" % java8CompatVersion.value} // Scala License + val java8Compat = Def.setting { "org.scala-lang.modules" %% "scala-java8-compat" % java8CompatVersion.value } // Scala License - val aeronDriver = "io.aeron" % "aeron-driver" % aeronVersion // ApacheV2 - val aeronClient = "io.aeron" % "aeron-client" % aeronVersion // ApacheV2 + val aeronDriver = "io.aeron" % "aeron-driver" % aeronVersion // ApacheV2 + val aeronClient = "io.aeron" % "aeron-client" % aeronVersion // ApacheV2 object Docs { - val sprayJson = "io.spray" %% "spray-json" % "1.3.3" % "test" - val gson = "com.google.code.gson" % "gson" % "2.8.1" % "test" + val sprayJson = "io.spray" %% "spray-json" % "1.3.3" % "test" + val gson = "com.google.code.gson" % "gson" % "2.8.1" % "test" } object Test { - val commonsMath = "org.apache.commons" % "commons-math" % "2.2" % "test" // ApacheV2 - val commonsIo = "commons-io" % "commons-io" % "2.5" % "test" // ApacheV2 - val commonsCodec = "commons-codec" % "commons-codec" % "1.10" % "test" // ApacheV2 - val junit = "junit" % "junit" % junitVersion % "test" // Common Public License 1.0 - val logback = "ch.qos.logback" % "logback-classic" % "1.2.3" % "test" // EPL 1.0 / LGPL 2.1 - val mockito = "org.mockito" % "mockito-core" % "2.7.16" % "test" // MIT + val commonsMath = "org.apache.commons" % "commons-math" % "2.2" % "test" // ApacheV2 + val commonsIo = "commons-io" % "commons-io" % "2.5" % "test" // ApacheV2 + val commonsCodec = "commons-codec" % "commons-codec" % "1.10" % "test" // ApacheV2 + val junit = "junit" % "junit" % junitVersion % "test" // Common Public License 1.0 + val logback = "ch.qos.logback" % "logback-classic" % "1.2.3" % "test" // EPL 1.0 / LGPL 2.1 + val mockito = "org.mockito" % "mockito-core" % "2.7.16" % "test" // MIT // changing the scalatest dependency must be reflected in akka-docs/rst/dev/multi-jvm-testing.rst - val scalatest = Def.setting { "org.scalatest" %% "scalatest" % scalaTestVersion.value % "test" } // ApacheV2 - val scalacheck = Def.setting { "org.scalacheck" %% "scalacheck" % scalaCheckVersion.value % "test" } // New BSD - val pojosr = 
"com.googlecode.pojosr" % "de.kalpatec.pojosr.framework" % "0.2.1" % "test" // ApacheV2 - val tinybundles = "org.ops4j.pax.tinybundles" % "tinybundles" % "1.0.0" % "test" // ApacheV2 - val log4j = "log4j" % "log4j" % "1.2.17" % "test" // ApacheV2 - val junitIntf = "com.novocode" % "junit-interface" % "0.11" % "test" // MIT - val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion % "test" + val scalatest = Def.setting { "org.scalatest" %% "scalatest" % scalaTestVersion.value % "test" } // ApacheV2 + val scalacheck = Def.setting { "org.scalacheck" %% "scalacheck" % scalaCheckVersion.value % "test" } // New BSD + val pojosr = "com.googlecode.pojosr" % "de.kalpatec.pojosr.framework" % "0.2.1" % "test" // ApacheV2 + val tinybundles = "org.ops4j.pax.tinybundles" % "tinybundles" % "1.0.0" % "test" // ApacheV2 + val log4j = "log4j" % "log4j" % "1.2.17" % "test" // ApacheV2 + val junitIntf = "com.novocode" % "junit-interface" % "0.11" % "test" // MIT + val scalaXml = "org.scala-lang.modules" %% "scala-xml" % scalaXmlVersion % "test" // in-memory filesystem for file related tests - val jimfs = "com.google.jimfs" % "jimfs" % "1.1" % "test" // ApacheV2 + val jimfs = "com.google.jimfs" % "jimfs" % "1.1" % "test" // ApacheV2 // metrics, measurements, perf testing - val metrics = "io.dropwizard.metrics" % "metrics-core" % "3.2.4" % "test" // ApacheV2 - val metricsJvm = "io.dropwizard.metrics" % "metrics-jvm" % "3.2.4" % "test" // ApacheV2 - val latencyUtils = "org.latencyutils" % "LatencyUtils" % "1.0.5" % "test" // Free BSD - val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.9" % "test" // CC0 - val metricsAll = Seq(metrics, metricsJvm, latencyUtils, hdrHistogram) + val metrics = "io.dropwizard.metrics" % "metrics-core" % "3.2.4" % "test" // ApacheV2 + val metricsJvm = "io.dropwizard.metrics" % "metrics-jvm" % "3.2.4" % "test" // ApacheV2 + val latencyUtils = "org.latencyutils" % "LatencyUtils" % "1.0.5" % "test" // Free BSD + val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.9" % "test" // CC0 + val metricsAll = Seq(metrics, metricsJvm, latencyUtils, hdrHistogram) // sigar logging - val slf4jJul = "org.slf4j" % "jul-to-slf4j" % slf4jVersion % "test" // MIT - val slf4jLog4j = "org.slf4j" % "log4j-over-slf4j" % slf4jVersion % "test" // MIT + val slf4jJul = "org.slf4j" % "jul-to-slf4j" % slf4jVersion % "test" // MIT + val slf4jLog4j = "org.slf4j" % "log4j-over-slf4j" % slf4jVersion % "test" // MIT // reactive streams tck val reactiveStreamsTck = "org.reactivestreams" % "reactive-streams-tck" % "1.0.1" % "test" // CC0 @@ -117,10 +115,10 @@ object Dependencies { object Provided { // TODO remove from "test" config // If changed, update akka-docs/build.sbt as well - val sigarLoader = "io.kamon" % "sigar-loader" % "1.6.6-rev002" % "optional;provided;test" // ApacheV2 + val sigarLoader = "io.kamon" % "sigar-loader" % "1.6.6-rev002" % "optional;provided;test" // ApacheV2 - val levelDB = "org.iq80.leveldb" % "leveldb" % "0.9" % "optional;provided" // ApacheV2 - val levelDBNative = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" % "optional;provided" // New BSD + val levelDB = "org.iq80.leveldb" % "leveldb" % "0.9" % "optional;provided" // ApacheV2 + val levelDBNative = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" % "optional;provided" // New BSD } } @@ -188,16 +186,16 @@ object Dependencies { } object DependencyHelpers { - case class ScalaVersionDependentModuleID(modules: String => Seq[ModuleID]) { + case class ScalaVersionDependentModuleID(modules: 
String ⇒ Seq[ModuleID]) { def %(config: String): ScalaVersionDependentModuleID = - ScalaVersionDependentModuleID(version => modules(version).map(_ % config)) + ScalaVersionDependentModuleID(version ⇒ modules(version).map(_ % config)) } object ScalaVersionDependentModuleID { - implicit def liftConstantModule(mod: ModuleID): ScalaVersionDependentModuleID = versioned(_ => mod) + implicit def liftConstantModule(mod: ModuleID): ScalaVersionDependentModuleID = versioned(_ ⇒ mod) - def versioned(f: String => ModuleID): ScalaVersionDependentModuleID = ScalaVersionDependentModuleID(v => Seq(f(v))) + def versioned(f: String ⇒ ModuleID): ScalaVersionDependentModuleID = ScalaVersionDependentModuleID(v ⇒ Seq(f(v))) def fromPF(f: PartialFunction[String, ModuleID]): ScalaVersionDependentModuleID = - ScalaVersionDependentModuleID(version => if (f.isDefinedAt(version)) Seq(f(version)) else Nil) + ScalaVersionDependentModuleID(version ⇒ if (f.isDefinedAt(version)) Seq(f(version)) else Nil) } /** @@ -205,16 +203,16 @@ object DependencyHelpers { * dependent entries. */ def versionDependentDeps(modules: ScalaVersionDependentModuleID*): Def.Setting[Seq[ModuleID]] = - libraryDependencies ++= modules.flatMap(m => m.modules(scalaVersion.value)) + libraryDependencies ++= modules.flatMap(m ⇒ m.modules(scalaVersion.value)) val ScalaVersion = """\d\.\d+\.\d+(?:-(?:M|RC)\d+)?""".r - val nominalScalaVersion: String => String = { + val nominalScalaVersion: String ⇒ String = { // matches: // 2.12.0-M1 // 2.12.0-RC1 // 2.12.0 - case version @ ScalaVersion() => version + case version @ ScalaVersion() ⇒ version // transforms 2.12.0-custom-version to 2.12.0 - case version => version.takeWhile(_ != '-') + case version ⇒ version.takeWhile(_ != '-') } } diff --git a/project/Doc.scala b/project/Doc.scala index 1241cccb52..aaf2f21c53 100644 --- a/project/Doc.scala +++ b/project/Doc.scala @@ -25,15 +25,14 @@ object Scaladoc extends AutoPlugin { override lazy val projectSettings = { inTask(doc)(Seq( scalacOptions in Compile ++= scaladocOptions(version.value, (baseDirectory in ThisBuild).value), - autoAPIMappings := CliOptions.scaladocAutoAPI.get - )) ++ - Seq(validateDiagrams in Compile := true) ++ - CliOptions.scaladocDiagramsEnabled.ifTrue(doc in Compile := { - val docs = (doc in Compile).value - if ((validateDiagrams in Compile).value) - scaladocVerifier(docs) - docs - }) + autoAPIMappings := CliOptions.scaladocAutoAPI.get)) ++ + Seq(validateDiagrams in Compile := true) ++ + CliOptions.scaladocDiagramsEnabled.ifTrue(doc in Compile := { + val docs = (doc in Compile).value + if ((validateDiagrams in Compile).value) + scaladocVerifier(docs) + docs + }) } def scaladocOptions(ver: String, base: File): List[String] = { @@ -42,7 +41,7 @@ object Scaladoc extends AutoPlugin { CliOptions.scaladocDiagramsEnabled.ifTrue("-diagrams").toList ::: opts } - def scaladocVerifier(file: File): File= { + def scaladocVerifier(file: File): File = { @tailrec def findHTMLFileWithDiagram(dirs: Seq[File]): Boolean = { if (dirs.isEmpty) false @@ -50,18 +49,17 @@ object Scaladoc extends AutoPlugin { val curr = dirs.head val (newDirs, files) = curr.listFiles.partition(_.isDirectory) val rest = dirs.tail ++ newDirs - val hasDiagram = files exists { f => + val hasDiagram = files exists { f ⇒ val name = f.getName if (name.endsWith(".html") && !name.startsWith("index-") && !name.equals("index.html") && !name.equals("package.html")) { val source = scala.io.Source.fromFile(f)(scala.io.Codec.UTF8) val hd = try source.getLines().exists(_.contains("
")) catch { - case e: Exception => throw new IllegalStateException("Scaladoc verification failed for file '"+f+"'", e) + case e: Exception ⇒ throw new IllegalStateException("Scaladoc verification failed for file '" + f + "'", e) } finally source.close() hd - } - else false + } else false } hasDiagram || findHTMLFileWithDiagram(rest) } @@ -84,8 +82,7 @@ object ScaladocNoVerificationOfDiagrams extends AutoPlugin { override def requires = Scaladoc override lazy val projectSettings = Seq( - Scaladoc.validateDiagrams in Compile := false - ) + Scaladoc.validateDiagrams in Compile := false) } /** @@ -109,8 +106,7 @@ object UnidocRoot extends AutoPlugin { // genjavadoc needs to generate synthetic methods since the java code uses them scalacOptions += "-P:genjavadoc:suppressSynthetic=false", // FIXME: see #18056 - sources in(JavaUnidoc, unidoc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))) - )).getOrElse(Nil) + sources in (JavaUnidoc, unidoc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))))).getOrElse(Nil) val settings = { def unidocRootProjectFilter(ignoreProjects: Seq[Project]) = @@ -119,8 +115,7 @@ object UnidocRoot extends AutoPlugin { inTask(unidoc)(Seq( unidocProjectFilter in ScalaUnidoc := unidocRootProjectFilter(unidocRootIgnoreProjects.value), unidocProjectFilter in JavaUnidoc := unidocRootProjectFilter(unidocRootIgnoreProjects.value), - apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value - )) + apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value)) } override lazy val projectSettings = @@ -140,7 +135,5 @@ object Unidoc extends AutoPlugin { scalacOptions in Compile += "-P:genjavadoc:fabricateParams=true", unidocGenjavadocVersion in Global := "0.10", // FIXME: see #18056 - sources in(Genjavadoc, doc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))) - ) - ).getOrElse(Seq.empty) + sources in (Genjavadoc, doc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))))).getOrElse(Seq.empty) } diff --git a/project/Formatting.scala b/project/Formatting.scala index 57936066f7..61f731cb78 100644 --- a/project/Formatting.scala +++ b/project/Formatting.scala @@ -9,35 +9,28 @@ import com.typesafe.sbt.SbtScalariform import com.typesafe.sbt.SbtScalariform.ScalariformKeys object Formatting { + import scalariform.formatter.preferences._ + lazy val formatSettings = Seq( - ScalariformKeys.preferences in Compile := formattingPreferences.value, - ScalariformKeys.preferences in Test := formattingPreferences.value, - ScalariformKeys.preferences in MultiJvm := formattingPreferences.value + ScalariformKeys.preferences := setPreferences(ScalariformKeys.preferences.value), + ScalariformKeys.preferences in Compile := setPreferences(ScalariformKeys.preferences.value), + ScalariformKeys.preferences in Test := setPreferences(ScalariformKeys.preferences.value), + ScalariformKeys.preferences in MultiJvm := setPreferences(ScalariformKeys.preferences.value) ) lazy val docFormatSettings = Seq( - ScalariformKeys.preferences in Compile := docFormattingPreferences.value, - ScalariformKeys.preferences in Test := docFormattingPreferences.value, - ScalariformKeys.preferences in MultiJvm := docFormattingPreferences.value + ScalariformKeys.preferences := setPreferences(ScalariformKeys.preferences.value, rewriteArrowSymbols = false), + ScalariformKeys.preferences in Compile := setPreferences(ScalariformKeys.preferences.value, rewriteArrowSymbols = false), + 
ScalariformKeys.preferences in Test := setPreferences(ScalariformKeys.preferences.value, rewriteArrowSymbols = false), + ScalariformKeys.preferences in MultiJvm := setPreferences(ScalariformKeys.preferences.value, rewriteArrowSymbols = false) ) - def formattingPreferences = Def.setting { - import scalariform.formatter.preferences._ - ScalariformKeys.preferences.value - .setPreference(RewriteArrowSymbols, true) - .setPreference(AlignParameters, true) - .setPreference(AlignSingleLineCaseStatements, true) - .setPreference(DanglingCloseParenthesis, Preserve) - .setPreference(DoubleIndentClassDeclaration, false) - } - - def docFormattingPreferences = Def.setting { - import scalariform.formatter.preferences._ - ScalariformKeys.preferences.value - .setPreference(RewriteArrowSymbols, false) - .setPreference(AlignParameters, true) - .setPreference(AlignSingleLineCaseStatements, true) - .setPreference(DanglingCloseParenthesis, Preserve) - .setPreference(DoubleIndentClassDeclaration, false) - } + def setPreferences(preferences: IFormattingPreferences, rewriteArrowSymbols: Boolean = true) = preferences + .setPreference(RewriteArrowSymbols, rewriteArrowSymbols) + .setPreference(AlignParameters, true) + .setPreference(AlignSingleLineCaseStatements, true) + .setPreference(DoubleIndentConstructorArguments, false) + .setPreference(DoubleIndentMethodDeclaration, false) + .setPreference(DanglingCloseParenthesis, Preserve) + .setPreference(NewlineAtEndOfFile, true) } diff --git a/project/GitHub.scala b/project/GitHub.scala index 3d5d8fcc7f..20554f1876 100644 --- a/project/GitHub.scala +++ b/project/GitHub.scala @@ -6,7 +6,8 @@ package akka object GitHub { def envTokenOrThrow: String = - sys.env.getOrElse("PR_VALIDATOR_GH_TOKEN", + sys.env.getOrElse( + "PR_VALIDATOR_GH_TOKEN", throw new Exception("No PR_VALIDATOR_GH_TOKEN env var provided, unable to reach github!")) def url(v: String): String = { diff --git a/project/Jdk9CompileDirectoriesPlugin.scala b/project/Jdk9CompileDirectoriesPlugin.scala index 2b9abbfc63..0c1740d5a2 100644 --- a/project/Jdk9CompileDirectoriesPlugin.scala +++ b/project/Jdk9CompileDirectoriesPlugin.scala @@ -9,7 +9,7 @@ import sbt.Keys._ object Jdk9CompileDirectoriesPlugin extends AutoPlugin { val jdkVersion: String = System.getProperty("java.version") - + override def trigger = allRequirements override lazy val projectSettings = Seq( @@ -19,27 +19,23 @@ object Jdk9CompileDirectoriesPlugin extends AutoPlugin { if (isJDK9) Seq("-target", "1.8", "-source", "1.8", "-Xdoclint:none") else Seq("-Xdoclint:none") }, - + unmanagedSourceDirectories in Compile ++= { if (isJDK9) { println(s"[JDK9] Enabled [...-jdk9-only] directories to be compiled.") Seq( (sourceDirectory in Compile).value / "java-jdk9-only", - (sourceDirectory in Compile).value / "scala-jdk9-only" - ) + (sourceDirectory in Compile).value / "scala-jdk9-only") } else Seq.empty }, - + unmanagedSourceDirectories in Test ++= { if (isJDK9) { Seq( (sourceDirectory in Test).value / "java-jdk9-only", - (sourceDirectory in Test).value / "scala-jdk9-only" - ) + (sourceDirectory in Test).value / "scala-jdk9-only") } else Seq.empty - } - - ) + }) private def isJDK9 = { jdkVersion startsWith "9" diff --git a/project/MiMa.scala b/project/MiMa.scala index 68f577195d..a1c991587f 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -17,50 +17,46 @@ object MiMa extends AutoPlugin { override def trigger = allRequirements override val projectSettings = Seq( - mimaPreviousArtifacts := akkaPreviousArtifacts(name.value, organization.value, 
scalaBinaryVersion.value) - ) + mimaPreviousArtifacts := akkaPreviousArtifacts(name.value, organization.value, scalaBinaryVersion.value)) def akkaPreviousArtifacts(projectName: String, organization: String, scalaBinaryVersion: String): Set[sbt.ModuleID] = { val versions: Seq[String] = { val akka24NoStreamVersions = Seq("2.4.0", "2.4.1") - val akka25Versions = (0 to latestMinorOf25).map(patch => s"2.5.$patch") + val akka25Versions = (0 to latestMinorOf25).map(patch ⇒ s"2.5.$patch") val akka24StreamVersions = (2 to 12) map ("2.4." + _) val akka24WithAtLeastScala212 = (13 to latestMinorOf24) - .map ("2.4." + _) + .map("2.4." + _) .filterNot(_ == "2.4.15") // 2.4.15 was released from the wrong branch and never announced val akka242NewArtifacts = Seq( "akka-stream", - "akka-stream-testkit" - ) + "akka-stream-testkit") val akka250NewArtifacts = Seq( - "akka-persistence-query" - ) + "akka-persistence-query") scalaBinaryVersion match { - case "2.11" => + case "2.11" ⇒ if (akka250NewArtifacts.contains(projectName)) akka25Versions else { if (!akka242NewArtifacts.contains(projectName)) akka24NoStreamVersions else Seq.empty } ++ akka24StreamVersions ++ akka24WithAtLeastScala212 ++ akka25Versions - case "2.12" => + case "2.12" ⇒ akka24WithAtLeastScala212 ++ akka25Versions - case "2.13" => + case "2.13" ⇒ // no Akka released for 2.13 yet, no jars to check BC against - Seq.empty + Seq.empty } } val akka25PromotedArtifacts = Set( - "akka-distributed-data" - ) + "akka-distributed-data") // check against all binary compatible artifacts - versions.map { v => + versions.map { v ⇒ val adjustedProjectName = if (akka25PromotedArtifacts(projectName) && v.startsWith("2.4")) projectName + "-experimental" diff --git a/project/MultiNode.scala b/project/MultiNode.scala index 920808aadc..5ae9416941 100644 --- a/project/MultiNode.scala +++ b/project/MultiNode.scala @@ -5,14 +5,14 @@ package akka import akka.TestExtras.Filter import akka.TestExtras.Filter.Keys._ -import com.typesafe.sbt.{SbtScalariform, SbtMultiJvm} +import com.typesafe.sbt.{ SbtScalariform, SbtMultiJvm } import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys._ import com.typesafe.sbt.SbtScalariform.ScalariformKeys import sbt._ import sbt.Keys._ object MultiNode extends AutoPlugin { - + // MultiJvm tests can be excluded from normal test target and validatePullRequest // with -Dakka.test.multi-in-test=false val multiNodeTestInTest: Boolean = @@ -44,10 +44,10 @@ object MultiNode extends AutoPlugin { val MultinodeJvmArgs = "multinode\\.(D|X)(.*)".r val knownPrefix = Set("multnode.", "akka.", "MultiJvm.") val akkaProperties = System.getProperties.propertyNames.asScala.toList.collect { - case MultinodeJvmArgs(a, b) => + case MultinodeJvmArgs(a, b) ⇒ val value = System.getProperty("multinode."
+ a + b) "-" + a + b + (if (value == "") "" else "=" + value) - case key: String if knownPrefix.exists(pre => key.startsWith(pre)) => "-D" + key + "=" + System.getProperty(key) + case key: String if knownPrefix.exists(pre ⇒ key.startsWith(pre)) ⇒ "-D" + key + "=" + System.getProperty(key) } "-Xmx256m" :: akkaProperties ::: CliOptions.sbtLogNoFormat.ifTrue("-Dakka.test.nocolor=true").toList @@ -55,32 +55,32 @@ object MultiNode extends AutoPlugin { private val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ - inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ - Seq( - jvmOptions in MultiJvm := defaultMultiJvmOptions, - compileInputs in(MultiJvm, compile) := ((compileInputs in(MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm)).value, - scalacOptions in MultiJvm := (scalacOptions in Test).value, - compile in MultiJvm := ((compile in MultiJvm) triggeredBy (compile in Test)).value - ) ++ - CliOptions.hostsFileName.map(multiNodeHostsFileName in MultiJvm := _) ++ - CliOptions.javaName.map(multiNodeJavaName in MultiJvm := _) ++ - CliOptions.targetDirName.map(multiNodeTargetDirName in MultiJvm := _) ++ - (if (multiNodeTestInTest) { - // make sure that MultiJvm tests are executed by the default test target, - // and combine the results from ordinary test and multi-jvm tests - (executeTests in Test) := { - val testResults = (executeTests in Test).value - val multiNodeResults = multiExecuteTests.value - val overall = - if (testResults.overall.id < multiNodeResults.overall.id) - multiNodeResults.overall - else - testResults.overall - Tests.Output(overall, - testResults.events ++ multiNodeResults.events, - testResults.summaries ++ multiNodeResults.summaries) - } - } else Nil) + inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ + Seq( + jvmOptions in MultiJvm := defaultMultiJvmOptions, + compileInputs in (MultiJvm, compile) := ((compileInputs in (MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm)).value, + scalacOptions in MultiJvm := (scalacOptions in Test).value, + compile in MultiJvm := ((compile in MultiJvm) triggeredBy (compile in Test)).value) ++ + CliOptions.hostsFileName.map(multiNodeHostsFileName in MultiJvm := _) ++ + CliOptions.javaName.map(multiNodeJavaName in MultiJvm := _) ++ + CliOptions.targetDirName.map(multiNodeTargetDirName in MultiJvm := _) ++ + (if (multiNodeTestInTest) { + // make sure that MultiJvm tests are executed by the default test target, + // and combine the results from ordinary test and multi-jvm tests + (executeTests in Test) := { + val testResults = (executeTests in Test).value + val multiNodeResults = multiExecuteTests.value + val overall = + if (testResults.overall.id < multiNodeResults.overall.id) + multiNodeResults.overall + else + testResults.overall + Tests.Output( + overall, + testResults.events ++ multiNodeResults.events, + testResults.summaries ++ multiNodeResults.summaries) + } + } else Nil) } /** @@ -93,12 +93,11 @@ object MultiNodeScalaTest extends AutoPlugin { override lazy val projectSettings = Seq( extraOptions in MultiJvm := { val src = (sourceDirectory in MultiJvm).value - (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq + (name: String) ⇒ (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq }, scalatestOptions in MultiJvm := { Seq("-C", "org.scalatest.extra.QuietReporter") ++ (if (excludeTestTags.value.isEmpty) Seq.empty else Seq("-l", if (MultiNode.CliOptions.multiNode.get) 
excludeTestTags.value.mkString("\"", " ", "\"") else excludeTestTags.value.mkString(" "))) ++ (if (onlyTestTags.value.isEmpty) Seq.empty else Seq("-n", if (MultiNode.CliOptions.multiNode.get) onlyTestTags.value.mkString("\"", " ", "\"") else onlyTestTags.value.mkString(" "))) - } - ) + }) } diff --git a/project/OSGi.scala b/project/OSGi.scala index f357a51652..b1509e5ebc 100644 --- a/project/OSGi.scala +++ b/project/OSGi.scala @@ -20,8 +20,7 @@ object OSGi { // Each package contained in a project MUST be known to be private or exported, if it's undecided we MUST resolve this OsgiKeys.failOnUndecidedPackage := true, // By default an entry is generated from module group-id, but our modules do not adhere to such package naming - OsgiKeys.privatePackage := Seq() - ) + OsgiKeys.privatePackage := Seq()) val actor = osgiSettings ++ Seq( OsgiKeys.exportPackage := Seq("akka*"), @@ -29,21 +28,20 @@ object OSGi { //akka-actor packages are not imported, as contained in the CP OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaJava8CompatImport(), scalaVersion(scalaImport).value, configImport(), "*"), // dynamicImportPackage needed for loading classes defined in configuration - OsgiKeys.dynamicImportPackage := Seq("*") - ) + OsgiKeys.dynamicImportPackage := Seq("*")) val agent = exports(Seq("akka.agent.*")) val camel = exports(Seq("akka.camel.*")) val cluster = exports(Seq("akka.cluster.*")) - + val clusterTools = exports(Seq("akka.cluster.singleton.*", "akka.cluster.client.*", "akka.cluster.pubsub.*")) - - val clusterSharding = exports(Seq("akka.cluster.sharding.*")) + + val clusterSharding = exports(Seq("akka.cluster.sharding.*")) val clusterMetrics = exports(Seq("akka.cluster.metrics.*"), imports = Seq(kamonImport(), sigarImport())) - + val distributedData = exports(Seq("akka.cluster.ddata.*")) val contrib = exports(Seq("akka.contrib.*")) @@ -54,27 +52,27 @@ object OSGi { val remote = exports(Seq("akka.remote.*")) - val parsing = exports(Seq("akka.parboiled2.*", "akka.shapeless.*"), + val parsing = exports( + Seq("akka.parboiled2.*", "akka.shapeless.*"), imports = Seq(optionalResolution("scala.quasiquotes"))) val httpCore = exports(Seq("akka.http.*"), imports = Seq(scalaJava8CompatImport())) - val http = exports(Seq("akka.http.impl.server") ++ - Seq( - "akka.http.$DSL$.server.*", - "akka.http.$DSL$.client.*", - "akka.http.$DSL$.coding.*", - "akka.http.$DSL$.common.*", - "akka.http.$DSL$.marshalling.*", - "akka.http.$DSL$.unmarshalling.*" - ) flatMap { p => - Seq(p.replace("$DSL$", "scaladsl"), p.replace("$DSL$", "javadsl")) - }, + val http = exports( + Seq("akka.http.impl.server") ++ + Seq( + "akka.http.$DSL$.server.*", + "akka.http.$DSL$.client.*", + "akka.http.$DSL$.coding.*", + "akka.http.$DSL$.common.*", + "akka.http.$DSL$.marshalling.*", + "akka.http.$DSL$.unmarshalling.*") flatMap { p ⇒ + Seq(p.replace("$DSL$", "scaladsl"), p.replace("$DSL$", "javadsl")) + }, imports = Seq( scalaJava8CompatImport(), akkaImport("akka.stream.*"), - akkaImport("akka.parboiled2.*")) - ) + akkaImport("akka.parboiled2.*"))) val httpTestkit = exports(Seq("akka.http.scaladsl.testkit.*", "akka.http.javadsl.testkit.*")) @@ -86,8 +84,9 @@ object OSGi { val stream = exports( - packages = Seq("akka.stream.*", - "com.typesafe.sslconfig.akka.*"), + packages = Seq( + "akka.stream.*", + "com.typesafe.sslconfig.akka.*"), imports = Seq(scalaJava8CompatImport(), scalaParsingCombinatorImport())) ++ Seq(OsgiKeys.requireBundle := 
Seq(s"""com.typesafe.sslconfig;bundle-version="${Dependencies.sslConfigVersion}"""")) @@ -95,7 +94,8 @@ object OSGi { val slf4j = exports(Seq("akka.event.slf4j.*")) - val persistence = exports(Seq("akka.persistence.*"), + val persistence = exports( + Seq("akka.persistence.*"), imports = Seq(optionalResolution("org.fusesource.leveldbjni.*"), optionalResolution("org.iq80.leveldb.*"))) val persistenceQuery = exports(Seq("akka.persistence.query.*")) @@ -110,8 +110,7 @@ object OSGi { def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq( OsgiKeys.importPackage := imports ++ scalaVersion(defaultImports).value, - OsgiKeys.exportPackage := packages - ) + OsgiKeys.exportPackage := packages) def defaultImports(scalaVersion: String) = Seq("!sun.misc", akkaImport(), configImport(), "!scala.compat.java8.*", "!scala.util.parsing.*", scalaImport(scalaVersion), "*") def akkaImport(packageName: String = "akka.*") = versionedImport(packageName, "2.5", "2.6") @@ -120,7 +119,7 @@ object OSGi { val packageName = "scala.*" val ScalaVersion = """(\d+)\.(\d+)\..*""".r val ScalaVersion(epoch, major) = version - versionedImport(packageName, s"$epoch.$major", s"$epoch.${major.toInt+1}") + versionedImport(packageName, s"$epoch.$major", s"$epoch.${major.toInt + 1}") } def scalaJava8CompatImport(packageName: String = "scala.compat.java8.*") = versionedImport(packageName, "0.7.0", "1.0.0") def scalaParsingCombinatorImport(packageName: String = "scala.util.parsing.combinator.*") = versionedImport(packageName, "1.0.4", "1.1.0") diff --git a/project/Protobuf.scala b/project/Protobuf.scala index a66625f813..48933371ea 100644 --- a/project/Protobuf.scala +++ b/project/Protobuf.scala @@ -40,27 +40,28 @@ object Protobuf { val targets = target.value val cache = targets / "protoc" / "cache" - (sourceDirs zip targetDirs) map { case (src, dst) => - val relative = src.relativeTo(sources).getOrElse(throw new Exception(s"path $src is not a in source tree $sources")).toString - val tmp = targets / "protoc" / relative - IO.delete(tmp) - generate(cmd, src, tmp, log) - transformDirectory(tmp, dst, _ => true, transformFile(_.replace("com.google.protobuf", "akka.protobuf")), cache, log) + (sourceDirs zip targetDirs) map { + case (src, dst) ⇒ + val relative = src.relativeTo(sources).getOrElse(throw new Exception(s"path $src is not a in source tree $sources")).toString + val tmp = targets / "protoc" / relative + IO.delete(tmp) + generate(cmd, src, tmp, log) + transformDirectory(tmp, dst, _ ⇒ true, transformFile(_.replace("com.google.protobuf", "akka.protobuf")), cache, log) } } - } - ) + }) - private def callProtoc[T](protoc: String, args: Seq[String], log: Logger, thunk: (ProcessBuilder, Logger) => T): T = + private def callProtoc[T](protoc: String, args: Seq[String], log: Logger, thunk: (ProcessBuilder, Logger) ⇒ T): T = try { val proc = Process(protoc, args) thunk(proc, log) - } catch { case e: Exception => - throw new RuntimeException("error while executing '%s' with args: %s" format(protoc, args.mkString(" ")), e) + } catch { + case e: Exception ⇒ + throw new RuntimeException("error while executing '%s' with args: %s" format (protoc, args.mkString(" ")), e) } private def checkProtocVersion(protoc: String, protocVersion: String, log: Logger): Unit = { - val res = callProtoc(protoc, Seq("--version"), log, { (p, l) => p !! l }) + val res = callProtoc(protoc, Seq("--version"), log, { (p, l) ⇒ p !! 
l }) val version = res.split(" ").last.trim if (version != protocVersion) { sys.error("Wrong protoc version! Expected %s but got %s" format (protocVersion, version)) @@ -76,10 +77,10 @@ object Protobuf { targetDir.mkdirs() log.info("Generating %d protobuf files from %s to %s".format(protoFiles.size, srcDir, targetDir)) - protoFiles.foreach { proto => log.info("Compiling %s" format proto) } + protoFiles.foreach { proto ⇒ log.info("Compiling %s" format proto) } val exitCode = callProtoc(protoc, Seq("-I" + srcDir.absolutePath, "--java_out=%s" format targetDir.absolutePath) ++ - protoFiles.map(_.absolutePath), log, { (p, l) => p ! l }) + protoFiles.map(_.absolutePath), log, { (p, l) ⇒ p ! l }) if (exitCode != 0) sys.error("protoc returned exit code: %d" format exitCode) } diff --git a/project/Publish.scala b/project/Publish.scala index 7971b85b5e..6690154176 100644 --- a/project/Publish.scala +++ b/project/Publish.scala @@ -21,9 +21,8 @@ object Publish extends AutoPlugin { organizationName := "Lightbend Inc.", organizationHomepage := Some(url("http://www.lightbend.com")), publishMavenStyle := true, - pomIncludeRepository := { x => false }, - defaultPublishTo := crossTarget.value / "repository" - ) + pomIncludeRepository := { x ⇒ false }, + defaultPublishTo := crossTarget.value / "repository") def akkaPomExtra = { /* The scm info is automatic from the sbt-git plugin @@ -48,7 +47,7 @@ object Publish extends AutoPlugin { } private def sonatypeRepo(version: String): Option[Resolver] = - Option(sys.props("publish.maven.central")) filter (_.toLowerCase == "true") map { _ => + Option(sys.props("publish.maven.central")) filter (_.toLowerCase == "true") map { _ ⇒ val nexus = "https://oss.sonatype.org/" if (version endsWith "-SNAPSHOT") "snapshots" at nexus + "content/repositories/snapshots" else "releases" at nexus + "service/local/staging/deploy/maven2" @@ -58,6 +57,6 @@ object Publish extends AutoPlugin { Some(Resolver.file("Default Local Repository", repository)) private def akkaCredentials: Seq[Credentials] = - Option(System.getProperty("akka.publish.credentials", null)).map(f => Credentials(new File(f))).toSeq + Option(System.getProperty("akka.publish.credentials", null)).map(f ⇒ Credentials(new File(f))).toSeq } diff --git a/project/Release.scala b/project/Release.scala index 35fdb49ffa..00bf55a14f 100644 --- a/project/Release.scala +++ b/project/Release.scala @@ -14,14 +14,12 @@ object Release extends ParadoxKeys { val releaseDirectory = SettingKey[File]("release-directory") lazy val settings: Seq[Setting[_]] = commandSettings ++ Seq( - releaseDirectory := crossTarget.value / "release" - ) + releaseDirectory := crossTarget.value / "release") lazy val commandSettings = Seq( - commands += buildReleaseCommand - ) + commands += buildReleaseCommand) - def buildReleaseCommand = Command.command("buildRelease") { state => + def buildReleaseCommand = Command.command("buildRelease") { state ⇒ val extracted = Project.extract(state) val release = extracted.get(releaseDirectory) val releaseVersion = extracted.get(version) diff --git a/project/SigarLoader.scala b/project/SigarLoader.scala index d43b91f5a1..a5a73382d4 100644 --- a/project/SigarLoader.scala +++ b/project/SigarLoader.scala @@ -34,22 +34,18 @@ object SigarLoader { sigarArtifact := { val report = update.value val artifactList = report.matching( - moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name) - ) + moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name)) require(artifactList.size == 1, 
"Expecting single artifact, while found: " + artifactList) artifactList.head }, sigarFolder := target.value / "native", sigarOptions := "-javaagent:" + sigarArtifact.value + "=" + sigarFolderProperty + "=" + sigarFolder.value, // - fork in Test := true - ) ++ ( + fork in Test := true) ++ ( // Invoke Sigar agent at JVM init time, to extract and load native Sigar library. if (sigarTestEnabled) Seq( - javaOptions in Test += sigarOptions.value - ) - else Seq() - ) + javaOptions in Test += sigarOptions.value) + else Seq()) } } diff --git a/project/TestExtras.scala b/project/TestExtras.scala index 9a281b88b8..9105cd40e6 100644 --- a/project/TestExtras.scala +++ b/project/TestExtras.scala @@ -12,8 +12,7 @@ object TestExtras { val settings = Seq( // we can enable junit-style reports everywhere with this testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a", "-u", (target.value / "test-reports").getAbsolutePath), - testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-u", (target.value / "test-reports").getAbsolutePath) - ) + testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-u", (target.value / "test-reports").getAbsolutePath)) } object Filter { @@ -41,7 +40,7 @@ object TestExtras { onlyTestTags := Params.testTagsOnly, // add filters for tests excluded by name - testOptions in Test ++= excludeTestNames.value.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))), + testOptions in Test ++= excludeTestNames.value.toSeq.map(exclude ⇒ Tests.Filter(test ⇒ !test.contains(exclude))), // add arguments for tests excluded by tag testOptions in Test ++= { @@ -53,8 +52,7 @@ object TestExtras { testOptions in Test ++= { val tags = onlyTestTags.value if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" "))) - } - ) + }) } def containsOrNotExcludesTag(tag: String) = { diff --git a/project/TimeStampede.scala b/project/TimeStampede.scala index 42d7fe0f83..fbb4826339 100644 --- a/project/TimeStampede.scala +++ b/project/TimeStampede.scala @@ -16,12 +16,11 @@ object TimeStampede extends AutoPlugin { override def trigger = noTrigger override lazy val projectSettings = Seq( - commands += stampVersion - ) + commands += stampVersion) final val Snapshot = "-SNAPSHOT" - def stampVersion = Command.command("stampVersion") { state => + def stampVersion = Command.command("stampVersion") { state ⇒ val extracted = Project.extract(state) extracted.append(List(version in ThisBuild ~= stamp), state) } diff --git a/project/ValidatePullRequest.scala b/project/ValidatePullRequest.scala index 5851a0d0e1..d14a45a1dd 100644 --- a/project/ValidatePullRequest.scala +++ b/project/ValidatePullRequest.scala @@ -80,22 +80,24 @@ object ValidatePullRequest extends AutoPlugin { val validatePullRequest = taskKey[Unit]("Validate pull request") val additionalTasks = taskKey[Seq[TaskKey[_]]]("Additional tasks for pull request validation") - def changedDirectoryIsDependency(changedDirs: Set[String], - name: String, - graphsToTest: Seq[(Configuration, ModuleGraph)])(log: Logger): Boolean = { - graphsToTest exists { case (ivyScope, deps) => - log.debug(s"Analysing [$ivyScope] scoped dependencies...") + def changedDirectoryIsDependency( + changedDirs: Set[String], + name: String, + graphsToTest: Seq[(Configuration, ModuleGraph)])(log: Logger): Boolean = { + graphsToTest exists { + case (ivyScope, deps) ⇒ + log.debug(s"Analysing [$ivyScope] scoped dependencies...") - deps.nodes.foreach { m ⇒ log.debug(" -> " + m.id) } + deps.nodes.foreach { m ⇒ log.debug(" -> " + m.id) } - // if this project depends 
on a modified module, we must test it - deps.nodes.exists { m => - // match just by name, we'd rather include too much than too little - val dependsOnModule = changedDirs.find(m.id.name contains _) - val depends = dependsOnModule.isDefined - if (depends) log.info(s"Project [$name] must be verified, because depends on [${dependsOnModule.get}]") - depends - } + // if this project depends on a modified module, we must test it + deps.nodes.exists { m ⇒ + // match just by name, we'd rather include too much than too little + val dependsOnModule = changedDirs.find(m.id.name contains _) + val depends = dependsOnModule.isDefined + if (depends) log.info(s"Project [$name] must be verified, because depends on [${dependsOnModule.get}]") + depends + } } } @@ -114,16 +116,16 @@ object ValidatePullRequest extends AutoPlugin { targetBranch in Global in ValidatePR := { (localTargetBranch, jenkinsTargetBranch) match { - case (Some(local), _) => local // local override - case (None, Some(branch)) => s"origin/$branch" // usually would be "master" or "release-2.3" etc - case (None, None) => "origin/master" // defaulting to diffing with "master" + case (Some(local), _) ⇒ local // local override + case (None, Some(branch)) ⇒ s"origin/$branch" // usually would be "master" or "release-2.3" etc + case (None, None) ⇒ "origin/master" // defaulting to diffing with "master" } }, buildAllKeyword in Global in ValidatePR := """PLS BUILD ALL""".r, githubEnforcedBuildAll in Global in ValidatePR := { - sys.env.get(PullIdEnvVarName).map(_.toInt) flatMap { prId => + sys.env.get(PullIdEnvVarName).map(_.toInt) flatMap { prId ⇒ val log = streams.value.log val buildAllMagicPhrase = (buildAllKeyword in ValidatePR).value log.info("Checking GitHub comments for PR validation options...") @@ -134,11 +136,12 @@ object ValidatePullRequest extends AutoPlugin { val comments = gh.getRepository("akka/akka").getIssue(prId).getComments.asScala def triggersBuildAll(c: GHIssueComment): Boolean = buildAllMagicPhrase.findFirstIn(c.getBody).isDefined - comments collectFirst { case c if triggersBuildAll(c) => - BuildCommentForcedAll(buildAllMagicPhrase.toString(), c) + comments collectFirst { + case c if triggersBuildAll(c) ⇒ + BuildCommentForcedAll(buildAllMagicPhrase.toString(), c) } } catch { - case ex: Exception => + case ex: Exception ⇒ log.warn("Unable to reach GitHub! 
Exception was: " + ex.getMessage) None } @@ -157,11 +160,10 @@ object ValidatePullRequest extends AutoPlugin { val diffOutput = s"git diff $target --name-only".!!.split("\n") val diffedModuleNames = diffOutput - .map(l => l.trim) - .filter(l => + .map(l ⇒ l.trim) + .filter(l ⇒ l.startsWith("akka-") || - (l.startsWith("project") && l != "project/MiMa.scala") - ) + (l.startsWith("project") && l != "project/MiMa.scala")) .map(l ⇒ l.takeWhile(_ != '/')) .toSet @@ -172,18 +174,16 @@ object ValidatePullRequest extends AutoPlugin { val dirtyDirectories = statusOutput .map(l ⇒ l.trim.dropWhile(_ != ' ').drop(1)) .map(_.takeWhile(_ != '/')) - .filter(dir => dir.startsWith("akka-") || dir == "project") + .filter(dir ⇒ dir.startsWith("akka-") || dir == "project") .toSet log.info("Detected uncommitted changes in directories (including in dependency analysis): " + dirtyDirectories.mkString("[", ",", "]")) dirtyDirectories } - val allModuleNames = dirtyModuleNames ++ diffedModuleNames log.info("Detected changes in directories: " + allModuleNames.mkString("[", ", ", "]")) allModuleNames - } - ) + }) override lazy val projectSettings = inConfig(ValidatePR)(Defaults.testTasks) ++ Seq( testOptions in ValidatePR += Tests.Argument(TestFrameworks.ScalaTest, "-l", "performance"), @@ -236,23 +236,23 @@ object ValidatePullRequest extends AutoPlugin { buildMode.log(name.value, log) val validationTasks = buildMode.task.toSeq ++ (buildMode match { - case BuildSkip => Seq.empty // do not run the additional task if project is skipped during pr validation - case _ => (additionalTasks in ValidatePR).value + case BuildSkip ⇒ Seq.empty // do not run the additional task if project is skipped during pr validation + case _ ⇒ (additionalTasks in ValidatePR).value }) // Create a task for every validation task key and // then zip all of the tasks together discarding outputs. // Task failures are propagated as normal. 
-      val zero: Def.Initialize[Seq[Task[Any]]] = Def.setting { Seq(task())}
-      validationTasks.map(taskKey => Def.task { taskKey.value } ).foldLeft(zero) { (acc, current) =>
-        acc.zipWith(current) { case (taskSeq, task) =>
-          taskSeq :+ task.asInstanceOf[Task[Any]]
+      val zero: Def.Initialize[Seq[Task[Any]]] = Def.setting { Seq(task()) }
+      validationTasks.map(taskKey ⇒ Def.task { taskKey.value }).foldLeft(zero) { (acc, current) ⇒
+        acc.zipWith(current) {
+          case (taskSeq, task) ⇒
+            taskSeq :+ task.asInstanceOf[Task[Any]]
         }
-      } apply { tasks: Seq[Task[Any]] =>
-        tasks.join map { seq => () /* Ignore the sequence of unit returned */ }
+      } apply { tasks: Seq[Task[Any]] ⇒
+        tasks.join map { seq ⇒ () /* Ignore the sequence of unit returned */ }
       }
-    }.value
-  )
+    }.value)
 }

 /**
@@ -284,8 +284,7 @@ object MimaWithPrValidation extends AutoPlugin {
   override def trigger = allRequirements
   override def requires = ValidatePullRequest && MimaPlugin
   override lazy val projectSettings = Seq(
-    additionalTasks in ValidatePR += mimaReportBinaryIssues
-  )
+    additionalTasks in ValidatePR += mimaReportBinaryIssues)
 }

 object UnidocWithPrValidation extends AutoPlugin {
@@ -293,6 +292,5 @@
   override def trigger = noTrigger

   override lazy val projectSettings = Seq(
-    additionalTasks in ValidatePR += unidoc in Compile
-  )
+    additionalTasks in ValidatePR += unidoc in Compile)
 }
diff --git a/project/Version.scala b/project/Version.scala
index 13706a876d..3ff5bce3ed 100644
--- a/project/Version.scala
+++ b/project/Version.scala
@@ -21,10 +21,9 @@ object Version {
        |  val current: String = "%s"
        |}
-       |""")
-    ))
+       |""")))

-  def generateVersion(dir: SettingKey[File], locate: File => File, template: String) = Def.task[Seq[File]] {
+  def generateVersion(dir: SettingKey[File], locate: File ⇒ File, template: String) = Def.task[Seq[File]] {
     val file = locate(dir.value)
     val content = template.stripMargin.format(version.value)
     if (!file.exists || IO.read(file) != content) IO.write(file, content)
diff --git a/project/Whitesource.scala b/project/Whitesource.scala
index 4e5ef7b0a6..d83aba7650 100644
--- a/project/Whitesource.scala
+++ b/project/Whitesource.scala
@@ -9,7 +9,7 @@ object Whitesource extends AutoPlugin {

   override def trigger = allRequirements

-  def majorMinor(version: String): Option[String] ="""\d+\.\d+""".r.findFirstIn(version)
+  def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version)

   override lazy val projectSettings = Seq(
     // do not change the value of whitesourceProduct
@@ -18,9 +18,7 @@
       (moduleName in LocalRootProject).value + "-" + (
         if (isSnapshot.value)
           if (gitCurrentBranch.value == "master") "master"
-          else "adhoc"
-        else majorMinor((version in LocalRootProject).value).map(_ + "-stable").getOrElse("adhoc")
-      )
-    }
-  )
+          else "adhoc"
+        else majorMinor((version in LocalRootProject).value).map(_ + "-stable").getOrElse("adhoc"))
+    })
 }
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 26f5da8ff3..12c0abb0a7 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -9,7 +9,7 @@ resolvers += "Bintray Jcenter" at "https://jcenter.bintray.com/"

 addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.8")
 //#sbt-multi-jvm

-addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.6.0")
+addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.8.0")

 addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.7.1")
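
A note on the patch above: nearly every hunk is mechanical formatter output, triggered by the sbt-scalariform upgrade from 1.6.0 to 1.8.0 recorded in the final plugins.sbt hunk. The rewrites are uniform: ASCII arrows become their unicode forms (=> becomes ⇒), multi-imports gain inner spaces, and dangling close-parentheses are pulled up onto the preceding line. The sketch below shows scalariform preferences that yield this style under sbt-scalariform 1.8.0; the preference names exist in scalariform, but the exact set Akka configures is not part of this diff, so treat the selection as an assumption for illustration.

    // build.sbt: illustrative sketch, not Akka's actual configuration
    import com.typesafe.sbt.SbtScalariform.ScalariformKeys
    import scalariform.formatter.preferences._

    ScalariformKeys.preferences := ScalariformKeys.preferences.value
      // rewrite => to ⇒ and <- to ←, as seen throughout the hunks above
      .setPreference(RewriteArrowSymbols, true)
      // keep closing parentheses on the previous line, e.g. Seq("akka-persistence-query"))
      .setPreference(DanglingCloseParenthesis, Prevent)
      // format {SbtScalariform, SbtMultiJvm} as { SbtScalariform, SbtMultiJvm }
      .setPreference(SpacesAroundMultiImports, true)

Because the MultiNode.scala hunk wires formatting in front of compilation (compileInputs ... dependsOn (ScalariformKeys.format in MultiJvm)), a plugin upgrade that changes formatter defaults reformats the whole tree on the next build, which is why this patch touches so many files without changing any runtime behavior.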