Use scalafix to update to Scala 2.13.3, silencer 1.7.0 (#29367)

ohze.net authored on 2020-12-09 15:20:13 +07:00, committed by GitHub
parent 388fb73beb
commit 9416dedad7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
58 changed files with 131 additions and 115 deletions

View file

@@ -3,15 +3,18 @@ rules = [
 RemoveUnused
 SortImports
 ExplicitResultTypes
+"github:ohze/scalafix-rules/ExplicitNonNullaryApply"
+"github:ohze/scalafix-rules/ConstructorProcedureSyntax"
 "github:ohze/scalafix-rules/FinalObject"
-"github:ohze/scalafix-rules/Any2StringAdd"
+fix.scala213.DottyMigrate
+// fix.scala213.NullaryOverride
 ]
+//NullaryOverride.mode = Rewrite
 ExplicitResultTypes {
 memberVisibility = [] # only rewrite implicit members
 skipSimpleDefinitions = []
 }
+ExplicitImplicitTypes.symbolReplacements {
+"scala/concurrent/ExecutionContextExecutor#" = "scala/concurrent/ExecutionContext#"
+}
 RemoveUnused.imports = true
 RemoveUnused.privates = false
 RemoveUnused.locals = false
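
The new rules come from the ohze scalafix-rules and scala-rewrites projects. Judging by the rewrites in the rest of this commit, ExplicitNonNullaryApply adds empty argument lists at call sites of methods declared with (), and fix.scala213.DottyMigrate covers Scala 3-preparation rewrites such as parenthesizing typed lambda parameters. A minimal sketch of source already in the rewritten form (names are illustrative, not from this commit):

    // Illustrative only: the shapes these rules produce.
    object ScalafixRuleExamples {
      final class Resource { def close(): Unit = () }
      val r = new Resource

      // ExplicitNonNullaryApply: `r.close` is rewritten to `r.close()` because close is declared with ().
      r.close()

      // Scala 2.13.3 (and the DottyMigrate rewrites) want parentheses around a typed lambda parameter:
      val incremented = List(1, 2, 3).map { (i: Int) => i + 1 }
    }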

View file

@@ -118,7 +118,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS
 "log failures in postStop" in {
 val a = system.actorOf(Props(new Actor {
 def receive = Actor.emptyBehavior
-override def postStop: Unit = { throw new Exception("hurrah") }
+override def postStop(): Unit = { throw new Exception("hurrah") }
 }))
 EventFilter[Exception]("hurrah", occurrences = 1).intercept {
 a ! PoisonPill
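
Akka's Actor declares postStop with an empty parameter list, and Scala 2.13 flags overrides whose arity does not match (Scala 3 rejects them), which is why the override gains (). A minimal sketch with a hypothetical trait, not Akka's:

    // Hypothetical trait used only to illustrate the arity rule.
    trait Lifecycle {
      def postStop(): Unit = () // declared with an empty parameter list
    }

    class MyComponent extends Lifecycle {
      // Overriding a ()-method without () is warned on 2.13 and rejected by Scala 3,
      // so the rewrite makes the override match the parent's arity.
      override def postStop(): Unit = println("stopping")
    }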

View file

@@ -101,7 +101,7 @@ object ActorWithStashSpec {
 class ActorWithStashSpec extends AkkaSpec with DefaultTimeout with BeforeAndAfterEach {
 import ActorWithStashSpec._
-override def atStartup: Unit = {
+override def atStartup(): Unit = {
 system.eventStream.publish(Mute(EventFilter[Exception]("Crashing...")))
 }

View file

@@ -183,7 +183,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 * It is necessary here because of the path-dependent type fsm.StopEvent.
 */
 lazy val fsm = new Actor with FSM[Int, Null] {
-override def preStart = { started.countDown() }
+override def preStart() = { started.countDown() }
 startWith(1, null)
 when(1) { FSM.NullFunction }
 onTermination {

View file

@@ -22,7 +22,7 @@ import akka.testkit.TestLatch
 @silent
 class RestartStrategySpec extends AkkaSpec with DefaultTimeout {
-override def atStartup: Unit = {
+override def atStartup(): Unit = {
 system.eventStream.publish(Mute(EventFilter[Exception]("Crashing...")))
 }

View file

@@ -31,7 +31,7 @@ class DnsClientSpec extends AkkaSpec with ImplicitSender {
 val client = system.actorOf(Props(new DnsClient(dnsServerAddress) {
 override val udp = udpExtensionProbe.ref
-override def createTcpClient = {
+override def createTcpClient() = {
 tcpClientCreated.set(true)
 TestProbe().ref
 }
@@ -57,7 +57,7 @@ class DnsClientSpec extends AkkaSpec with ImplicitSender {
 val client = system.actorOf(Props(new DnsClient(dnsServerAddress) {
 override val udp = udpExtensionProbe.ref
-override def createTcpClient = tcpClientProbe.ref
+override def createTcpClient() = tcpClientProbe.ref
 }))
 client ! exampleRequest

View file

@@ -37,7 +37,7 @@ object ScatterGatherFirstCompletedSpec {
 }
 }
-override def postStop = {
+override def postStop() = {
 shudownLatch.foreach(_.countDown())
 }
 }),

View file

@@ -22,7 +22,7 @@ object BenchRunner {
 val opts = new CommandLineOptions(args2: _*)
 val results = new Runner(opts).run()
-val report = results.asScala.map { result: RunResult =>
+val report = results.asScala.map { (result: RunResult) =>
 val bench = result.getParams.getBenchmark
 val params =
 result.getParams.getParamsKeys.asScala.map(key => s"$key=${result.getParams.getParam(key)}").mkString("_")

View file

@@ -97,7 +97,7 @@ class SbrTestLeaseActorClient(settings: LeaseSettings, system: ExtendedActorSyst
 SbrTestLeaseActorClientExt(system).setActorLeaseClient(this)
-private implicit val timeout = Timeout(3.seconds)
+private implicit val timeout: Timeout = Timeout(3.seconds)
 private val _leaseRef = new AtomicReference[ActorRef]
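
ExplicitResultTypes, configured above with memberVisibility = [] so that only implicit members are rewritten, adds the Timeout annotation here; Scala 3 requires explicit result types on non-local implicit definitions. A sketch of the pattern (assumed object name, not from the diff):

    import akka.util.Timeout
    import scala.concurrent.duration._

    object AskSupport {
      // Before: implicit val timeout = Timeout(3.seconds)
      implicit val timeout: Timeout = Timeout(3.seconds)
    }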

View file

@@ -20,6 +20,7 @@ import com.github.ghik.silencer.silent
 import com.typesafe.config.ConfigFactory
 import org.scalatest.wordspec.AnyWordSpecLike
 import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext
 import akka.cluster.sharding.ShardCoordinator.ShardAllocationStrategy
@@ -98,7 +99,7 @@ object RememberEntitiesFailureSpec {
 class FakeShardStoreActor(shardId: ShardId) extends Actor with ActorLogging with Timers {
 import FakeShardStoreActor._
-implicit val ec = context.system.dispatcher
+implicit val ec: ExecutionContext = context.system.dispatcher
 private var failUpdate: Option[Fail] = None
 context.system.eventStream.publish(ShardStoreCreated(self, shardId))
@@ -201,7 +202,7 @@ class RememberEntitiesFailureSpec
 "Remember entities handling in sharding" must {
-List(NoResponse, CrashStore, StopStore, Delay(500.millis), Delay(1.second)).foreach { wayToFail: Fail =>
+List(NoResponse, CrashStore, StopStore, Delay(500.millis), Delay(1.second)).foreach { (wayToFail: Fail) =>
 s"recover when initial remember entities load fails $wayToFail" in {
 log.debug("Getting entities for shard 1 will fail")
 failShardGetEntities = Map("1" -> wayToFail)
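
context.system.dispatcher is statically an ExecutionContextExecutor, so an un-annotated implicit val would be typed with that narrower type; the ExplicitImplicitTypes.symbolReplacements entry in the new .scalafix.conf maps it to plain ExecutionContext, which is why the annotations added throughout this commit use ExecutionContext. A sketch (assumed system value):

    import akka.actor.ActorSystem
    import scala.concurrent.ExecutionContext

    object DispatcherTyping {
      val system: ActorSystem = ActorSystem("example")

      // The dispatcher is an ExecutionContextExecutor; the symbolReplacements mapping widens
      // the generated annotation to ExecutionContext.
      implicit val ec: ExecutionContext = system.dispatcher
    }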

View file

@@ -129,7 +129,7 @@ class ActorSystemSpec
 Behaviors.receiveMessage[Done] { _ =>
 Behaviors.stopped
 }
-withSystem("shutdown", stoppable, doTerminate = false) { sys: ActorSystem[Done] =>
+withSystem("shutdown", stoppable, doTerminate = false) { (sys: ActorSystem[Done]) =>
 sys ! Done
 sys.whenTerminated.futureValue
 }

View file

@@ -117,7 +117,7 @@ class ReplicatorDocSpec
 import ReplicatorDocSpec._
-implicit val selfNodeAddress = DistributedData(system).selfUniqueAddress
+implicit val selfNodeAddress: SelfUniqueAddress = DistributedData(system).selfUniqueAddress
 "Replicator" must {

View file

@@ -110,7 +110,7 @@ akka {
 class BasicClusterConfigSpec extends AnyWordSpec with ScalaFutures with Eventually with Matchers with LogCapturing {
 import BasicClusterExampleSpec._
-implicit override val patienceConfig =
+implicit override val patienceConfig: PatienceConfig =
 PatienceConfig(timeout = scaled(Span(10, Seconds)), interval = scaled(Span(100, Millis)))
 "Cluster API" must {
@@ -167,7 +167,7 @@ class BasicClusterManualSpec extends AnyWordSpec with ScalaFutures with Eventual
 import BasicClusterManualSpec._
-implicit override val patienceConfig =
+implicit override val patienceConfig: PatienceConfig =
 PatienceConfig(timeout = scaled(Span(10, Seconds)), interval = scaled(Span(100, Millis)))
 "Cluster API" must {

View file

@@ -453,7 +453,7 @@ class ActorDocSpec extends AkkaSpec("""
 class DependencyInjector(applicationContext: AnyRef, beanName: String) extends IndirectActorProducer {
 override def actorClass = classOf[Actor]
-override def produce =
+override def produce() =
 //#obtain-fresh-Actor-instance-from-DI-framework
 new Echo(beanName)
@@ -578,15 +578,16 @@ class ActorDocSpec extends AkkaSpec("""
 def receive = {
 case "kill" =>
-context.stop(child); lastSender = sender()
-case Terminated(`child`) => lastSender ! "finished"
+context.stop(child)
+lastSender = sender()
+case Terminated(`child`) =>
+lastSender ! "finished"
 }
 }
 //#watch
 val victim = system.actorOf(Props(classOf[WatchActor], this))
-implicit val sender = testActor
-victim ! "kill"
+victim.tell("kill", testActor)
 expectMsg("finished")
 }
 }
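
The old sample relied on an un-annotated implicit val sender = testActor to supply the sender of victim ! "kill"; since the migration annotates or removes such implicits, the sample now passes the sender explicitly with tell. A runnable sketch of the same idea (assumed actor, not the doc's WatchActor):

    import akka.actor.{ Actor, ActorRef, ActorSystem, Props }

    object TellWithExplicitSender extends App {
      val system: ActorSystem = ActorSystem("tell-example")
      val echo: ActorRef = system.actorOf(Props(new Actor {
        def receive = { case msg => sender() ! msg }
      }))

      // Instead of `implicit val sender = ...; echo ! "kill"`, pass the sender explicitly:
      echo.tell("kill", ActorRef.noSender)
      system.terminate()
    }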

View file

@@ -80,7 +80,7 @@ object Worker {
 class Worker extends Actor with ActorLogging {
 import Worker._
 import CounterService._
-implicit val askTimeout = Timeout(5 seconds)
+implicit val askTimeout: Timeout = Timeout(5 seconds)
 // Stop the CounterService child if it throws ServiceUnavailable
 override val supervisorStrategy = OneForOneStrategy() {

View file

@@ -16,8 +16,8 @@ import scala.concurrent.duration._
 object DnsCompileOnlyDocSpec {
-implicit val system = ActorSystem()
-implicit val timeout = Timeout(1.second)
+implicit val system: ActorSystem = ActorSystem()
+implicit val timeout: Timeout = Timeout(1.second)
 val actorRef: ActorRef = ???
 //#resolve

View file

@@ -63,7 +63,7 @@ object DistributedDataDocSpec {
 import DataBot._
 val replicator = DistributedData(context.system).replicator
-implicit val node = DistributedData(context.system).selfUniqueAddress
+implicit val node: SelfUniqueAddress = DistributedData(context.system).selfUniqueAddress
 import context.dispatcher
 val tickTask = context.system.scheduler.scheduleWithFixedDelay(5.seconds, 5.seconds, self, Tick)

View file

@@ -12,6 +12,7 @@ import akka.actor.Props
 import akka.cluster.ddata.DistributedData
 import akka.cluster.ddata.LWWMap
 import akka.cluster.ddata.LWWMapKey
+import akka.cluster.ddata.SelfUniqueAddress
 object ShoppingCart {
 import akka.cluster.ddata.Replicator._
@@ -38,7 +39,7 @@ class ShoppingCart(userId: String) extends Actor {
 import akka.cluster.ddata.Replicator._
 val replicator = DistributedData(context.system).replicator
-implicit val node = DistributedData(context.system).selfUniqueAddress
+implicit val node: SelfUniqueAddress = DistributedData(context.system).selfUniqueAddress
 val DataKey = LWWMapKey[String, LineItem]("cart-" + userId)

View file

@@ -106,7 +106,7 @@ object FutureDocSpec {
 import akka.pattern.{ ask, pipe }
 implicit val ec: ExecutionContext = context.dispatcher
-implicit val timeout = Timeout(5 seconds)
+implicit val timeout: Timeout = Timeout(5 seconds)
 def receive = {
 case GetUserData =>

View file

@@ -16,7 +16,7 @@ import scala.io.StdIn
 object EchoServer extends App {
 val config = ConfigFactory.parseString("akka.loglevel = DEBUG")
-implicit val system = ActorSystem("EchoServer", config)
+implicit val system: ActorSystem = ActorSystem("EchoServer", config)
 system.actorOf(Props(classOf[EchoManager], classOf[EchoHandler]), "echo")
 system.actorOf(Props(classOf[EchoManager], classOf[SimpleEchoHandler]), "simple")

View file

@@ -14,7 +14,7 @@ import scala.collection.immutable
 class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) {
-def this() = {
+def this() =
 this("""
 akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.local"
@@ -65,7 +65,6 @@ class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) {
 }
 }
 """)
-}
 "MyAutomaticJsonEventAdapter" must {
 "demonstrate how to implement a JSON adapter" in {

View file

@@ -24,7 +24,7 @@ import com.typesafe.config.Config
 object PersistenceQueryDocSpec {
-implicit val timeout = Timeout(3.seconds)
+implicit val timeout: Timeout = Timeout(3.seconds)
 //#advanced-journal-query-types
 final case class RichEvent(tags: Set[String], payload: Any)
@@ -150,7 +150,7 @@ object PersistenceQueryDocSpec {
 }
 //#projection-into-different-store-rs
-implicit val system = ActorSystem()
+implicit val system: ActorSystem = ActorSystem()
 val readJournal =
 PersistenceQuery(system).readJournalFor[MyScaladslReadJournal](JournalId)
@@ -199,14 +199,13 @@ object PersistenceQueryDocSpec {
 class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
 import PersistenceQueryDocSpec._
-def this() = {
+def this() =
 this("""
 akka.persistence.query.my-read-journal {
 class = "docs.persistence.query.PersistenceQueryDocSpec$MyReadJournalProvider"
 refresh-interval = 3s
 }
 """)
-}
 class BasicUsage {
 //#basic-usage

View file

@@ -12,10 +12,11 @@ import akka.testkit.AkkaSpec
 import akka.util.ByteString
 import scala.concurrent.{ Future, Promise }
+import scala.concurrent.ExecutionContext
 class CompositionDocSpec extends AkkaSpec {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 "nonnested flow" in {
 //#non-nested-flow

View file

@@ -16,10 +16,11 @@ import docs.CompileOnlySpec
 import scala.concurrent.{ Future, Promise }
 import scala.util.{ Failure, Success }
+import scala.concurrent.ExecutionContext
 class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 "source is immutable" in {
 //#source-immutable
@@ -251,7 +252,7 @@ object FlowDocSpec {
 //#materializer-from-actor-context
 final class RunWithMyself extends Actor {
-implicit val mat = Materializer(context)
+implicit val mat: Materializer = Materializer(context)
 Source.maybe.runWith(Sink.onComplete {
 case Success(done) => println(s"Completed: $done")

View file

@@ -12,10 +12,11 @@ import akka.testkit.AkkaSpec
 import scala.collection.immutable
 import scala.concurrent.duration._
 import scala.concurrent.{ Await, Future }
+import scala.concurrent.ExecutionContext
 class GraphDSLDocSpec extends AkkaSpec {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 "build simple graph" in {
 //format: OFF

View file

@@ -9,10 +9,11 @@ import java.util.concurrent.ThreadLocalRandom
 import akka.stream._
 import akka.stream.scaladsl._
 import akka.testkit.{ AkkaSpec, EventFilter }
+import scala.concurrent.ExecutionContext
 class GraphStageLoggingDocSpec extends AkkaSpec("akka.loglevel = DEBUG") {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 //#operator-with-logging
 import akka.stream.stage.{ GraphStage, GraphStageLogic, OutHandler, StageLogging }

View file

@@ -24,13 +24,13 @@ import org.scalatest.concurrent._
 //#main-app
 object Main extends App {
-implicit val system = ActorSystem("QuickStart")
+implicit val system: ActorSystem = ActorSystem("QuickStart")
 // Code here
 }
 //#main-app
 class QuickStartDocSpec extends AnyWordSpec with BeforeAndAfterAll with ScalaFutures {
-implicit val patience = PatienceConfig(5.seconds)
+implicit val patience: PatienceConfig = PatienceConfig(5.seconds)
 def println(any: Any) = () // silence printing stuff

View file

@@ -11,10 +11,11 @@ import akka.testkit.AkkaSpec
 import scala.concurrent.{ Await, Future }
 import scala.concurrent.duration._
+import scala.concurrent.ExecutionContext
 class StreamPartialGraphDSLDocSpec extends AkkaSpec {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 "build with open ports" in {
 //#simple-partial-graph-dsl

View file

@@ -17,6 +17,7 @@ import scala.io.StdIn.readLine
 //#imports
 import akka.testkit.AkkaSpec
+import scala.concurrent.ExecutionContext
 object TwitterStreamQuickstartDocSpec {
 //#fiddle_code
@@ -70,7 +71,7 @@ object TwitterStreamQuickstartDocSpec {
 class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 import TwitterStreamQuickstartDocSpec._
-implicit val executionContext = system.dispatcher
+implicit val executionContext: ExecutionContext = system.dispatcher
 // Disable println
 def println(s: Any): Unit = ()
@@ -79,7 +80,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 //#fiddle_code
 //#first-sample
 //#system-setup
-implicit val system = ActorSystem("reactive-tweets")
+implicit val system: ActorSystem = ActorSystem("reactive-tweets")
 //#system-setup
 //#first-sample

View file

@@ -20,7 +20,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
 case class Topic(name: String)
 val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e"))
-val extractTopics = { msg: Message =>
+val extractTopics = { (msg: Message) =>
 if (msg.startsWith("1")) List(Topic("1"))
 else List(Topic("1"), Topic("2"))
 }
@@ -28,7 +28,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
 //#multi-groupby
 val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics
-val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message =>
+val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { (msg: Message) =>
 val topicsForMessage = topicMapper(msg)
 // Create a (Msg, Topic) pair for each of the topics
 // the message belongs to

View file

@@ -13,10 +13,11 @@ import akka.util.ByteString
 import akka.testkit.AkkaSpec
 import scala.concurrent.Future
+import scala.concurrent.ExecutionContext
 class StreamFileDocSpec extends AkkaSpec(UnboundedMailboxConfig) {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 // silence sysout
 def println(s: String) = ()

View file

@@ -14,10 +14,11 @@ import akka.util.ByteString
 import scala.concurrent.Future
 import akka.testkit.SocketUtil
+import scala.concurrent.ExecutionContext
 class StreamTcpDocSpec extends AkkaSpec {
-implicit val ec = system.dispatcher
+implicit val ec: ExecutionContext = system.dispatcher
 // silence sysout
 def println(s: String) = ()

View file

@@ -52,7 +52,7 @@ object Restart extends App {
 }
-def onRestartWithBackoffInnerComplete() = {
+def onRestartWithBackoffInnerComplete(): Unit = {
 //#restart-failure-inner-complete
 val finiteSource = Source.tick(1.second, 1.second, "tick").take(3)

View file

@@ -27,7 +27,7 @@ object Tick {
 case class Response(text: String)
 }
-def simple() = {
+def simple(): Unit = {
 // #simple
 Source
 .tick(
@@ -39,7 +39,7 @@ object Tick {
 // #simple
 }
-def pollSomething() = {
+def pollSomething(): Unit = {
 // #poll-actor
 val periodicActorResponse: Source[String, Cancellable] = Source
 .tick(1.second, 1.second, "tick")

View file

@@ -47,7 +47,7 @@ object Zip {
 // #zipWithN-simple
 }
-def zipAll() = {
+def zipAll(): Unit = {
 // #zipAll-simple
 val numbers = Source(1 :: 2 :: 3 :: 4 :: Nil)
 val letters = Source("a" :: "b" :: "c" :: Nil)

View file

@@ -25,7 +25,7 @@ import scala.util.Random
 *
 */
 object ExtrapolateAndExpandMain extends App {
-implicit val sys = ActorSystem("25fps-stream")
+implicit val sys: ActorSystem = ActorSystem("25fps-stream")
 videoAt25Fps.map(_.pixels.utf8String).map(frame => s"$nowInSeconds - $frame").to(Sink.foreach(println)).run()
 }

View file

@@ -17,7 +17,7 @@ object Fold extends App {
 }
 //#histogram
-implicit val sys = ActorSystem()
+implicit val sys: ActorSystem = ActorSystem()
 //#fold
 Source(1 to 150).fold(Histogram())((acc, n) => acc.add(n)).runForeach(println)

View file

@@ -7,7 +7,7 @@ import akka.actor.ActorSystem
 import akka.stream.scaladsl.Source
 object MergeLatest extends App {
-implicit val system = ActorSystem()
+implicit val system: ActorSystem = ActorSystem()
 //#mergeLatest
 val prices = Source(List(100, 101, 99, 103))

View file

@@ -17,7 +17,7 @@ import scala.concurrent.duration._
 */
 object Throttle extends App {
-implicit val sys = ActorSystem("25fps-stream")
+implicit val sys: ActorSystem = ActorSystem("25fps-stream")
 val frameSource: Source[Int, NotUsed] =
 Source.fromIterator(() => Iterator.from(0))

View file

@@ -76,7 +76,7 @@ class MockedChild extends Actor {
 }
 class ParentChildSpec extends AnyWordSpec with Matchers with TestKitBase with BeforeAndAfterAll {
-implicit lazy val system = ActorSystem("ParentChildSpec")
+implicit lazy val system: ActorSystem = ActorSystem("ParentChildSpec")
 override def afterAll(): Unit = {
 TestKit.shutdownActorSystem(system)

View file

@@ -342,7 +342,7 @@ class TestKitDocSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
 import akka.testkit.TestKitBase
 class MyTest extends TestKitBase {
-implicit lazy val system = ActorSystem()
+implicit lazy val system: ActorSystem = ActorSystem()
 //#put-your-test-code-here
 val probe = TestProbe()

View file

@@ -49,8 +49,8 @@ private[akka] class SnapshotAfter(config: Config) extends Extension {
 * sequence number should trigger auto snapshot or not
 */
 val isSnapshotAfterSeqNo: Long => Boolean = snapshotAfterValue match {
-case Some(snapShotAfterValue) => seqNo: Long => seqNo % snapShotAfterValue == 0
-case None => _: Long => false //always false, if snapshotAfter is not specified in config
+case Some(snapShotAfterValue) => (seqNo: Long) => seqNo % snapShotAfterValue == 0
+case None => (_: Long) => false //always false, if snapshotAfter is not specified in config
 }
 }
@@ -380,7 +380,7 @@ object PersistentFSM {
 stopReason: Option[Reason] = None,
 replies: List[Any] = Nil,
 domainEvents: Seq[E] = Nil,
-afterTransitionDo: D => Unit = { _: D =>
+afterTransitionDo: D => Unit = { (_: D) =>
 })(private[akka] val notifies: Boolean = true) {
 /**

View file

@@ -42,7 +42,7 @@ class InputStreamSourceSpec extends StreamSpec(UnboundedMailboxConfig) {
 StreamConverters
 .fromInputStream(() => inputStreamFor(Array(1, 2, 3)))
 .toMat(Sink.ignore)(Keep.left)
-.run
+.run()
 .futureValue shouldEqual IOResult(3, Success(Done))
 }
@@ -90,7 +90,7 @@ class InputStreamSourceSpec extends StreamSpec(UnboundedMailboxConfig) {
 .fromInputStream(() => inputStreamFor(Array.fill(100)(1)), 1)
 .take(1) // stream is not completely read
 .toMat(Sink.ignore)(Keep.left)
-.run
+.run()
 .futureValue
 f.status shouldEqual Success(Done)
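
RunnableGraph.run is declared with an empty parameter list, so the bare .run calls were nullary applications of a non-nullary method; ExplicitNonNullaryApply inserts the parentheses. A self-contained sketch (assumed system name):

    import akka.actor.ActorSystem
    import akka.stream.scaladsl.{ Keep, Sink, Source }

    object RunWithParens extends App {
      implicit val system: ActorSystem = ActorSystem("run-example")

      // run() is written with explicit parentheses, matching its declaration.
      val done = Source(1 to 3).toMat(Sink.foreach(println))(Keep.right).run()
      done.onComplete(_ => system.terminate())(system.dispatcher)
    }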

View file

@@ -5,7 +5,7 @@
 package akka.stream.scaladsl
 import akka.stream.testkit.scaladsl.StreamTestKit._
-import akka.stream.{ ActorMaterializer, ActorMaterializerSettings }
+import akka.stream.{ ActorMaterializer, ActorMaterializerSettings, Materializer }
 import akka.stream.testkit.{ StreamSpec, TestSubscriber }
 import com.github.ghik.silencer.silent
@@ -19,7 +19,7 @@ class FlowZipWithIndexSpec extends StreamSpec {
 //#zip-with-index
 val settings = ActorMaterializerSettings(system).withInputBuffer(initialSize = 2, maxSize = 16)
-implicit val materializer = ActorMaterializer(settings)
+implicit val materializer: Materializer = ActorMaterializer(settings)
 "A ZipWithIndex for Flow " must {

View file

@@ -40,7 +40,7 @@ class SinkForeachAsyncSpec extends StreamSpec {
 val latch = (1 to 4).map(_ -> TestLatch(1)).toMap
 val sink: Sink[Int, Future[Done]] = {
-Sink.foreachAsync(4) { n: Int =>
+Sink.foreachAsync(4) { (n: Int) =>
 Future {
 Await.result(latch(n), remainingOrDefault)
 probe.ref ! n

View file

@@ -194,7 +194,7 @@ object StageActorRefSpec {
 val p: Promise[Int] = Promise()
 val logic = new GraphStageLogic(shape) {
-implicit def self = stageActor.ref // must be a `def`; we want self to be the sender for our replies
+implicit def self: ActorRef = stageActor.ref // must be a `def`; we want self to be the sender for our replies
 var sum: Int = 0
 override def preStart(): Unit = {

View file

@@ -37,7 +37,7 @@ class StreamConvertersSpec extends StreamSpec with DefaultTimeout {
 import scala.compat.java8.FunctionConverters._
 def javaStreamInts =
-IntStream.iterate(1, { i: Int =>
+IntStream.iterate(1, { (i: Int) =>
 i + 1
 }.asJava)
@@ -70,7 +70,7 @@ class StreamConvertersSpec extends StreamSpec with DefaultTimeout {
 "work with a filtered stream" in {
 StreamConverters
 .fromJavaStream(() =>
-javaStreamInts.filter({ i: Int =>
+javaStreamInts.filter({ (i: Int) =>
 i % 2 == 0
 }.asJava))
 .take(1000)
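
The same parenthesized-lambda rewrite, here on functions handed to java.util.stream via scala-java8-compat's FunctionConverters, which this spec already uses. A sketch assuming that dependency:

    import java.util.stream.IntStream
    import scala.compat.java8.FunctionConverters._

    object JavaStreamInterop extends App {
      // The single annotated parameter must be parenthesized on Scala 2.13.3+.
      val evens = IntStream.iterate(0, { (i: Int) => i + 2 }.asJava).limit(5)
      evens.forEach(n => println(n))
    }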

View file

@@ -15,6 +15,7 @@ import akka.stream._
 import akka.stream.ActorAttributes.SupervisionStrategy
 import akka.stream.impl.Stages.DefaultAttributes
 import akka.stream.stage._
+import scala.concurrent.ExecutionContext
 /**
 * INTERNAL API
@@ -30,7 +31,7 @@ import akka.stream.stage._
 def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) with OutHandler {
 lazy val decider = inheritedAttributes.mandatoryAttribute[SupervisionStrategy].decider
-private implicit def ec = materializer.executionContext
+private implicit def ec: ExecutionContext = materializer.executionContext
 private var state: Option[S] = None
 private val createdCallback = getAsyncCallback[Try[S]] {

View file

@@ -58,7 +58,7 @@ object Source {
 * with an empty Optional.
 */
 def maybe[T]: Source[T, CompletableFuture[Optional[T]]] = {
-new Source(scaladsl.Source.maybe[T].mapMaterializedValue { scalaOptionPromise: Promise[Option[T]] =>
+new Source(scaladsl.Source.maybe[T].mapMaterializedValue { (scalaOptionPromise: Promise[Option[T]]) =>
 val javaOptionPromise = new CompletableFuture[Optional[T]]()
 scalaOptionPromise.completeWith(
 javaOptionPromise.toScala.map(_.asScala)(akka.dispatch.ExecutionContexts.parasitic))

View file

@@ -84,7 +84,7 @@ object TLS {
 def theSslConfig(system: ActorSystem): AkkaSSLConfig =
 sslConfig.getOrElse(AkkaSSLConfig(system))
-val createSSLEngine = { system: ActorSystem =>
+val createSSLEngine = { (system: ActorSystem) =>
 val config = theSslConfig(system)
 val engine = hostInfo match {

View file

@@ -180,7 +180,7 @@ final class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension {
 halfClose: Boolean = false,
 idleTimeout: Duration = Duration.Inf)(implicit m: Materializer): Future[ServerBinding] = {
 bind(interface, port, backlog, options, halfClose, idleTimeout)
-.to(Sink.foreach { conn: IncomingConnection =>
+.to(Sink.foreach { (conn: IncomingConnection) =>
 conn.flow.join(handler).run()
 })
 .run()
@@ -469,7 +469,7 @@ final class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension {
 verifySession: SSLSession => Try[Unit],
 closing: TLSClosing)(implicit m: Materializer): Future[ServerBinding] = {
 bindWithTls(interface, port, createSSLEngine, backlog, options, idleTimeout, verifySession, closing)
-.to(Sink.foreach { conn: IncomingConnection =>
+.to(Sink.foreach { (conn: IncomingConnection) =>
 conn.handleWith(handler)
 })
 .run()
@@ -500,7 +500,7 @@ final class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension {
 options: immutable.Traversable[SocketOption] = Nil,
 idleTimeout: Duration = Duration.Inf)(implicit m: Materializer): Future[ServerBinding] = {
 bindTls(interface, port, sslContext, negotiateNewSession, backlog, options, idleTimeout)
-.to(Sink.foreach { conn: IncomingConnection =>
+.to(Sink.foreach { (conn: IncomingConnection) =>
 conn.handleWith(handler)
 })
 .run()

View file

@@ -1,5 +1,7 @@
 import akka.{ AutomaticModuleName, CopyrightHeaderForBuild, Paradox, ScalafixIgnoreFilePlugin }
+ThisBuild / scalafixScalaBinaryVersion := scalaBinaryVersion.value
 enablePlugins(
 UnidocRoot,
 UnidocWithPrValidation,
@@ -29,11 +31,11 @@ addCommandAlias("applyCodeStyle", "headerCreateAll; javafmtAll; scalafmtAll")
 addCommandAlias(
 name = "fixall",
 value =
-";scalafixEnable; compile:scalafix; test:scalafix; multi-jvm:scalafix; scalafmtAll; test:compile; multi-jvm:compile; reload")
+";scalafixEnable; scalafixAll; scalafmtAll; test:compile; multi-jvm:compile; reload")
 addCommandAlias(
 name = "sortImports",
-value = ";scalafixEnable; compile:scalafix SortImports; test:scalafix SortImports; scalafmtAll")
+value = ";scalafixEnable; scalafixAll SortImports; scalafmtAll")
 import akka.AkkaBuild._
 import akka.{ AkkaBuild, Dependencies, OSGi, Protobuf, SigarLoader, VersionGenerator }
@@ -248,7 +250,7 @@ lazy val docs = akkaModule("akka-docs")
 StreamOperatorsIndexGenerator,
 Jdk9)
 .disablePlugins(MimaPlugin, WhiteSourcePlugin)
-.disablePlugins(ScalafixPlugin)
+.disablePlugins((if (ScalafixSupport.fixTestScope) Nil else Seq(ScalafixPlugin)): _*)
 lazy val jackson = akkaModule("akka-serialization-jackson")
 .dependsOn(
@@ -332,6 +334,7 @@ lazy val protobuf = akkaModule("akka-protobuf")
 .settings(AutomaticModuleName.settings("akka.protobuf"))
 .enablePlugins(ScaladocNoVerificationOfDiagrams)
 .disablePlugins(MimaPlugin)
+.settings(autoScalaLibrary := false) // Pure java project
 lazy val protobufV3 = akkaModule("akka-protobuf-v3")
 .settings(OSGi.protobufV3)
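
Pinning scalafixScalaBinaryVersion to the build's Scala binary version lets semantic rules resolve against the right artifacts, and the aliases now use scalafixAll, which runs the rules in every configuration with scalafix enabled instead of listing compile:scalafix, test:scalafix and multi-jvm:scalafix by hand. A minimal build.sbt sketch of the same alias (simplified from the commit, assumes sbt-scalafix is on the plugin classpath):

    ThisBuild / scalafixScalaBinaryVersion := scalaBinaryVersion.value

    addCommandAlias(
      name = "fixall",
      value = ";scalafixEnable; scalafixAll; scalafmtAll; test:compile; reload")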

View file

@@ -65,10 +65,10 @@ object AkkaDisciplinePlugin extends AutoPlugin {
 lazy val silencerSettings = {
 val silencerVersion = "1.7.1"
-Seq(
-libraryDependencies ++= Seq(
-compilerPlugin(("com.github.ghik" %% "silencer-plugin" % silencerVersion).cross(CrossVersion.patch)),
-("com.github.ghik" %% "silencer-lib" % silencerVersion % Provided).cross(CrossVersion.patch)))
+val libs = Seq(
+compilerPlugin(("com.github.ghik" %% "silencer-plugin" % silencerVersion).cross(CrossVersion.patch)),
+("com.github.ghik" %% "silencer-lib" % silencerVersion % Provided).cross(CrossVersion.patch))
+Seq(libraryDependencies ++= (if (autoScalaLibrary.value) libs else Nil))
 }
 lazy val disciplineSettings =
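
silencer is a Scala compiler plugin, so wiring it into a module that sets autoScalaLibrary := false (the pure-Java akka-protobuf module above) would break that build; gating the dependencies on autoScalaLibrary.value keeps Java-only projects free of Scala compiler plugins. The idiom in isolation (a sketch, not the full plugin):

    libraryDependencies ++= {
      if (autoScalaLibrary.value)
        Seq(compilerPlugin(("com.github.ghik" %% "silencer-plugin" % "1.7.1").cross(CrossVersion.patch)))
      else
        Nil
    }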

View file

@@ -4,10 +4,10 @@
 package akka
-import sbt.{AutoPlugin, PluginTrigger, Plugins, ScalafixSupport}
+import sbt.{ AutoPlugin, PluginTrigger, Plugins, ScalafixSupport }
 import scalafix.sbt.ScalafixPlugin
-object ScalaFixExtraRulesPlugin extends AutoPlugin with ScalafixSupport{
+object ScalaFixExtraRulesPlugin extends AutoPlugin with ScalafixSupport {
 override def trigger: PluginTrigger = allRequirements
 override def requires: Plugins = ScalafixPlugin
@@ -15,6 +15,10 @@ object ScalaFixExtraRulesPlugin extends AutoPlugin with ScalafixSupport{
 import sbt._
 import scalafix.sbt.ScalafixPlugin.autoImport.scalafixDependencies
 override def projectSettings: Seq[Def.Setting[_]] = super.projectSettings ++ {
-scalafixDependencies in ThisBuild += "com.nequissimus" %% "sort-imports" % "0.5.5"
+scalafixDependencies in ThisBuild ++= Seq(
+"com.nequissimus" %% "sort-imports" % "0.5.5",
+// https://github.com/ohze/scala-rewrites
+// an extended version of https://github.com/scala/scala-rewrites
+"com.sandinh" %% "scala-rewrites" % "0.1.10-sd")
 }
 }

View file

@@ -9,7 +9,7 @@ import scalafix.sbt.ScalafixPlugin
 object ScalaFixForJdk9Plugin extends AutoPlugin with ScalafixSupport {
 override def trigger: PluginTrigger = allRequirements
 import Jdk9._
-override def requires: Plugins = Jdk9
+override def requires: Plugins = Jdk9 && ScalafixPlugin
 import ScalafixPlugin.autoImport.scalafixConfigSettings
 import sbt._
@@ -23,9 +23,9 @@ object ScalaFixForJdk9Plugin extends AutoPlugin with ScalafixSupport {
 scalafixIgnoredSetting ++ Seq(
 updateProjectCommands(
 alias = "fixall",
-value = ";scalafixEnable;compile:scalafix;test:scalafix;multi-jvm:scalafix;scalafmtAll;test:compile;multi-jvm:compile;reload"),
+value = ";scalafixEnable;scalafixAll;scalafmtAll;test:compile;multi-jvm:compile;reload"),
 updateProjectCommands(
 alias = "sortImports",
-value = ";scalafixEnable;compile:scalafix SortImports;test:scalafix SortImports;CompileJdk9:scalafix SortImports;TestJdk9:scalafix SortImports;scalafmtAll")
+value = ";scalafixEnable;scalafixAll SortImports;scalafmtAll")
 )
 }

View file

@@ -6,12 +6,13 @@ package akka
 import com.typesafe.sbt.MultiJvmPlugin
 import sbt.{AutoPlugin, Def, PluginTrigger, Plugins, ScalafixSupport, Setting, inConfig}
+import scalafix.sbt.ScalafixPlugin
 import scalafix.sbt.ScalafixPlugin.autoImport.scalafixConfigSettings
 object ScalafixForMultiNodePlugin extends AutoPlugin with ScalafixSupport {
 override def trigger: PluginTrigger = allRequirements
-override def requires: Plugins = MultiNode
+override def requires: Plugins = MultiNode && ScalafixPlugin
 import MultiJvmPlugin.autoImport._
@@ -24,9 +25,9 @@ object ScalafixForMultiNodePlugin extends AutoPlugin with ScalafixSupport {
 scalafixIgnoredSetting ++ Seq(
 updateProjectCommands(
 alias = "fixall",
-value = ";scalafixEnable;compile:scalafix;test:scalafix;multi-jvm:scalafix;scalafmtAll"),
+value = ";scalafixEnable;scalafixAll;scalafmtAll"),
 updateProjectCommands(
 alias = "sortImports",
-value = ";scalafixEnable;compile:scalafix SortImports;test:scalafix SortImports;multi-jvm:scalafix SortImports;scalafmtAll")
+value = ";scalafixEnable;scalafixAll SortImports;scalafmtAll")
 )
 }

View file

@@ -5,19 +5,17 @@
 package akka
 import sbt.plugins.JvmPlugin
-import sbt.{AutoPlugin, PluginTrigger, Plugins, ScalafixSupport}
+import sbt.{ AutoPlugin, PluginTrigger, Plugins, ScalafixSupport }
+import scalafix.sbt.ScalafixPlugin
 object ScalafixIgnoreFilePlugin extends AutoPlugin with ScalafixSupport {
 override def trigger: PluginTrigger = allRequirements
-override def requires: Plugins = JvmPlugin
+override def requires: Plugins = JvmPlugin && ScalafixPlugin
 import sbt._
-lazy val scalafixIgnoredSetting: Seq[Setting[_]] = Seq(
-ignore(Test)
-)
+lazy val scalafixIgnoredSetting: Seq[Setting[_]] = if (ScalafixSupport.fixTestScope) Nil else Seq(ignore(Test))
-override def projectSettings: Seq[Def.Setting[_]] = scalafixIgnoredSetting ++ Seq(
-addProjectCommandsIfAbsent(
-alias = "fix",
-value = ";scalafixEnable;compile:scalafix;test:scalafix;test:compile;reload"))
+override def projectSettings: Seq[Def.Setting[_]] =
+scalafixIgnoredSetting ++ Seq(
+addProjectCommandsIfAbsent(alias = "fixall", value = ";scalafixEnable;scalafixAll;test:compile;reload"))
 }

View file

@@ -16,14 +16,13 @@ trait ScalafixSupport {
 import scalafix.sbt.ScalafixPlugin.autoImport._
 unmanagedSources.in(configKey, scalafix) := {
-val ignoreSupport = new ProjectFileIgnoreSupport((baseDirectory in ThisBuild).value / ignoreConfigFileName, descriptor)
-unmanagedSources.in(configKey, scalafix).value
-.filterNot(file => ignoreSupport.isIgnoredByFileOrPackages(file))
+val ignoreSupport =
+new ProjectFileIgnoreSupport((baseDirectory in ThisBuild).value / ignoreConfigFileName, descriptor)
+unmanagedSources.in(configKey, scalafix).value.filterNot(file => ignoreSupport.isIgnoredByFileOrPackages(file))
 }
 }
 import sbt.Keys._
 def addProjectCommandsIfAbsent(alias: String, value: String): Def.Setting[Seq[Command]] = {
@@ -35,10 +34,7 @@ trait ScalafixSupport {
 if (isPresent)
 commands.value
 else
-commands.value :+ BasicCommands.newAlias(
-name = alias,
-value = value
-)
+commands.value :+ BasicCommands.newAlias(name = alias, value = value)
 }
 }
@@ -46,11 +42,12 @@ trait ScalafixSupport {
 commands := {
 commands.value.filterNot({
 case command: SimpleCommand => command.name == alias
 case _ => false
-}) :+ BasicCommands.newAlias(
-name = alias,
-value = value
-)
+}) :+ BasicCommands.newAlias(name = alias, value = value)
 }
 }
 }
+object ScalafixSupport {
+def fixTestScope: Boolean = System.getProperty("akka.scalafix.fixTestScope", "false").toBoolean
+}
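
fixTestScope reads a JVM system property, so whether test sources are excluded from scalafix (the ignore(Test) setting in ScalafixIgnoreFilePlugin) is decided per sbt invocation rather than hard-coded. A sketch of how it is driven (flag value assumed):

    // Passed on the sbt command line, e.g.: sbt -Dakka.scalafix.fixTestScope=true fixall
    val fixTests: Boolean = System.getProperty("akka.scalafix.fixTestScope", "false").toBoolean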
}