Merge pull request #1791 from akka/wip-3448-update-to-sbt-0.13-ban

#3448 Upgrade to sbt 0.13.0
This commit is contained in:
Björn Antonsson 2013-11-04 05:19:56 -08:00
commit 577aed548a
28 changed files with 109 additions and 80 deletions

View file

@@ -131,7 +131,7 @@ class ActorLifeCycleSpec extends AkkaSpec("akka.actor.serialize-messages=off") w
case class Become(recv: ActorContext Receive)
val a = system.actorOf(Props(new Actor {
def receive = {
case Become(beh) context.become(beh(context), discardOld = false); sender ! "ok"
case Become(beh) { context.become(beh(context), discardOld = false); sender ! "ok" }
case x sender ! 42
}
}))

View file

@@ -201,8 +201,13 @@ object FSMTimingSpec {
case Initial -> TestSingleTimerResubmit setTimer("blah", Tick, 500.millis.dilated, false)
}
when(TestSingleTimerResubmit) {
case Event(Tick, _) tester ! Tick; setTimer("blah", Tock, 500.millis.dilated, false); stay()
case Event(Tock, _) tester ! Tock; goto(Initial)
case Event(Tick, _)
tester ! Tick
setTimer("blah", Tock, 500.millis.dilated, false)
stay()
case Event(Tock, _)
tester ! Tock
goto(Initial)
}
when(TestCancelTimer) {
case Event(Tick, _)

View file

@@ -86,7 +86,7 @@ class DispatcherActorSpec extends AkkaSpec(DispatcherActorSpec.config) with Defa
val slowOne = system.actorOf(
Props(new Actor {
def receive = {
case "hogexecutor" sender ! "OK"; start.await
case "hogexecutor" { sender ! "OK"; start.await }
case "ping" if (works.get) latch.countDown()
}
}).withDispatcher(throughputDispatcher))
@@ -120,8 +120,8 @@ class DispatcherActorSpec extends AkkaSpec(DispatcherActorSpec.config) with Defa
val slowOne = system.actorOf(
Props(new Actor {
def receive = {
case "hogexecutor" ready.countDown(); start.await
case "ping" works.set(false); context.stop(self)
case "hogexecutor" { ready.countDown(); start.await }
case "ping" { works.set(false); context.stop(self) }
}
}).withDispatcher(throughputDispatcher))

View file

@@ -44,7 +44,7 @@ object EventStreamSpec {
bus.subscribe(context.self, classOf[SetTarget])
bus.subscribe(context.self, classOf[UnhandledMessage])
sender ! Logging.LoggerInitialized
case SetTarget(ref) dst = ref; dst ! "OK"
case SetTarget(ref) { dst = ref; dst ! "OK" }
case e: Logging.LogEvent dst ! e
case u: UnhandledMessage dst ! u
}

View file

@@ -170,8 +170,8 @@ private[akka] trait FaultHandling { this: ActorCell ⇒
suspendNonRecursive()
// suspend children
val skip: Set[ActorRef] = currentMessage match {
case Envelope(Failed(_, _, _), child) setFailed(child); Set(child)
case _ setFailed(self); Set.empty
case Envelope(Failed(_, _, _), child) { setFailed(child); Set(child) }
case _ { setFailed(self); Set.empty }
}
suspendChildren(exceptFor = skip ++ childrenNotToSuspend)
t match {

View file

@@ -94,7 +94,7 @@ class SslTlsSupport(engine: SSLEngine) extends PipelineStage[HasLogging, Command
// to shutdown the connection when getting CLOSED in encrypt
closeEngine()
case x: Tcp.WriteCommand =>
case x: Tcp.WriteCommand
throw new IllegalArgumentException(
"SslTlsSupport doesn't support Tcp.WriteCommands of type " + x.getClass.getSimpleName)

View file

@@ -370,8 +370,8 @@ object Tcp extends ExtensionId[TcpExt] with ExtensionIdProvider {
def next(): SimpleWriteCommand =
current match {
case null Iterator.empty.next()
case CompoundWrite(h, t) current = t; h
case x: SimpleWriteCommand current = null; x
case CompoundWrite(h, t) { current = t; h }
case x: SimpleWriteCommand { current = null; x }
}
}
}

View file

@@ -50,11 +50,11 @@ private[io] class TcpManager(tcp: TcpExt)
def receive = workerForCommandHandler {
case c: Connect
val commander = sender // cache because we create a function that will run asyncly
registry Props(classOf[TcpOutgoingConnection], tcp, registry, commander, c)
(registry Props(classOf[TcpOutgoingConnection], tcp, registry, commander, c))
case b: Bind
val commander = sender // cache because we create a function that will run asyncly
registry Props(classOf[TcpListener], selectorPool, tcp, registry, commander, b)
(registry Props(classOf[TcpListener], selectorPool, tcp, registry, commander, b))
}
}

View file

@@ -48,11 +48,11 @@ private[io] class UdpManager(udp: UdpExt) extends SelectionHandler.SelectorBased
def receive = workerForCommandHandler {
case b: Bind
val commander = sender // cache because we create a function that will run asyncly
registry Props(classOf[UdpListener], udp, registry, commander, b)
(registry Props(classOf[UdpListener], udp, registry, commander, b))
case SimpleSender(options)
val commander = sender // cache because we create a function that will run asyncly
registry Props(classOf[UdpSender], udp, registry, commander, options)
(registry Props(classOf[UdpSender], udp, registry, commander, options))
}
}

View file

@@ -27,8 +27,11 @@ object DataflowSpec {
class TestDelayActor(await: TestLatch) extends Actor {
def receive = {
case "Hello" Await.ready(await, TestLatch.DefaultTimeout); sender ! "World"
case "NoReply" Await.ready(await, TestLatch.DefaultTimeout)
case "Hello"
Await.ready(await, TestLatch.DefaultTimeout)
sender ! "World"
case "NoReply"
Await.ready(await, TestLatch.DefaultTimeout)
case "Failure"
Await.ready(await, TestLatch.DefaultTimeout)
sender ! Status.Failure(new RuntimeException("Expected exception; to test fault-tolerance"))

View file

@@ -62,4 +62,4 @@ akka {
}
}
}
}
}

View file

@@ -90,7 +90,7 @@ class Channel private (_channelId: Option[String]) extends Actor with Stash {
}
private val buffering: Actor.Receive = {
case DeliveredResolved | DeliveredUnresolved context.unbecome(); unstashAll() // TODO: optimize
case DeliveredResolved | DeliveredUnresolved { context.unbecome(); unstashAll() } // TODO: optimize
case _: Deliver stash()
}
@@ -232,9 +232,9 @@ private trait ResolvedDelivery extends Actor {
def receive = {
case DeliverResolved context.actorSelection(path) ! Identify(1)
case ActorIdentity(1, Some(ref)) onResolveSuccess(ref); shutdown(DeliveredResolved)
case ActorIdentity(1, None) onResolveFailure(); shutdown(DeliveredUnresolved)
case ReceiveTimeout onResolveFailure(); shutdown(DeliveredUnresolved)
case ActorIdentity(1, Some(ref)) { onResolveSuccess(ref); shutdown(DeliveredResolved) }
case ActorIdentity(1, None) { onResolveFailure(); shutdown(DeliveredUnresolved) }
case ReceiveTimeout { onResolveFailure(); shutdown(DeliveredUnresolved) }
}
def shutdown(message: Any) {

View file

@@ -50,10 +50,18 @@ private[persistence] class InmemStore extends Actor {
var messages = Map.empty[String, Vector[PersistentImpl]]
def receive = {
case Write(p) add(p); success()
case WriteBatch(pb) pb.foreach(add); success()
case Delete(p) update(p.processorId, p.sequenceNr)(_.copy(deleted = true)); success()
case Confirm(pid, snr, cid) update(pid, snr)(p p.copy(confirms = cid +: p.confirms)); success()
case Write(p)
add(p)
success()
case WriteBatch(pb)
pb.foreach(add)
success()
case Delete(p)
update(p.processorId, p.sequenceNr)(_.copy(deleted = true))
success()
case Confirm(pid, snr, cid)
update(pid, snr)(p p.copy(confirms = cid +: p.confirms))
success()
case Replay(pid, fromSnr, toSnr, callback) {
for {
ms messages.get(pid)

View file

@@ -14,9 +14,9 @@ object ProcessorStashSpec {
var state: List[String] = Nil
val behaviorA: Actor.Receive = {
case Persistent("a", snr) update("a", snr); context.become(behaviorB)
case Persistent("a", snr) { update("a", snr); context.become(behaviorB) }
case Persistent("b", snr) update("b", snr)
case Persistent("c", snr) update("c", snr); unstashAll()
case Persistent("c", snr) { update("c", snr); unstashAll() }
case "x" update("x")
case "boom" throw new TestException("boom")
case Persistent("boom", _) throw new TestException("boom")
@@ -24,7 +24,7 @@ object ProcessorStashSpec {
}
val behaviorB: Actor.Receive = {
case Persistent("b", _) stash(); context.become(behaviorA)
case Persistent("b", _) { stash(); context.become(behaviorA) }
case "x" stash()
}

View file

@@ -8,7 +8,7 @@ import scala.concurrent.duration._
import com.typesafe.config.ConfigFactory
import akka.actor._
import akka.remote.testconductor.RoleName
import akka.remote.transport.ThrottlerTransportAdapter.{ForceDisassociate, Direction}
import akka.remote.transport.ThrottlerTransportAdapter.{ ForceDisassociate, Direction }
import akka.remote.testkit.MultiNodeConfig
import akka.remote.testkit.MultiNodeSpec
import akka.remote.testkit.STMultiNodeSpec
@@ -35,8 +35,8 @@ object RemoteNodeShutdownAndComesBackSpec extends MultiNodeConfig {
class Subject extends Actor {
def receive = {
case "shutdown" => context.system.shutdown()
case msg sender ! msg
case "shutdown" context.system.shutdown()
case msg sender ! msg
}
}
@@ -47,7 +47,7 @@ class RemoteNodeShutdownAndComesBackMultiJvmNode2 extends RemoteNodeShutdownAndC
abstract class RemoteNodeShutdownAndComesBackSpec
extends MultiNodeSpec(RemoteNodeShutdownAndComesBackSpec)
with STMultiNodeSpec with ImplicitSender {
with STMultiNodeSpec with ImplicitSender {
import RemoteNodeShutdownAndComesBackSpec._
@@ -137,7 +137,6 @@ abstract class RemoteNodeShutdownAndComesBackSpec
""").withFallback(system.settings.config))
freshSystem.actorOf(Props[Subject], "subject")
freshSystem.awaitTermination(30.seconds)
}

View file

@@ -5,7 +5,7 @@ package akka.remote.testconductor
import language.postfixOps
import com.typesafe.config.ConfigFactory
import akka.actor.{Props, Actor, ActorIdentity, Identify, Deploy}
import akka.actor.{ Props, Actor, ActorIdentity, Identify, Deploy }
import scala.concurrent.Await
import scala.concurrent.Awaitable
import scala.concurrent.duration._

View file

@@ -79,8 +79,8 @@ abstract class StatsSampleSingleMasterSpec extends MultiNodeSpec(StatsSampleSing
Cluster(system) join firstAddress
receiveN(3).collect { case MemberUp(m) => m.address }.toSet must be (
Set(firstAddress, secondAddress, thirdAddress))
receiveN(3).collect { case MemberUp(m) m.address }.toSet must be(
Set(firstAddress, secondAddress, thirdAddress))
Cluster(system).unsubscribe(testActor)

View file

@@ -96,8 +96,8 @@ abstract class StatsSampleSpec extends MultiNodeSpec(StatsSampleSpecConfig)
system.actorOf(Props[StatsWorker], "statsWorker")
system.actorOf(Props[StatsService], "statsService")
receiveN(3).collect { case MemberUp(m) => m.address }.toSet must be (
Set(firstAddress, secondAddress, thirdAddress))
receiveN(3).collect { case MemberUp(m) m.address }.toSet must be(
Set(firstAddress, secondAddress, thirdAddress))
Cluster(system).unsubscribe(testActor)

View file

@@ -81,8 +81,8 @@ abstract class StatsSampleJapiSpec extends MultiNodeSpec(StatsSampleJapiSpecConf
system.actorOf(Props[StatsWorker], "statsWorker")
system.actorOf(Props[StatsService], "statsService")
receiveN(3).collect { case MemberUp(m) => m.address }.toSet must be (
Set(firstAddress, secondAddress, thirdAddress))
receiveN(3).collect { case MemberUp(m) m.address }.toSet must be(
Set(firstAddress, secondAddress, thirdAddress))
Cluster(system).unsubscribe(testActor)

View file

@@ -78,13 +78,13 @@ abstract class StatsSampleSingleMasterJapiSpec extends MultiNodeSpec(StatsSample
Cluster(system) join firstAddress
receiveN(3).collect { case MemberUp(m) => m.address }.toSet must be (
Set(firstAddress, secondAddress, thirdAddress))
receiveN(3).collect { case MemberUp(m) m.address }.toSet must be(
Set(firstAddress, secondAddress, thirdAddress))
Cluster(system).unsubscribe(testActor)
system.actorOf(ClusterSingletonManager.defaultProps(
Props[StatsService],
Props[StatsService],
singletonName = "statsService",
terminationMessage = PoisonPill,
role = null), name = "singleton")

View file

@@ -17,7 +17,7 @@ object MultiNodeSampleConfig extends MultiNodeConfig {
//#spec
import akka.remote.testkit.MultiNodeSpec
import akka.testkit.ImplicitSender
import akka.actor.{Props, Actor}
import akka.actor.{ Props, Actor }
class MultiNodeSampleSpecMultiJvmNode1 extends MultiNodeSample
class MultiNodeSampleSpecMultiJvmNode2 extends MultiNodeSample
@@ -46,7 +46,7 @@ class MultiNodeSample extends MultiNodeSpec(MultiNodeSampleConfig)
runOn(node2) {
system.actorOf(Props(new Actor {
def receive = {
case "ping" => sender ! "pong"
case "ping" sender ! "pong"
}
}), "ponger")
enterBarrier("deployed")

View file

@@ -20,7 +20,7 @@ object SnapshotExample extends App {
case Persistent(s, snr) state = state.update(s"${s}-${snr}")
case SaveSnapshotSuccess(metadata) // ...
case SaveSnapshotFailure(metadata, reason) // ...
case SnapshotOffer(_, s: ExampleState) println("offered state = " + s); state = s
case SnapshotOffer(_, s: ExampleState) { println("offered state = " + s); state = s }
case "print" println("current state = " + state)
case "snap" saveSnapshot(state)
}

View file

@@ -6,10 +6,10 @@ package akka.sbt
import sbt._
import sbt.Keys._
import sbt.Load.BuildStructure
import sbt.BuildStructure
import sbt.classpath.ClasspathUtilities
import sbt.Project.Initialize
import sbt.CommandSupport._
import sbt.Def.Initialize
import sbt.CommandUtil._
import java.io.File
object AkkaKernelPlugin extends Plugin {
@@ -51,7 +51,7 @@ object AkkaKernelPlugin extends Plugin {
distClean <<= distCleanTask,
dependencyClasspath <<= (dependencyClasspath in Runtime),
unmanagedResourceDirectories <<= (unmanagedResourceDirectories in Runtime),
outputDirectory <<= target / "dist",
outputDirectory <<= target { t t / "dist" },
configSourceDirs <<= defaultConfigSourceDirs,
distJvmOptions := "-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC",
distMainClass := "akka.kernel.Main",

View file

@@ -63,7 +63,7 @@ private[zeromq] class ConcurrentSocketActor(params: immutable.Seq[SocketOption])
}
private def handleConnectOption(msg: SocketConnectOption): Unit = msg match {
case Connect(endpoint) socket.connect(endpoint); notifyListener(Connecting)
case Connect(endpoint) { socket.connect(endpoint); notifyListener(Connecting) }
case Bind(endpoint) socket.bind(endpoint)
}

View file

@@ -10,7 +10,7 @@ import com.typesafe.sbt.SbtMultiJvm
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions, multiNodeExecuteTests, multiNodeJavaName, multiNodeHostsFileName, multiNodeTargetDirName, multiTestOptions }
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import com.typesafe.sbtosgi.OsgiPlugin.{ OsgiKeys, osgiSettings }
import com.typesafe.sbt.osgi.SbtOsgi.{ OsgiKeys, osgiSettings }
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
import com.typesafe.tools.mima.plugin.MimaKeys.reportBinaryIssues
@@ -20,7 +20,6 @@ import com.typesafe.sbt.site.SphinxSupport.{ enableOutput, generatePdf, generate
import com.typesafe.sbt.preprocess.Preprocess.{ preprocess, preprocessExts, preprocessVars, simplePreprocess }
import ls.Plugin.{ lsSettings, LsKeys }
import java.lang.Boolean.getBoolean
import sbt.Tests
import LsKeys.{ lsync, docsUrl => lsDocsUrl, tags => lsTags }
import java.io.{PrintWriter, InputStreamReader, FileInputStream, File}
import java.nio.charset.Charset
@@ -427,7 +426,7 @@ object AkkaBuild extends Build {
publishMavenStyle := false, // SBT Plugins should be published as Ivy
publishTo <<= Publish.akkaPluginPublishTo,
scalacOptions in Compile := Seq("-encoding", "UTF-8", "-deprecation", "-unchecked"),
scalaVersion := "2.9.2",
scalaVersion := "2.10.2",
scalaBinaryVersion <<= scalaVersion,
reportBinaryIssues := () // disable bin comp check
)
@@ -792,6 +791,9 @@ object AkkaBuild extends Build {
// show full stack traces and test case durations
testOptions in Test += Tests.Argument("-oDF"),
// don't save test output to a file
testListeners in (Test, test) := Seq(TestLogger(streams.value.log, {_ => streams.value.log }, logBuffered.value)),
validatePullRequestTask,
validatePullRequest <<= validatePullRequest.dependsOn(/* reportBinaryIssues */)
)
@@ -849,9 +851,9 @@
.setPreference(AlignSingleLineCaseStatements, true)
}
lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ inConfig(MultiJvm)(SbtScalariform.scalariformSettings) ++ Seq(
lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ Seq(
jvmOptions in MultiJvm := defaultMultiJvmOptions,
compileInputs in MultiJvm <<= (compileInputs in MultiJvm) dependsOn (ScalariformKeys.format in MultiJvm),
compileInputs in (MultiJvm, compile) <<= (compileInputs in (MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm),
compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test),
ScalariformKeys.preferences in MultiJvm := formattingPreferences) ++
Option(System.getProperty("akka.test.multi-node.hostsFileName")).map(x => Seq(multiNodeHostsFileName in MultiJvm := x)).getOrElse(Seq.empty) ++
@@ -859,16 +861,28 @@
Option(System.getProperty("akka.test.multi-node.targetDirName")).map(x => Seq(multiNodeTargetDirName in MultiJvm := x)).getOrElse(Seq.empty) ++
((executeMultiJvmTests, multiNodeEnabled) match {
case (true, true) =>
executeTests in Test <<= ((executeTests in Test), (multiNodeExecuteTests in MultiJvm)) map {
case ((_, testResults), (_, multiNodeResults)) =>
val results = testResults ++ multiNodeResults
(Tests.overall(results.values), results)
executeTests in Test <<= (executeTests in Test, multiNodeExecuteTests in MultiJvm) map {
case (testResults, multiNodeResults) =>
val overall =
if (testResults.overall.id < multiNodeResults.overall.id)
multiNodeResults.overall
else
testResults.overall
Tests.Output(overall,
testResults.events ++ multiNodeResults.events,
testResults.summaries ++ multiNodeResults.summaries)
}
case (true, false) =>
executeTests in Test <<= ((executeTests in Test), (executeTests in MultiJvm)) map {
case ((_, testResults), (_, multiNodeResults)) =>
val results = testResults ++ multiNodeResults
(Tests.overall(results.values), results)
executeTests in Test <<= (executeTests in Test, executeTests in MultiJvm) map {
case (testResults, multiNodeResults) =>
val overall =
if (testResults.overall.id < multiNodeResults.overall.id)
multiNodeResults.overall
else
testResults.overall
Tests.Output(overall,
testResults.events ++ multiNodeResults.events,
testResults.summaries ++ multiNodeResults.summaries)
}
case (false, _) => Seq.empty
})

View file

@@ -67,19 +67,19 @@ object Publish {
}
def akkaPublishTo: Initialize[Option[Resolver]] = {
(defaultPublishTo, version) { (default, v) =>
(defaultPublishTo, version) { (defaultPT, v) =>
akkaPublishRepository orElse
sonatypeRepo(v) orElse
Some(Resolver.file("Default Local Repository", default))
Some(Resolver.file("Default Local Repository", defaultPT))
}
}
def akkaPluginPublishTo: Initialize[Option[Resolver]] = {
(defaultPublishTo, version) { (default, version) =>
pluginPublishLocally(default) orElse
(defaultPublishTo, version) { (defaultPT, version) =>
pluginPublishLocally(defaultPT) orElse
akkaPublishRepository orElse
pluginRepo(version) orElse
Some(Resolver.file("Default Local Repository", default))
Some(Resolver.file("Default Local Repository", defaultPT))
}
}
@@ -103,9 +103,9 @@ object Publish {
def akkaCredentials: Seq[Credentials] =
Option(System.getProperty("akka.publish.credentials", null)) map (f => Credentials(new File(f))) toSeq
def pluginPublishLocally(default: File): Option[Resolver] =
def pluginPublishLocally(defaultPT: File): Option[Resolver] =
Option(sys.props("publish.plugin.locally")) collect { case pl if pl.toLowerCase == "true" =>
Resolver.file("Default Local Repository", default)
Resolver.file("Default Local Repository", defaultPT)
}
// timestamped versions

View file

@@ -1 +1 @@
sbt.version=0.12.4
sbt.version=0.13.0

View file

@@ -6,16 +6,16 @@ resolvers += Classpaths.typesafeResolver
addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.8")
//#sbt-multi-jvm
addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.0.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.6.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.7.1")
addSbtPlugin("com.typesafe.sbtosgi" % "sbtosgi" % "0.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-osgi" % "0.6.0")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.3")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6")
addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.2")
addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.3")
addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8")
addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1")
addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")