Deprecated build symbols replaced

This commit is contained in:
Johan Andrén 2017-03-15 14:36:25 +01:00
parent 39199b877a
commit fca07bb976
8 changed files with 80 additions and 64 deletions

View file

@@ -24,7 +24,7 @@ object Scaladoc extends AutoPlugin {
override lazy val projectSettings = { override lazy val projectSettings = {
inTask(doc)(Seq( inTask(doc)(Seq(
scalacOptions in Compile <++= (version, baseDirectory in ThisBuild) map scaladocOptions, scalacOptions in Compile ++= scaladocOptions(version.value, (baseDirectory in ThisBuild).value),
autoAPIMappings := CliOptions.scaladocAutoAPI.get autoAPIMappings := CliOptions.scaladocAutoAPI.get
)) ++ )) ++
Seq(validateDiagrams in Compile := true) ++ Seq(validateDiagrams in Compile := true) ++

View file

@@ -10,15 +10,15 @@ import com.typesafe.sbt.SbtScalariform.ScalariformKeys
object Formatting { object Formatting {
lazy val formatSettings = Seq( lazy val formatSettings = Seq(
ScalariformKeys.preferences in Compile <<= formattingPreferences, ScalariformKeys.preferences in Compile := formattingPreferences.value,
ScalariformKeys.preferences in Test <<= formattingPreferences, ScalariformKeys.preferences in Test := formattingPreferences.value,
ScalariformKeys.preferences in MultiJvm <<= formattingPreferences ScalariformKeys.preferences in MultiJvm := formattingPreferences.value
) )
lazy val docFormatSettings = Seq( lazy val docFormatSettings = Seq(
ScalariformKeys.preferences in Compile <<= docFormattingPreferences, ScalariformKeys.preferences in Compile := docFormattingPreferences.value,
ScalariformKeys.preferences in Test <<= docFormattingPreferences, ScalariformKeys.preferences in Test := docFormattingPreferences.value,
ScalariformKeys.preferences in MultiJvm <<= docFormattingPreferences ScalariformKeys.preferences in MultiJvm := docFormattingPreferences.value
) )
def formattingPreferences = Def.setting { def formattingPreferences = Def.setting {

View file

@@ -58,9 +58,9 @@ object MultiNode extends AutoPlugin {
inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++ inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++
Seq( Seq(
jvmOptions in MultiJvm := defaultMultiJvmOptions, jvmOptions in MultiJvm := defaultMultiJvmOptions,
compileInputs in(MultiJvm, compile) <<= (compileInputs in(MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm), compileInputs in(MultiJvm, compile) := ((compileInputs in(MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm)).value,
scalacOptions in MultiJvm <<= scalacOptions in Test, scalacOptions in MultiJvm := (scalacOptions in Test).value,
compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test) compile in MultiJvm := ((compile in MultiJvm) triggeredBy (compile in Test)).value
) ++ ) ++
CliOptions.hostsFileName.map(multiNodeHostsFileName in MultiJvm := _) ++ CliOptions.hostsFileName.map(multiNodeHostsFileName in MultiJvm := _) ++
CliOptions.javaName.map(multiNodeJavaName in MultiJvm := _) ++ CliOptions.javaName.map(multiNodeJavaName in MultiJvm := _) ++
@@ -68,17 +68,18 @@ object MultiNode extends AutoPlugin {
(if (multiNodeTestInTest) { (if (multiNodeTestInTest) {
// make sure that MultiJvm tests are executed by the default test target, // make sure that MultiJvm tests are executed by the default test target,
// and combine the results from ordinary test and multi-jvm tests // and combine the results from ordinary test and multi-jvm tests
(executeTests in Test <<= (executeTests in Test, multiExecuteTests) map { (executeTests in Test) := {
case (testResults, multiNodeResults) => val testResults = (executeTests in Test).value
val overall = val multiNodeResults = multiExecuteTests.value
if (testResults.overall.id < multiNodeResults.overall.id) val overall =
multiNodeResults.overall if (testResults.overall.id < multiNodeResults.overall.id)
else multiNodeResults.overall
testResults.overall else
Tests.Output(overall, testResults.overall
testResults.events ++ multiNodeResults.events, Tests.Output(overall,
testResults.summaries ++ multiNodeResults.summaries) testResults.events ++ multiNodeResults.events,
}) testResults.summaries ++ multiNodeResults.summaries)
}
} else Nil) } else Nil)
} }
@@ -90,7 +91,8 @@ object MultiNodeScalaTest extends AutoPlugin {
override def requires = MultiNode override def requires = MultiNode
override lazy val projectSettings = Seq( override lazy val projectSettings = Seq(
extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => extraOptions in MultiJvm := {
val src = (sourceDirectory in MultiJvm).value
(name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
}, },
scalatestOptions in MultiJvm := { scalatestOptions in MultiJvm := {

View file

@@ -15,7 +15,7 @@ object OSGi {
// in the .../bundles directory which makes testing locally published artifacts // in the .../bundles directory which makes testing locally published artifacts
// a pain. Create bundles but publish them to the normal .../jars directory. // a pain. Create bundles but publish them to the normal .../jars directory.
def osgiSettings = defaultOsgiSettings ++ Seq( def osgiSettings = defaultOsgiSettings ++ Seq(
packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap, packagedArtifact in (Compile, packageBin) := ((artifact in (Compile, packageBin)).value, OsgiKeys.bundle.value),
// This will fail the build instead of accidentally removing classes from the resulting artifact. // This will fail the build instead of accidentally removing classes from the resulting artifact.
// Each package contained in a project MUST be known to be private or exported, if it's undecided we MUST resolve this // Each package contained in a project MUST be known to be private or exported, if it's undecided we MUST resolve this
OsgiKeys.failOnUndecidedPackage := true, OsgiKeys.failOnUndecidedPackage := true,

View file

@@ -14,7 +14,7 @@ object Release {
val releaseDirectory = SettingKey[File]("release-directory") val releaseDirectory = SettingKey[File]("release-directory")
lazy val settings: Seq[Setting[_]] = commandSettings ++ Seq( lazy val settings: Seq[Setting[_]] = commandSettings ++ Seq(
releaseDirectory <<= crossTarget / "release" releaseDirectory := crossTarget.value / "release"
) )
lazy val commandSettings = Seq( lazy val commandSettings = Seq(

View file

@@ -26,26 +26,21 @@ object SigarLoader {
/** Sigar agent command line option property. */ /** Sigar agent command line option property. */
val sigarFolderProperty = "kamon.sigar.folder" val sigarFolderProperty = "kamon.sigar.folder"
def provideSigarOptions = (sigarArtifact, sigarFolder) map { (artifact, folder) =>
"-javaagent:" + artifact + "=" + sigarFolderProperty + "=" + folder
}
def locateSigarArtifact = update map { report =>
val artifactList = report.matching(
moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name)
)
require(artifactList.size == 1, "Expecting single artifact, while found: " + artifactList)
artifactList(0)
}
// TODO remove Sigar form test:test* classpath, it is provided by Sigar agent. // TODO remove Sigar form test:test* classpath, it is provided by Sigar agent.
lazy val sigarSettings = { lazy val sigarSettings = {
Seq( Seq(
// //
// Prepare Sigar agent options. // Prepare Sigar agent options.
sigarArtifact <<= locateSigarArtifact, sigarArtifact := {
val report = update.value
val artifactList = report.matching(
moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name)
)
require(artifactList.size == 1, "Expecting single artifact, while found: " + artifactList)
artifactList.head
},
sigarFolder := target.value / "native", sigarFolder := target.value / "native",
sigarOptions <<= provideSigarOptions, sigarOptions := "-javaagent:" + sigarArtifact.value + "=" + sigarFolderProperty + "=" + sigarFolder.value,
// //
fork in Test := true fork in Test := true
) ++ ( ) ++ (

View file

@@ -14,42 +14,52 @@ object SphinxDoc {
def akkaSettings = SphinxSupport.settings ++ Seq( def akkaSettings = SphinxSupport.settings ++ Seq(
// generate online version of docs // generate online version of docs
sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(AkkaBuild.docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") }, sphinxInputs in Sphinx := {
val inputs = (sphinxInputs in Sphinx in LocalProject(AkkaBuild.docs.id)).value
inputs.copy(tags = inputs.tags :+ "online")
},
// don't regenerate the pdf, just reuse the akka-docs version // don't regenerate the pdf, just reuse the akka-docs version
generatedPdf in Sphinx <<= generatedPdf in Sphinx in LocalProject(AkkaBuild.docs.id) map identity, generatedPdf in Sphinx := (generatedPdf in Sphinx in LocalProject(AkkaBuild.docs.id)).value,
generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(AkkaBuild.docs.id) map identity generatedEpub in Sphinx := (generatedEpub in Sphinx in LocalProject(AkkaBuild.docs.id)).value
) )
def docsSettings = Seq( def docsSettings = Seq(
sourceDirectory in Sphinx <<= baseDirectory / "rst", sourceDirectory in Sphinx := baseDirectory.value / "rst",
watchSources <++= (sourceDirectory in Sphinx, excludeFilter in Global) map { (source, excl) => watchSources ++= {
source descendantsExcept ("*.rst", excl) get val source = (sourceDirectory in Sphinx).value
val excl = (excludeFilter in Global).value
source.descendantsExcept("*.rst", excl).get
}, },
sphinxPackages in Sphinx <+= baseDirectory { _ / "_sphinx" / "pygments" }, sphinxPackages in Sphinx += baseDirectory.value / "_sphinx" / "pygments",
// copy akka-contrib/docs into our rst_preprocess/contrib (and apply substitutions) // copy akka-contrib/docs into our rst_preprocess/contrib (and apply substitutions)
preprocess in Sphinx <<= (preprocess in Sphinx, preprocess in Sphinx := {
baseDirectory in AkkaBuild.contrib, val s = streams.value
target in preprocess in Sphinx,
cacheDirectory,
preprocessExts in Sphinx,
preprocessVars in Sphinx,
streams) map { (orig, src, target, cacheDir, exts, vars, s) =>
val contribSrc = Map("contribSrc" -> "../../../akka-contrib") val contribSrc = Map("contribSrc" -> "../../../akka-contrib")
simplePreprocess(src / "docs", target / "contrib", cacheDir / "sphinx" / "preprocessed-contrib", exts, vars ++ contribSrc, s.log) simplePreprocess(
orig (baseDirectory in AkkaBuild.contrib).value / "docs",
(target in preprocess in Sphinx).value / "contrib",
s.cacheDirectory / "sphinx" / "preprocessed-contrib",
(preprocessExts in Sphinx).value,
(preprocessVars in Sphinx).value ++ contribSrc,
s.log)
(preprocess in Sphinx).value
}, },
enableOutput in generatePdf in Sphinx := true, enableOutput in generatePdf in Sphinx := true,
enableOutput in generateEpub in Sphinx := true, enableOutput in generateEpub in Sphinx := true,
unmanagedSourceDirectories in Test <<= sourceDirectory in Sphinx apply { _ ** "code" get } unmanagedSourceDirectories in Test := ((sourceDirectory in Sphinx).value ** "code").get
) )
// pre-processing settings for sphinx // pre-processing settings for sphinx
lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq( lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq(
target in preprocess <<= baseDirectory / "rst_preprocessed", target in preprocess := baseDirectory.value / "rst_preprocessed",
preprocessExts := Set("rst", "py"), preprocessExts := Set("rst", "py"),
// customization of sphinx @<key>@ replacements, add to all sphinx-using projects // customization of sphinx @<key>@ replacements, add to all sphinx-using projects
// add additional replacements here // add additional replacements here
preprocessVars <<= (scalaVersion, version) { (s, v) => preprocessVars := {
val s = scalaVersion.value
val v = version.value
val BinVer = """(\d+\.\d+)\.\d+""".r val BinVer = """(\d+\.\d+)\.\d+""".r
Map( Map(
"version" -> v, "version" -> v,
@@ -72,11 +82,18 @@ object SphinxDoc {
"samples" -> "http://github.com/akka/akka-samples" "samples" -> "http://github.com/akka/akka-samples"
) )
}, },
preprocess <<= (sourceDirectory, target in preprocess, cacheDirectory, preprocessExts, preprocessVars, streams) map { preprocess := {
(src, target, cacheDir, exts, vars, s) => simplePreprocess(src, target, cacheDir / "sphinx" / "preprocessed", exts, vars, s.log) val s = streams.value
simplePreprocess(
sourceDirectory.value,
(target in preprocess).value,
s.cacheDirectory / "sphinx" / "preprocessed",
preprocessExts.value,
preprocessVars.value,
s.log)
}, },
sphinxInputs <<= (sphinxInputs, preprocess) map { (inputs, preprocessed) => inputs.copy(src = preprocessed) } sphinxInputs := sphinxInputs.value.copy(src = preprocess.value)
)) ++ Seq( )) ++ Seq(
cleanFiles <+= target in preprocess in Sphinx cleanFiles += (target in preprocess in Sphinx).value
) )
} }

View file

@@ -41,15 +41,17 @@ object TestExtras {
onlyTestTags := Params.testTagsOnly, onlyTestTags := Params.testTagsOnly,
// add filters for tests excluded by name // add filters for tests excluded by name
testOptions in Test <++= excludeTestNames map { _.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))) }, testOptions in Test ++= excludeTestNames.value.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))),
// add arguments for tests excluded by tag // add arguments for tests excluded by tag
testOptions in Test <++= excludeTestTags map { tags => testOptions in Test ++= {
val tags = excludeTestTags.value
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" "))) if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" ")))
}, },
// add arguments for running only tests by tag // add arguments for running only tests by tag
testOptions in Test <++= onlyTestTags map { tags => testOptions in Test ++= {
val tags = onlyTestTags.value
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" "))) if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" ")))
} }
) )