Deprecated sbt build operators (`<<=`, `<++=`, `<+=`) replaced with `:=`/`+=`/`++=` and the `.value` macro syntax
This commit is contained in:
parent
39199b877a
commit
fca07bb976
8 changed files with 80 additions and 64 deletions
|
|
@ -24,7 +24,7 @@ object Scaladoc extends AutoPlugin {
|
|||
|
||||
override lazy val projectSettings = {
|
||||
inTask(doc)(Seq(
|
||||
scalacOptions in Compile <++= (version, baseDirectory in ThisBuild) map scaladocOptions,
|
||||
scalacOptions in Compile ++= scaladocOptions(version.value, (baseDirectory in ThisBuild).value),
|
||||
autoAPIMappings := CliOptions.scaladocAutoAPI.get
|
||||
)) ++
|
||||
Seq(validateDiagrams in Compile := true) ++
|
||||
|
|
|
|||
|
|
@ -10,15 +10,15 @@ import com.typesafe.sbt.SbtScalariform.ScalariformKeys
|
|||
|
||||
object Formatting {
|
||||
lazy val formatSettings = Seq(
|
||||
ScalariformKeys.preferences in Compile <<= formattingPreferences,
|
||||
ScalariformKeys.preferences in Test <<= formattingPreferences,
|
||||
ScalariformKeys.preferences in MultiJvm <<= formattingPreferences
|
||||
ScalariformKeys.preferences in Compile := formattingPreferences.value,
|
||||
ScalariformKeys.preferences in Test := formattingPreferences.value,
|
||||
ScalariformKeys.preferences in MultiJvm := formattingPreferences.value
|
||||
)
|
||||
|
||||
lazy val docFormatSettings = Seq(
|
||||
ScalariformKeys.preferences in Compile <<= docFormattingPreferences,
|
||||
ScalariformKeys.preferences in Test <<= docFormattingPreferences,
|
||||
ScalariformKeys.preferences in MultiJvm <<= docFormattingPreferences
|
||||
ScalariformKeys.preferences in Compile := docFormattingPreferences.value,
|
||||
ScalariformKeys.preferences in Test := docFormattingPreferences.value,
|
||||
ScalariformKeys.preferences in MultiJvm := docFormattingPreferences.value
|
||||
)
|
||||
|
||||
def formattingPreferences = Def.setting {
|
||||
|
|
|
|||
|
|
@ -58,9 +58,9 @@ object MultiNode extends AutoPlugin {
|
|||
inConfig(MultiJvm)(SbtScalariform.configScalariformSettings) ++
|
||||
Seq(
|
||||
jvmOptions in MultiJvm := defaultMultiJvmOptions,
|
||||
compileInputs in(MultiJvm, compile) <<= (compileInputs in(MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm),
|
||||
scalacOptions in MultiJvm <<= scalacOptions in Test,
|
||||
compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test)
|
||||
compileInputs in(MultiJvm, compile) := ((compileInputs in(MultiJvm, compile)) dependsOn (ScalariformKeys.format in MultiJvm)).value,
|
||||
scalacOptions in MultiJvm := (scalacOptions in Test).value,
|
||||
compile in MultiJvm := ((compile in MultiJvm) triggeredBy (compile in Test)).value
|
||||
) ++
|
||||
CliOptions.hostsFileName.map(multiNodeHostsFileName in MultiJvm := _) ++
|
||||
CliOptions.javaName.map(multiNodeJavaName in MultiJvm := _) ++
|
||||
|
|
@ -68,17 +68,18 @@ object MultiNode extends AutoPlugin {
|
|||
(if (multiNodeTestInTest) {
|
||||
// make sure that MultiJvm tests are executed by the default test target,
|
||||
// and combine the results from ordinary test and multi-jvm tests
|
||||
(executeTests in Test <<= (executeTests in Test, multiExecuteTests) map {
|
||||
case (testResults, multiNodeResults) =>
|
||||
val overall =
|
||||
if (testResults.overall.id < multiNodeResults.overall.id)
|
||||
multiNodeResults.overall
|
||||
else
|
||||
testResults.overall
|
||||
Tests.Output(overall,
|
||||
testResults.events ++ multiNodeResults.events,
|
||||
testResults.summaries ++ multiNodeResults.summaries)
|
||||
})
|
||||
(executeTests in Test) := {
|
||||
val testResults = (executeTests in Test).value
|
||||
val multiNodeResults = multiExecuteTests.value
|
||||
val overall =
|
||||
if (testResults.overall.id < multiNodeResults.overall.id)
|
||||
multiNodeResults.overall
|
||||
else
|
||||
testResults.overall
|
||||
Tests.Output(overall,
|
||||
testResults.events ++ multiNodeResults.events,
|
||||
testResults.summaries ++ multiNodeResults.summaries)
|
||||
}
|
||||
} else Nil)
|
||||
}
|
||||
|
||||
|
|
@ -90,7 +91,8 @@ object MultiNodeScalaTest extends AutoPlugin {
|
|||
override def requires = MultiNode
|
||||
|
||||
override lazy val projectSettings = Seq(
|
||||
extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src =>
|
||||
extraOptions in MultiJvm := {
|
||||
val src = (sourceDirectory in MultiJvm).value
|
||||
(name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
|
||||
},
|
||||
scalatestOptions in MultiJvm := {
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ object OSGi {
|
|||
// in the .../bundles directory which makes testing locally published artifacts
|
||||
// a pain. Create bundles but publish them to the normal .../jars directory.
|
||||
def osgiSettings = defaultOsgiSettings ++ Seq(
|
||||
packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap,
|
||||
packagedArtifact in (Compile, packageBin) := ((artifact in (Compile, packageBin)).value, OsgiKeys.bundle.value),
|
||||
// This will fail the build instead of accidentally removing classes from the resulting artifact.
|
||||
// Each package contained in a project MUST be known to be private or exported, if it's undecided we MUST resolve this
|
||||
OsgiKeys.failOnUndecidedPackage := true,
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ object Release {
|
|||
val releaseDirectory = SettingKey[File]("release-directory")
|
||||
|
||||
lazy val settings: Seq[Setting[_]] = commandSettings ++ Seq(
|
||||
releaseDirectory <<= crossTarget / "release"
|
||||
releaseDirectory := crossTarget.value / "release"
|
||||
)
|
||||
|
||||
lazy val commandSettings = Seq(
|
||||
|
|
|
|||
|
|
@ -26,26 +26,21 @@ object SigarLoader {
|
|||
/** Sigar agent command line option property. */
|
||||
val sigarFolderProperty = "kamon.sigar.folder"
|
||||
|
||||
def provideSigarOptions = (sigarArtifact, sigarFolder) map { (artifact, folder) =>
|
||||
"-javaagent:" + artifact + "=" + sigarFolderProperty + "=" + folder
|
||||
}
|
||||
|
||||
def locateSigarArtifact = update map { report =>
|
||||
val artifactList = report.matching(
|
||||
moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name)
|
||||
)
|
||||
require(artifactList.size == 1, "Expecting single artifact, while found: " + artifactList)
|
||||
artifactList(0)
|
||||
}
|
||||
|
||||
// TODO remove Sigar from test:test* classpath, it is provided by Sigar agent.
|
||||
lazy val sigarSettings = {
|
||||
Seq(
|
||||
//
|
||||
// Prepare Sigar agent options.
|
||||
sigarArtifact <<= locateSigarArtifact,
|
||||
sigarArtifact := {
|
||||
val report = update.value
|
||||
val artifactList = report.matching(
|
||||
moduleFilter(organization = sigarLoader.organization, name = sigarLoader.name)
|
||||
)
|
||||
require(artifactList.size == 1, "Expecting single artifact, while found: " + artifactList)
|
||||
artifactList.head
|
||||
},
|
||||
sigarFolder := target.value / "native",
|
||||
sigarOptions <<= provideSigarOptions,
|
||||
sigarOptions := "-javaagent:" + sigarArtifact.value + "=" + sigarFolderProperty + "=" + sigarFolder.value,
|
||||
//
|
||||
fork in Test := true
|
||||
) ++ (
|
||||
|
|
|
|||
|
|
@ -14,42 +14,52 @@ object SphinxDoc {
|
|||
|
||||
def akkaSettings = SphinxSupport.settings ++ Seq(
|
||||
// generate online version of docs
|
||||
sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(AkkaBuild.docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
|
||||
sphinxInputs in Sphinx := {
|
||||
val inputs = (sphinxInputs in Sphinx in LocalProject(AkkaBuild.docs.id)).value
|
||||
inputs.copy(tags = inputs.tags :+ "online")
|
||||
},
|
||||
// don't regenerate the pdf, just reuse the akka-docs version
|
||||
generatedPdf in Sphinx <<= generatedPdf in Sphinx in LocalProject(AkkaBuild.docs.id) map identity,
|
||||
generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(AkkaBuild.docs.id) map identity
|
||||
generatedPdf in Sphinx := (generatedPdf in Sphinx in LocalProject(AkkaBuild.docs.id)).value,
|
||||
generatedEpub in Sphinx := (generatedEpub in Sphinx in LocalProject(AkkaBuild.docs.id)).value
|
||||
)
|
||||
|
||||
def docsSettings = Seq(
|
||||
sourceDirectory in Sphinx <<= baseDirectory / "rst",
|
||||
watchSources <++= (sourceDirectory in Sphinx, excludeFilter in Global) map { (source, excl) =>
|
||||
source descendantsExcept ("*.rst", excl) get
|
||||
sourceDirectory in Sphinx := baseDirectory.value / "rst",
|
||||
watchSources ++= {
|
||||
val source = (sourceDirectory in Sphinx).value
|
||||
val excl = (excludeFilter in Global).value
|
||||
source.descendantsExcept("*.rst", excl).get
|
||||
},
|
||||
sphinxPackages in Sphinx <+= baseDirectory { _ / "_sphinx" / "pygments" },
|
||||
sphinxPackages in Sphinx += baseDirectory.value / "_sphinx" / "pygments",
|
||||
// copy akka-contrib/docs into our rst_preprocess/contrib (and apply substitutions)
|
||||
preprocess in Sphinx <<= (preprocess in Sphinx,
|
||||
baseDirectory in AkkaBuild.contrib,
|
||||
target in preprocess in Sphinx,
|
||||
cacheDirectory,
|
||||
preprocessExts in Sphinx,
|
||||
preprocessVars in Sphinx,
|
||||
streams) map { (orig, src, target, cacheDir, exts, vars, s) =>
|
||||
preprocess in Sphinx := {
|
||||
val s = streams.value
|
||||
|
||||
val contribSrc = Map("contribSrc" -> "../../../akka-contrib")
|
||||
simplePreprocess(src / "docs", target / "contrib", cacheDir / "sphinx" / "preprocessed-contrib", exts, vars ++ contribSrc, s.log)
|
||||
orig
|
||||
simplePreprocess(
|
||||
(baseDirectory in AkkaBuild.contrib).value / "docs",
|
||||
(target in preprocess in Sphinx).value / "contrib",
|
||||
s.cacheDirectory / "sphinx" / "preprocessed-contrib",
|
||||
(preprocessExts in Sphinx).value,
|
||||
(preprocessVars in Sphinx).value ++ contribSrc,
|
||||
s.log)
|
||||
|
||||
(preprocess in Sphinx).value
|
||||
},
|
||||
enableOutput in generatePdf in Sphinx := true,
|
||||
enableOutput in generateEpub in Sphinx := true,
|
||||
unmanagedSourceDirectories in Test <<= sourceDirectory in Sphinx apply { _ ** "code" get }
|
||||
unmanagedSourceDirectories in Test := ((sourceDirectory in Sphinx).value ** "code").get
|
||||
)
|
||||
|
||||
// pre-processing settings for sphinx
|
||||
lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq(
|
||||
target in preprocess <<= baseDirectory / "rst_preprocessed",
|
||||
target in preprocess := baseDirectory.value / "rst_preprocessed",
|
||||
preprocessExts := Set("rst", "py"),
|
||||
// customization of sphinx @<key>@ replacements, add to all sphinx-using projects
|
||||
// add additional replacements here
|
||||
preprocessVars <<= (scalaVersion, version) { (s, v) =>
|
||||
preprocessVars := {
|
||||
val s = scalaVersion.value
|
||||
val v = version.value
|
||||
val BinVer = """(\d+\.\d+)\.\d+""".r
|
||||
Map(
|
||||
"version" -> v,
|
||||
|
|
@ -72,11 +82,18 @@ object SphinxDoc {
|
|||
"samples" -> "http://github.com/akka/akka-samples"
|
||||
)
|
||||
},
|
||||
preprocess <<= (sourceDirectory, target in preprocess, cacheDirectory, preprocessExts, preprocessVars, streams) map {
|
||||
(src, target, cacheDir, exts, vars, s) => simplePreprocess(src, target, cacheDir / "sphinx" / "preprocessed", exts, vars, s.log)
|
||||
preprocess := {
|
||||
val s = streams.value
|
||||
simplePreprocess(
|
||||
sourceDirectory.value,
|
||||
(target in preprocess).value,
|
||||
s.cacheDirectory / "sphinx" / "preprocessed",
|
||||
preprocessExts.value,
|
||||
preprocessVars.value,
|
||||
s.log)
|
||||
},
|
||||
sphinxInputs <<= (sphinxInputs, preprocess) map { (inputs, preprocessed) => inputs.copy(src = preprocessed) }
|
||||
sphinxInputs := sphinxInputs.value.copy(src = preprocess.value)
|
||||
)) ++ Seq(
|
||||
cleanFiles <+= target in preprocess in Sphinx
|
||||
cleanFiles += (target in preprocess in Sphinx).value
|
||||
)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -41,15 +41,17 @@ object TestExtras {
|
|||
onlyTestTags := Params.testTagsOnly,
|
||||
|
||||
// add filters for tests excluded by name
|
||||
testOptions in Test <++= excludeTestNames map { _.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))) },
|
||||
testOptions in Test ++= excludeTestNames.value.toSeq.map(exclude => Tests.Filter(test => !test.contains(exclude))),
|
||||
|
||||
// add arguments for tests excluded by tag
|
||||
testOptions in Test <++= excludeTestTags map { tags =>
|
||||
testOptions in Test ++= {
|
||||
val tags = excludeTestTags.value
|
||||
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" ")))
|
||||
},
|
||||
|
||||
// add arguments for running only tests by tag
|
||||
testOptions in Test <++= onlyTestTags map { tags =>
|
||||
testOptions in Test ++= {
|
||||
val tags = onlyTestTags.value
|
||||
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" ")))
|
||||
}
|
||||
)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue