=pro #15031 use sbt-unidoc plugin

Martynas Mickevicius 2014-04-25 16:32:43 +02:00
parent d84d583585
commit 7b35ded34e
5 changed files with 46 additions and 123 deletions
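The change replaces the hand-rolled scaladoc/javadoc aggregation in the custom Unidoc build file (deleted below) with the sbt-unidoc plugin, version 0.3.1: the plugin is added to the sbt plugins file, the root akka project picks up the plugin's settings, and project exclusion is expressed as a ProjectFilter instead of the old unidocExclude list of project ids. As a rough sketch of the same wiring outside the Akka build (project names below are illustrative, not Akka's):

    // project/plugins.sbt
    // addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")

    // project/SketchBuild.scala -- minimal sbt 0.13 build definition, assuming
    // two modules `core` and `samples`, with samples left out of the combined docs
    import sbt._
    import sbt.Keys._
    import sbtunidoc.Plugin.{ ScalaUnidoc, scalaUnidocSettings }
    import sbtunidoc.Plugin.UnidocKeys.{ unidoc, unidocProjectFilter }

    object SketchBuild extends Build {
      lazy val core = Project(id = "core", base = file("core"))
      lazy val samples = Project(id = "samples", base = file("samples"))

      lazy val root = Project(
        id = "root",
        base = file("."),
        // scalaUnidocSettings adds the ScalaUnidoc config and a `unidoc` task that
        // runs one scaladoc over the sources of all aggregated projects
        settings = scalaUnidocSettings ++ Seq(
          // the declarative replacement for the old unidocExclude list
          unidocProjectFilter in (ScalaUnidoc, unidoc) := inAnyProject -- inProjects(samples)
        ),
        aggregate = Seq(core, samples)
      )
    }

Running `sbt unidoc` on such a build produces one combined scaladoc tree (the plugin writes it into a unidoc directory under the root project's target), which is what the updated Dist build file below references via `doc in ScalaUnidoc`.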

View file

@@ -24,8 +24,9 @@ import java.io.{PrintWriter, InputStreamReader, FileInputStream, File}
 import java.nio.charset.Charset
 import java.util.Properties
 import annotation.tailrec
-import Unidoc.{ JavaDoc, javadocSettings, junidocSources, sunidoc, unidocExclude }
-import TestExtras. { JUnitFileReporting, StatsDMetrics }
+import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocSettings, scalaUnidocSettings }
+import sbtunidoc.Plugin.UnidocKeys.{ unidoc, unidocProjectFilter }
+import TestExtras.{ JUnitFileReporting, StatsDMetrics }
 import com.typesafe.sbt.S3Plugin.{ S3, s3Settings }

 object AkkaBuild extends Build {
@@ -49,17 +50,15 @@ object AkkaBuild extends Build {
   lazy val akka = Project(
     id = "akka",
     base = file("."),
-    settings = parentSettings ++ Release.settings ++ Unidoc.settings ++ Publish.versionSettings ++
-      SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ unidocScaladocSettings ++
-      StatsDMetrics.settings ++
-      Protobuf.settings ++ inConfig(JavaDoc)(Defaults.configSettings) ++ Seq(
+    settings = parentSettings ++ Release.settings ++ unidocSettings ++ Publish.versionSettings ++
+      SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ scaladocSettings ++
+      StatsDMetrics.settings ++ Protobuf.settings ++ inTask(unidoc)(Seq(
+        unidocProjectFilter in ScalaUnidoc := docProjectFilter,
+        unidocProjectFilter in JavaUnidoc := docProjectFilter,
+        apiMappings in ScalaUnidoc := (apiMappings in (Compile, doc)).value
+      )) ++ Seq(
       parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean,
       Publish.defaultPublishTo in ThisBuild <<= crossTarget / "repository",
-      unidocExclude := Seq(samples.id, remoteTests.id),
-      sources in JavaDoc <<= junidocSources,
-      javacOptions in JavaDoc := Seq(),
-      artifactName in packageDoc in JavaDoc := ((sv, mod, art) => "" + mod.name + "_" + sv.binary + "-" + mod.revision + "-javadoc.jar"),
-      packageDoc in Compile <<= packageDoc in JavaDoc,
       Dist.distExclude := Seq(actorTests.id, docs.id, samples.id, osgi.id),
       // generate online version of docs
       sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
@@ -71,11 +70,11 @@ object AkkaBuild extends Build {
       S3.progress in S3.upload := true,
       mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) =>
         val downloads = d / "downloads"
         val archivesPathFinder = (downloads * ("*" + v + ".zip")) +++ (downloads * ("*" + v + ".tgz"))
         archivesPathFinder.get.map(file => (file -> ("akka/" + file.getName)))
       },
       // add reportBinaryIssues to validatePullRequest on minor version maintenance branch
-      validatePullRequest <<= (Unidoc.unidoc, SphinxSupport.generate in Sphinx in docs) map { (_, _) => }
+      validatePullRequest <<= (unidoc in Compile, SphinxSupport.generate in Sphinx in docs) map { (_, _) => }
     ),
     aggregate = Seq(actor, testkit, actorTests, remote, remoteTests, camel, cluster, slf4j, agent,
       persistence, zeroMQ, kernel, osgi, docs, contrib, samples, multiNodeTestkit)
@@ -288,7 +287,7 @@ object AkkaBuild extends Build {
     id = "akka-samples",
     base = file("akka-samples"),
     settings = parentSettings ++ ActivatorDist.settings,
     aggregate = Seq(camelSampleJava, camelSampleScala, mainSampleJava, mainSampleScala,
       remoteSampleJava, remoteSampleScala, clusterSampleJava, clusterSampleScala,
       fsmSampleScala, persistenceSampleJava, persistenceSampleScala,
       multiNodeSampleScala, helloKernelSample, osgiDiningHakkersSample)
@@ -300,7 +299,7 @@ object AkkaBuild extends Build {
     dependencies = Seq(actor, camel),
     settings = sampleSettings ++ Seq(libraryDependencies ++= Dependencies.camelSample)
   )

   lazy val camelSampleScala = Project(
     id = "akka-sample-camel-scala",
     base = file("akka-samples/akka-sample-camel-scala"),
@@ -321,7 +320,7 @@ object AkkaBuild extends Build {
     dependencies = Seq(actor),
     settings = sampleSettings
   )

   lazy val mainSampleScala = Project(
     id = "akka-sample-main-scala",
     base = file("akka-samples/akka-sample-main-scala"),
@@ -342,7 +341,7 @@ object AkkaBuild extends Build {
     dependencies = Seq(actor, remote),
     settings = sampleSettings
   )

   lazy val remoteSampleScala = Project(
     id = "akka-sample-remote-scala",
     base = file("akka-samples/akka-sample-remote-scala"),
@@ -381,7 +380,7 @@ object AkkaBuild extends Build {
       }
     )
   ) configs (MultiJvm)

   lazy val clusterSampleScala = Project(
     id = "akka-sample-cluster-scala",
     base = file("akka-samples/akka-sample-cluster-scala"),
@@ -399,7 +398,7 @@ object AkkaBuild extends Build {
       }
     )
   ) configs (MultiJvm)

   lazy val multiNodeSampleScala = Project(
     id = "akka-sample-multi-node-scala",
     base = file("akka-samples/akka-sample-multi-node-scala"),
@@ -471,7 +470,7 @@ object AkkaBuild extends Build {
       }},
       // force publication of artifacts to local maven repo
       compile in Compile <<=
         (publishM2 in actor, publishM2 in testkit, publishM2 in remote, publishM2 in cluster, publishM2 in osgi,
          publishM2 in slf4j, publishM2 in persistence, compile in Compile) map
           ((_, _, _, _, _, _, _, c) => c))
     else Seq.empty
@@ -718,13 +717,12 @@ object AkkaBuild extends Build {
     // don't save test output to a file
     testListeners in (Test, test) := Seq(TestLogger(streams.value.log, {_ => streams.value.log }, logBuffered.value)),
     validatePullRequestTask,
     // add reportBinaryIssues to validatePullRequest on minor version maintenance branch
     validatePullRequest <<= validatePullRequest.dependsOn(reportBinaryIssues)
   ) ++ mavenLocalResolverSettings ++ JUnitFileReporting.settings ++ StatsDMetrics.settings

   val validatePullRequest = TaskKey[Unit]("validate-pull-request", "Additional tasks for pull request validation")
   // the tasks that to run for validation is defined in defaultSettings
@@ -774,7 +772,7 @@ object AkkaBuild extends Build {
     ScalariformKeys.preferences in Compile := formattingPreferences,
     ScalariformKeys.preferences in Test := formattingPreferences
   )

   lazy val docFormatSettings = SbtScalariform.scalariformSettings ++ Seq(
     ScalariformKeys.preferences in Compile := docFormattingPreferences,
     ScalariformKeys.preferences in Test := docFormattingPreferences,
@@ -788,7 +786,7 @@ object AkkaBuild extends Build {
       .setPreference(AlignParameters, true)
       .setPreference(AlignSingleLineCaseStatements, true)
   }

   def docFormattingPreferences = {
     import scalariform.formatter.preferences._
     FormattingPreferences()
@@ -833,6 +831,15 @@ object AkkaBuild extends Build {
     case (false, _) => Seq.empty
   })

+  val genjavadocEnabled = System.getProperty("akka.genjavadoc.enabled", "false").toBoolean
+  val (unidocSettings, javadocSettings) =
+    if (genjavadocEnabled) (scalaJavaUnidocSettings, genjavadocSettings)
+    else (scalaUnidocSettings, Nil)
+
+  val docProjectFilter = inAnyProject --
+    inAggregates(samples, transitive = true, includeRoot = true) --
+    inProjects(remoteTests)
+
   lazy val scaladocDiagramsEnabled = System.getProperty("akka.scaladoc.diagrams", "true").toBoolean
   lazy val scaladocAutoAPI = System.getProperty("akka.scaladoc.autoapi", "true").toBoolean
@@ -846,7 +853,7 @@ object AkkaBuild extends Build {
       scaladocSettingsNoVerificationOfDiagrams ++
       (if (scaladocDiagramsEnabled) Seq(doc in Compile ~= scaladocVerifier) else Seq.empty)
   }

   // for projects with few (one) classes there might not be any diagrams
   lazy val scaladocSettingsNoVerificationOfDiagrams: Seq[sbt.Setting[_]] = {
     inTask(doc)(Seq(
@@ -854,14 +861,6 @@ object AkkaBuild extends Build {
       autoAPIMappings := scaladocAutoAPI
     ))
   }

-  lazy val unidocScaladocSettings: Seq[sbt.Setting[_]]= {
-    inTask(doc)(Seq(
-      scalacOptions <++= (version, baseDirectory in akka) map scaladocOptions,
-      autoAPIMappings := scaladocAutoAPI
-    )) ++
-      (if (scaladocDiagramsEnabled) Seq(sunidoc ~= scaladocVerifier) else Seq.empty)
-  }
-
   def scaladocVerifier(file: File): File= {
     @tailrec
@@ -892,7 +891,7 @@ object AkkaBuild extends Build {
       else
         file
   }

   lazy val mimaIgnoredProblems = {
     import com.typesafe.tools.mima.core._
     Seq(
@@ -906,7 +905,7 @@ object AkkaBuild extends Build {
     binaryIssueFilters ++= mimaIgnoredProblems
   )

   def akkaPreviousArtifact(id: String, organization: String = "com.typesafe.akka", version: String = "2.3.0",
     crossVersion: String = "2.10"): Option[sbt.ModuleID] =
     if (enableMiMa) {
       val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
@@ -945,8 +944,8 @@ object AkkaBuild extends Build {
     OsgiKeys.importPackage := (osgiOptionalImports map optionalResolution) ++ Seq("!sun.misc", scalaImport(), configImport(), "*"),
     // dynamicImportPackage needed for loading classes defined in configuration
     OsgiKeys.dynamicImportPackage := Seq("*")
   )

   val agent = exports(Seq("akka.agent.*"))

   val camel = exports(Seq("akka.camel.*"))
@@ -977,7 +976,7 @@ object AkkaBuild extends Build {
     // needed because testkit is normally not used in the application bundle,
     // but it should still be included as transitive dependency and used by BundleDelegatingClassLoader
     // to be able to find refererence.conf
     "akka.testkit",
     "com.google.protobuf")

   def exports(packages: Seq[String] = Seq(), imports: Seq[String] = Nil) = osgiSettings ++ Seq(
@@ -1071,7 +1070,7 @@ object Dependencies {
   }

   import Compile._

   val scalaXmlDepencency = (if (AkkaBuild.requestedScalaVersion.startsWith("2.10")) Nil else Seq(Test.scalaXml))

   val actor = Seq(config)
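A note on the AkkaBuild changes above: javadoc generation stays optional. `scalaJavaUnidocSettings` (ScalaUnidoc plus JavaUnidoc) and the per-project `genjavadocSettings` are only selected when the `akka.genjavadoc.enabled` system property is true, mirroring the `GenJavaDocEnabled` flag of the deleted Unidoc build file; otherwise the scaladoc-only `scalaUnidocSettings` are used. A standalone sketch of that toggle (the value names are illustrative, not Akka's):

    // Sketch of the genjavadoc on/off switch, assuming sbt-unidoc 0.3.1 on the plugin classpath.
    import sbt._
    import sbtunidoc.Plugin.{ genjavadocSettings, scalaJavaUnidocSettings, scalaUnidocSettings }

    object DocSettingsSketch {
      // -Dakka.genjavadoc.enabled=true turns on the genjavadoc compiler plugin
      // and the JavaUnidoc configuration in addition to ScalaUnidoc
      val genjavadocEnabled =
        System.getProperty("akka.genjavadoc.enabled", "false").toBoolean

      // first element: settings for the aggregating (root) project,
      // second element: settings each compiled module needs (the compiler plugin)
      val (rootDocSettings, perModuleDocSettings): (Seq[Setting[_]], Seq[Setting[_]]) =
        if (genjavadocEnabled) (scalaJavaUnidocSettings, genjavadocSettings)
        else (scalaUnidocSettings, Nil)
    }

With the flag on, `sbt -Dakka.genjavadoc.enabled=true unidoc` should produce both the aggregated scaladoc and the aggregated javadoc.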

View file

@@ -6,6 +6,7 @@ import sbt.classpath.ClasspathUtilities
 import sbt.Project.Initialize
 import java.io.File
 import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
+import sbtunidoc.Plugin._

 object Dist {
   case class DistSources(depJars: Seq[File], libJars: Seq[File], srcJars: Seq[File], docJars: Seq[File], api: File, docs: File)
@@ -29,7 +30,7 @@ object Dist {
     distLibJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageBin.task in Compile),
     distSrcJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageSrc.task in Compile),
     distDocJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageDoc.task in Compile),
-    distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, Unidoc.sunidoc, generate in Sphinx in docsProject) map DistSources,
+    distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, doc in ScalaUnidoc, generate in Sphinx in docsProject) map DistSources,
     distDirectory <<= crossTarget / "dist",
     distUnzipped <<= distDirectory / "unzipped",
     distFile <<= (distDirectory, version) { (dir, v) => dir / ("akka-" + v + ".zip") },

View file

@@ -6,6 +6,7 @@ import java.io.File
 import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
 import com.typesafe.sbt.pgp.PgpKeys.publishSigned
 import com.typesafe.sbt.S3Plugin.S3
+import sbtunidoc.Plugin.UnidocKeys._

 object Release {
   val releaseDirectory = SettingKey[File]("release-directory")
@@ -25,11 +26,11 @@ object Release {
       val projectRef = extracted.get(thisProjectRef)
       val repo = extracted.get(Publish.defaultPublishTo)
       val state1 = extracted.runAggregated(publishSigned in projectRef, state)
-      val (state2, (api, japi)) = extracted.runTask(Unidoc.unidoc, state1)
+      val (state2, Seq(api, japi)) = extracted.runTask(unidoc in Compile, state1)
       val (state3, docs) = extracted.runTask(generate in Sphinx, state2)
       val (state4, dist) = extracted.runTask(Dist.dist, state3)
       val (state5, activatorDist) = extracted.runTask(ActivatorDist.activatorDist in LocalProject(AkkaBuild.samples.id), state4)

       IO.delete(release)
       IO.createDirectory(release)
       IO.copyDirectory(repo, release / "releases")
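In the Release build file the result shape of the doc task changes: the old custom `Unidoc.unidoc` returned a `(File, File)` tuple, while sbt-unidoc's `unidoc in Compile` returns a `Seq[File]` with one output directory per enabled Unidoc configuration, so the destructuring becomes `Seq(api, japi)` (the Akka code assumes ScalaUnidoc output first, then JavaUnidoc). A hedged sketch of consuming that result from a command, using an explicit match instead of the irrefutable pattern (names here are illustrative):

    // Sketch only: pulls the unidoc output directories out of a State,
    // assuming both ScalaUnidoc and JavaUnidoc are enabled (genjavadoc mode).
    import sbt._
    import sbtunidoc.Plugin.UnidocKeys.unidoc

    def unidocOutputs(state: State): (File, File) = {
      val extracted = Project.extract(state)
      // runTask returns the updated State together with the task result (Seq[File]);
      // a real command would carry the returned State forward, dropped here for brevity
      val (_, outputDirs) = extracted.runTask(unidoc in Compile, state)
      outputDirs match {
        case Seq(scaladocDir, javadocDir) => (scaladocDir, javadocDir)
        case other => sys.error("expected scaladoc and javadoc unidoc directories, got " + other)
      }
    }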

View file

@@ -1,80 +0,0 @@
-package akka
-
-import sbt._
-import sbt.Keys._
-import sbt.Project.Initialize
-
-object Unidoc {
-  lazy val JavaDoc = config("genjavadoc") extend Compile
-
-  lazy val GenJavaDocEnabled = Option(sys.props("akka.genjavadoc.enabled")) filter (_.toLowerCase == "true") map (_ => true) getOrElse false
-
-  lazy val javadocSettings =
-    inConfig(JavaDoc)(Defaults.configSettings) ++
-      (if (GenJavaDocEnabled) Seq(
-        packageDoc in Compile <<= packageDoc in JavaDoc,
-        sources in JavaDoc <<= (target, compile in Compile, sources in Compile) map ((t, c, s) =>
-          (t / "java" ** "*.java").get ++ s.filter(_.getName.endsWith(".java"))
-        ),
-        javacOptions in JavaDoc := Seq(),
-        artifactName in packageDoc in JavaDoc := ((sv, mod, art) => "" + mod.name + "_" + sv.binary + "-" + mod.revision + "-javadoc.jar"),
-        libraryDependencies += Dependencies.Compile.genjavadoc,
-        scalacOptions <+= target map (t => "-P:genjavadoc:out=" + (t / "java"))
-      ) else Nil)
-
-  val unidocDirectory = SettingKey[File]("unidoc-directory")
-  val unidocExclude = SettingKey[Seq[String]]("unidoc-exclude")
-  val unidocAllSources = TaskKey[Seq[Seq[File]]]("unidoc-all-sources")
-  val unidocSources = TaskKey[Seq[File]]("unidoc-sources")
-  val unidocAllClasspaths = TaskKey[Seq[Classpath]]("unidoc-all-classpaths")
-  val unidocClasspath = TaskKey[Seq[File]]("unidoc-classpath")
-  val unidoc = TaskKey[(File, File)]("unidoc", "Create unified scaladoc and javadoc for all aggregates")
-  val sunidoc = TaskKey[File]("sunidoc", "Create unified scaladoc for all aggregates")
-  val junidoc = TaskKey[File]("junidoc", "Create unified javadoc for all aggregates")
-  val junidocAllSources = TaskKey[Seq[Seq[File]]]("junidoc-all-sources")
-  val junidocSources = TaskKey[Seq[File]]("junidoc-sources")
-
-  lazy val settings = Seq(
-    unidocDirectory <<= crossTarget / "unidoc",
-    unidocExclude := Seq.empty,
-    unidocAllSources <<= (thisProjectRef, buildStructure, unidocExclude) flatMap allSources(Compile),
-    unidocSources <<= unidocAllSources map { _.flatten },
-    unidocAllClasspaths <<= (thisProjectRef, buildStructure, unidocExclude) flatMap allClasspaths,
-    unidocClasspath <<= unidocAllClasspaths map { _.flatten.map(_.data).distinct },
-    junidocAllSources <<= (thisProjectRef, buildStructure, unidocExclude) flatMap allSources(JavaDoc),
-    junidocSources <<= junidocAllSources map { _.flatten },
-    sunidoc <<= sunidocTask,
-    junidoc <<= (doc in JavaDoc),
-    unidoc <<= (sunidoc, junidoc) map ((s, t) ⇒ (s, t))
-  )
-
-  def allSources(conf: Configuration)(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Task[Seq[Seq[File]]] = {
-    val projects = aggregated(projectRef, structure, exclude)
-    projects flatMap { sources in conf in LocalProject(_) get structure.data } join
-  }
-
-  def allClasspaths(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Task[Seq[Classpath]] = {
-    val projects = aggregated(projectRef, structure, exclude)
-    projects flatMap { dependencyClasspath in Compile in LocalProject(_) get structure.data } join
-  }
-
-  def aggregated(projectRef: ProjectRef, structure: Load.BuildStructure, exclude: Seq[String]): Seq[String] = {
-    val aggregate = Project.getProject(projectRef, structure).toSeq.flatMap(_.aggregate)
-    aggregate flatMap { ref =>
-      if (exclude contains ref.project) Seq.empty
-      else ref.project +: aggregated(ref, structure, exclude)
-    }
-  }
-
-  def sunidocTask: Initialize[Task[File]] = {
-    (compilers, cacheDirectory, unidocSources, unidocClasspath, unidocDirectory, scalacOptions in doc, apiMappings in (Compile, doc), streams) map {
-      (compilers, cache, sources, classpath, target, options, api, s) => {
-        val scaladoc = new Scaladoc(100, compilers.scalac)
-        val opts1 = options ++ Opts.doc.externalAPI(api)
-        scaladoc.cached(cache / "unidoc", "main", sources, classpath, target, opts1, s.log)
-        target
-      }
-    }
-  }
-}

View file

@@ -18,4 +18,6 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1")
 addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")

+addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.1")
+
 libraryDependencies += "com.timgroup" % "java-statsd-client" % "2.0.0"