Use sbt-site plugin to generate sphinx docs
parent 0afc3b1721
commit 4ca5cf88f3
6 changed files with 35 additions and 189 deletions
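At a glance: the hand-rolled Sphinx driver in project/Sphinx.scala (deleted below) gives way to the sbt-site plugin's SphinxSupport, with only a small preprocess step kept for the @<key>@ substitutions. A minimal sketch of the plugin-based wiring, using only settings that appear in this diff (project id and paths are illustrative, sbt 0.12 syntax):

    import sbt._
    import sbt.Keys._
    import com.typesafe.sbt.site.SphinxSupport
    import com.typesafe.sbt.site.SphinxSupport.{ enable, generatePdf, Sphinx }

    object DocsBuild extends Build {
      lazy val docs = Project(
        id = "docs", // hypothetical project id
        base = file("docs"),
        settings = Defaults.defaultSettings ++ SphinxSupport.settings ++ Seq(
          sourceDirectory in Sphinx <<= baseDirectory / "rst", // rst sources
          enable in generatePdf in Sphinx := true // build the PDF as well as HTML
        )
      )
    }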
.gitignore (vendored, 3 changes)
@@ -49,8 +49,7 @@ multiverse.log
 .eprj
 .*.swp
 akka-docs/_build/
-akka-docs/rst_html
-akka-docs/rst_latex
+akka-docs/rst_preprocessed/
 *.pyc
 akka-docs/exts/
 _akka_cluster/

project/AkkaBuild.scala
@@ -13,10 +13,12 @@ import com.typesafe.sbtscalariform.ScalariformPlugin.ScalariformKeys
 import com.typesafe.sbtosgi.OsgiPlugin.{ OsgiKeys, osgiSettings }
 import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
 import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
+import com.typesafe.sbt.site.SphinxSupport
+import com.typesafe.sbt.site.SphinxSupport.{ enable, generatePdf, sphinxInputs, sphinxPackages, Sphinx }
+import com.typesafe.sbt.preprocess.Preprocess.{ preprocess, preprocessExts, preprocessVars, simplePreprocess }
 import ls.Plugin.{ lsSettings, LsKeys }
 import java.lang.Boolean.getBoolean
 import sbt.Tests
-import Sphinx.{ sphinxDocs, sphinxHtml, sphinxLatex, sphinxPdf, sphinxPygments, sphinxTags, sphinxVars, sphinxExts }
 import LsKeys.{ lsync, docsUrl => lsDocsUrl, tags => lsTags }
 
 object AkkaBuild extends Build {
@@ -32,8 +34,8 @@ object AkkaBuild extends Build {
   lazy val akka = Project(
     id = "akka",
     base = file("."),
-    settings = parentSettings ++ Release.settings ++ Unidoc.settings ++ Sphinx.settings ++ Publish.versionSettings ++
-      Dist.settings ++ mimaSettings ++ sphinxReplacements ++ Seq(
+    settings = parentSettings ++ Release.settings ++ Unidoc.settings ++ Publish.versionSettings ++
+      SphinxSupport.settings ++ Dist.settings ++ mimaSettings ++ Seq(
       testMailbox in GlobalScope := System.getProperty("akka.testMailbox", "false").toBoolean,
       parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean,
       Publish.defaultPublishTo in ThisBuild <<= crossTarget / "repository",
@@ -56,21 +58,11 @@
            |implicit val timeout = Timeout(5 seconds)
            |""".stripMargin,
       initialCommands in Test in ThisBuild += "import akka.testkit._",
-      /*
-       * online version of docs: the main docs are generated by
-       * akka-docs/sphinx and its dependencies, but we need to run the HTML
-       * part twice, so add it also as akka/sphinx. The trick is to reroute the
-       * dependencies of that one to their akka-docs brothers, for which the
-       * “map identity” is crucial; if anybody knows how/why, please drop RK a
-       * line (without it, the pygments task would run twice in parallel for
-       * the same directory, wreaking the expected amount of havoc).
-       */
-      sphinxDocs <<= baseDirectory / "akka-docs/rst",
-      sphinxTags in sphinxHtml += "online",
-      sphinxHtml <<= (sphinxHtml, sphinxHtml in LocalProject(docs.id)) map ((orig, dummy) => orig), // make akka-docs run first
-      sphinxPygments <<= sphinxPygments in LocalProject(docs.id) map identity,
-      sphinxLatex <<= sphinxLatex in LocalProject(docs.id) map identity,
-      sphinxPdf <<= sphinxPdf in LocalProject(docs.id) map identity
+      // generate online version of docs
+      sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
+      // don't regenerate the pdf, just reuse the akka-docs version
+      generatePdf in Sphinx <<= generatePdf in Sphinx in LocalProject(docs.id) map identity
+
     ),
     aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor, mailboxes, zeroMQ, kernel, akkaSbtPlugin, osgi, osgiAries, docs)
   )
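The comment removed above explained the double HTML build for the online docs; its replacement is the two settings just added, which reuse the akka-docs inputs and append an "online" Sphinx tag. Going by the deleted project/Sphinx.scala further down, which turned each tag into a -t option, the tagged build boils down to an invocation roughly like this (paths are illustrative, not from the commit):

    // sketch: effective sphinx-build call for the online HTML docs
    val command = Seq("sphinx-build", "-aEN", "-b", "html",
      "-d", "target/sphinx/doctrees/html") ++
      Seq("-t", "online") ++ // the tag appended via sphinxInputs above
      Seq("akka-docs/rst", "target/sphinx/html")

Inside the rst sources, such a tag can then gate online-only content with Sphinx's only directive (.. only:: online).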
@@ -359,9 +351,11 @@
     base = file("akka-docs"),
     dependencies = Seq(actor, testkit % "test->test", mailboxesCommon % "compile;test->test",
       remote, cluster, slf4j, agent, dataflow, transactor, fileMailbox, zeroMQ, camel, osgi, osgiAries),
-
-    settings = defaultSettings ++ Sphinx.settings ++ sphinxReplacements ++ cpsPlugin ++ Seq(
-      unmanagedSourceDirectories in Test <<= baseDirectory { _ / "rst" ** "code" get },
+    settings = defaultSettings ++ SphinxSupport.settings ++ sphinxPreprocessing ++ cpsPlugin ++ Seq(
+      sourceDirectory in Sphinx <<= baseDirectory / "rst",
+      sphinxPackages in Sphinx <+= baseDirectory { _ / "_sphinx" / "pygments" },
+      enable in generatePdf in Sphinx := true,
+      unmanagedSourceDirectories in Test <<= sourceDirectory in Sphinx apply { _ ** "code" get },
       libraryDependencies ++= Dependencies.docs,
       unmanagedSourceDirectories in ScalariformKeys.format in Test <<= unmanagedSourceDirectories in Test,
       testOptions += Tests.Argument(TestFrameworks.JUnit, "-v")
@@ -529,10 +523,13 @@
     testOptions in Test += Tests.Argument("-oDF")
   )
 
-  // customization of sphinx @<key>@ replacements, add to all sphinx-using projects
-  // add additional replacements here
-  lazy val sphinxReplacements = Seq(
-    sphinxVars <<= (scalaVersion, version) { (s, v) =>
+  // preprocessing settings for sphinx
+  lazy val sphinxPreprocessing = inConfig(Sphinx)(Seq(
+    target in preprocess <<= baseDirectory / "rst_preprocessed",
+    preprocessExts := Set("rst", "py"),
+    // customization of sphinx @<key>@ replacements, add to all sphinx-using projects
+    // add additional replacements here
+    preprocessVars <<= (scalaVersion, version) { (s, v) =>
       val BinVer = """(\d+\.\d+)\.\d+""".r
       Map(
         "version" -> v,
@@ -551,7 +548,12 @@
         })
       )
     },
-    sphinxExts += "py" // needed for transforming conf.py
+    preprocess <<= (sourceDirectory, target in preprocess, cacheDirectory, preprocessExts, preprocessVars, streams) map {
+      (src, target, cacheDir, exts, vars, s) => simplePreprocess(src, target, cacheDir / "sphinx" / "preprocessed", exts, vars, s.log)
+    },
+    sphinxInputs <<= (sphinxInputs, preprocess) map { (inputs, preprocessed) => inputs.copy(src = preprocessed) }
+  )) ++ Seq(
+    cleanFiles <+= target in preprocess in Sphinx
   )
 
   lazy val formatSettings = ScalariformPlugin.scalariformSettings ++ Seq(

project/Dist.scala
@@ -5,6 +5,7 @@ import sbt.Keys._
 import sbt.classpath.ClasspathUtilities
 import sbt.Project.Initialize
 import java.io.File
+import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
 
 object Dist {
   case class DistSources(depJars: Seq[File], libJars: Seq[File], srcJars: Seq[File], docJars: Seq[File], api: File, docs: File)
@@ -28,7 +29,7 @@
     distLibJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageBin.task in Compile),
     distSrcJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageSrc.task in Compile),
     distDocJars <<= (thisProjectRef, buildStructure, distExclude) flatMap aggregated(packageDoc.task in Compile),
-    distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, Unidoc.unidoc, Sphinx.sphinx in docsProject) map DistSources,
+    distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, Unidoc.unidoc, generate in Sphinx in docsProject) map DistSources,
     distDirectory <<= crossTarget / "dist",
     distUnzipped <<= distDirectory / "unzipped",
     distFile <<= (distDirectory, version) { (dir, v) => dir / ("akka-" + v + ".zip") },

project/Release.scala
@@ -3,6 +3,7 @@ package akka
 import sbt._
 import sbt.Keys._
 import java.io.File
+import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
 
 object Release {
   val releaseDirectory = SettingKey[File]("release-directory")
@@ -23,7 +24,7 @@
     val repo = extracted.get(Publish.defaultPublishTo)
     val state1 = extracted.runAggregated(publish in projectRef, state)
     val (state2, api) = extracted.runTask(Unidoc.unidoc, state1)
-    val (state3, docs) = extracted.runTask(Sphinx.sphinx, state2)
+    val (state3, docs) = extracted.runTask(generate in Sphinx, state2)
     val (state4, dist) = extracted.runTask(Dist.dist, state3)
     IO.delete(release)
     IO.createDirectory(release)

project/Sphinx.scala (deleted, 159 lines)
@@ -1,159 +0,0 @@
-/**
- * Copyright (C) 2009-2012 Typesafe Inc. <http://www.typesafe.com>
- */
-
-package akka
-
-import sbt._
-import sbt.Keys._
-import java.io.{ File, PrintWriter }
-
-object Sphinx {
-  val sphinxDocs = SettingKey[File]("sphinx-docs")
-  val sphinxTarget = SettingKey[File]("sphinx-target")
-  val sphinxPygmentsDir = SettingKey[File]("sphinx-pygments-dir")
-  val sphinxTags = SettingKey[Seq[String]]("sphinx-tags")
-  val sphinxPygments = TaskKey[File]("sphinx-pygments", "Sphinx: install pygments styles")
-  val sphinxHtml = TaskKey[File]("sphinx-html", "Sphinx: HTML documentation.")
-  val sphinxLatex = TaskKey[File]("sphinx-latex", "Sphinx: Latex documentation.")
-  val sphinxPdf = TaskKey[File]("sphinx-pdf", "Sphinx: PDF documentation.")
-  val sphinxVars = SettingKey[Map[String, String]]("sphinx-vars", "mappings key->value to be replaced within docs")
-  val sphinxExts = SettingKey[Set[String]]("sphinx-exts", "file extensions which will be filtered for replacements")
-  val sphinx = TaskKey[File]("sphinx", "Build all Sphinx documentation (HTML and PDF combined).")
-
-  lazy val settings = Seq(
-    sphinxDocs <<= baseDirectory / "rst",
-    sphinxTarget <<= crossTarget / "sphinx",
-    sphinxPygmentsDir <<= sphinxDocs { _ / ".." / "_sphinx" / "pygments" },
-    sphinxTags in sphinxHtml := Seq.empty,
-    sphinxTags in sphinxLatex := Seq.empty,
-    sphinxPygments <<= pygmentsTask,
-    sphinxHtml <<= buildTask("html", sphinxTags in sphinxHtml),
-    sphinxLatex <<= buildTask("latex", sphinxTags in sphinxLatex),
-    sphinxPdf <<= pdfTask,
-    sphinxVars := Map("" -> "@"), // this default makes the @@ -> @ subst work
-    sphinxExts := Set("rst"),
-    sphinx <<= sphinxTask
-  )
-
-  def pygmentsTask = (sphinxDocs, sphinxPygmentsDir, sphinxTarget, streams) map {
-    (cwd, pygments, baseTarget, s) => {
-      val target = baseTarget / "site-packages"
-      val empty = (target * "*.egg").get.isEmpty
-      if (empty) {
-        s.log.info("Installing Sphinx pygments styles...")
-        target.mkdirs()
-        val logger = newLogger(s)
-        val command = Seq("easy_install", "--install-dir", target.absolutePath, pygments.absolutePath)
-        val env = "PYTHONPATH" -> target.absolutePath
-        s.log.debug("Command: " + command.mkString(" ") + "\nEnv:" + env)
-        val exitCode = Process(command, cwd, env) ! logger
-        if (exitCode != 0) sys.error("Failed to install custom Sphinx pygments styles: exit code " + exitCode)
-        (pygments * ("*.egg-info" | "build" | "temp")).get.foreach(IO.delete)
-        s.log.info("Sphinx pygments styles installed at: " + target)
-      }
-      target
-    }
-  }
-
-  def buildTask(builder: String, tagsKey: SettingKey[Seq[String]]) = {
-    (cacheDirectory, sphinxDocs, sphinxTarget, sphinxPygments, tagsKey, streams, sphinxVars, sphinxExts) map {
-      (cacheDir, docs, baseTarget, pygments, tags, s, replacements, filterExt) => {
-        val target = baseTarget / builder
-        val doctrees = baseTarget / "doctrees" / builder
-        val temp = docs.getParentFile / (docs.getName + "_" + builder)
-        val cache = cacheDir / "sphinx" / builder
-        val cached = FileFunction.cached(cache)(FilesInfo.hash, FilesInfo.exists) { (in, out) =>
-          def dst(f: File) = temp / IO.relativize(docs, f).get
-          def filter(f: File) = filterExt contains f.getName.reverse.takeWhile('.' !=).reverse
-          val Replacer = """@(\w+)@""".r
-          /*
-           * First Step: bring filtered source tree in sync with orig source tree
-           */
-          // delete files which were removed
-          in.removed foreach (f => IO delete dst(f))
-          // transform the other files by applying the replacement map for @<key>@ tokens
-          (in.modified ++ (in.checked -- out.checked)).toSeq.sorted foreach { f =>
-            if (f.isFile)
-              if (filter(f)) {
-                s.log.debug("Changed documentation source: " + f)
-                IO.reader(f) { reader =>
-                  IO.writer(dst(f), "", IO.defaultCharset, append = false) { writer =>
-                    val wr = new PrintWriter(writer)
-                    IO.foreachLine(reader) { line =>
-                      wr.println(Replacer.replaceAllIn(line, m => replacements.getOrElse(m.group(1), {
-                        s.log.warn("unknown replacement " + m.group(1) + " in " + replacements)
-                        m.group(0)
-                      })))
-                    }
-                  }
-                }
-              } else {
-                // do not transform PNGs et al
-                s.log.debug("Changed documentation source (copying): " + f)
-                IO.copyFile(f, dst(f))
-              }
-          }
-          /*
-           * Second Step: invoke sphinx-build
-           */
-          val tagList = if (tags.isEmpty) "" else tags.mkString(" (", ", ", ")")
-          val desc = "%s%s" format (builder, tagList)
-          s.log.info("Building Sphinx %s documentation..." format desc)
-          val logger = newLogger(s)
-          val tagOptions = tags flatMap (Seq("-t", _))
-          val command = Seq("sphinx-build", "-aEN", "-b", builder, "-d", doctrees.absolutePath) ++ tagOptions ++ Seq(temp.absolutePath, target.absolutePath)
-          val env = "PYTHONPATH" -> pygments.absolutePath
-          s.log.debug("Command: " + command.mkString(" "))
-          val exitCode = Process(command, docs, env) ! logger
-          if (exitCode != 0) sys.error("Failed to build Sphinx %s documentation." format desc)
-          s.log.info("Sphinx %s documentation created: %s" format (desc, target))
-          temp.descendentsExcept("*", "").get.toSet
-        }
-        val toplevel = docs * ("*" - "disabled")
-        val inputs = toplevel.descendentsExcept("*", "").get.toSet
-        cached(inputs)
-        target
-      }
-    }
-  }
-
-  def pdfTask = (sphinxLatex, streams) map {
-    (latex, s) => {
-      val pdf = latex / "Akka.pdf"
-      def failed = sys.error("Failed to build Sphinx pdf documentation.")
-      if (!pdf.exists) {
-        s.log.info("Building Sphinx pdf documentation...")
-        val logger = newLogger(s)
-        val exitCode = Process(Seq("make", "all-pdf"), latex) ! logger
-        if (exitCode != 0) failed
-        s.log.info("Sphinx pdf documentation created: %s" format pdf)
-      }
-      pdf
-    }
-  }
-
-  def newLogger(streams: TaskStreams) = {
-    new ProcessLogger {
-      def info(message: => String): Unit = {
-        val m = message
-        if (m contains "ERROR") streams.log.error(message)
-        else if (m contains "WARNING") streams.log.warn(message)
-        else streams.log.debug(message)
-      }
-      def error(e: => String): Unit = streams.log.warn(e)
-      def buffer[T](f: => T): T = f
-    }
-  }
-
-  def sphinxTask = (sphinxHtml, sphinxPdf, sphinxTarget, streams) map {
-    (html, pdf, baseTarget, s) => {
-      val target = baseTarget / "docs"
-      IO.delete(target)
-      IO.copyDirectory(html, target)
-      IO.copyFile(pdf, target / pdf.name)
-      s.log.info("Combined Sphinx documentation: %s" format target)
-      target
-    }
-  }
-}
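For reference, the incrementality in the deleted buildTask came from sbt's FileFunction.cached: the wrapped function only sees files whose hashes changed since the last run (in.modified, in.removed and in.checked are fields of the resulting change report). A stripped-down sketch of the idiom, with hypothetical names; the real task rewrote @<key>@ tokens instead of copying verbatim:

    import sbt._

    // Returns a Set[File] => Set[File]; call it with the full input set,
    // as buildTask did with cached(inputs).
    def copyChanged(cacheDir: File, src: File, dst: File) =
      FileFunction.cached(cacheDir)(FilesInfo.hash, FilesInfo.exists) { (in, out) =>
        for (f <- in.modified; rel <- IO.relativize(src, f)) yield {
          val target = dst / rel
          IO.copyFile(f, target) // hypothetical transform step goes here
          target
        }
      }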

project/plugins.sbt
@@ -8,6 +8,8 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.3")
 
 addSbtPlugin("com.typesafe.sbtscalariform" % "sbtscalariform" % "0.4.0")
 
+addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.7.0-SNAPSHOT")
+
 addSbtPlugin("com.typesafe.sbtosgi" % "sbtosgi" % "0.3.0")
 
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.3")