Do not build and publish dist and activator zips (#22223)
* Release script updates
parent 3a2a618fa2
commit 0ab494e5dd

7 changed files with 10 additions and 220 deletions
@@ -1,7 +1,6 @@
-import akka.{ AkkaBuild, Dependencies, Formatting, OSGi, Dist }
+import akka.{ AkkaBuild, Dependencies, Formatting, OSGi }
 
 AkkaBuild.defaultSettings
-Dist.includeInDist := false
 Formatting.formatSettings
 OSGi.osgi
 Dependencies.osgi

@@ -1,48 +0,0 @@
-/**
- * Copyright (C) 2016-2017 Lightbend Inc. <http://www.lightbend.com>
- */
-package akka
-
-import sbt._
-import sbt.Keys._
-import sbt.Def.Initialize
-import java.io.File
-import sbt.Task
-
-object ActivatorDist {
-
-  val activatorDistDirectory = SettingKey[File]("activator-dist-directory")
-  val activatorDist = TaskKey[File]("activator-dist", "Create a zipped distribution of each activator sample.")
-
-  lazy val settings: Seq[Setting[_]] = Seq(
-    activatorDistDirectory <<= crossTarget / "activator-dist",
-    activatorDist <<= activatorDistTask
-  )
-
-  def activatorDistTask: Initialize[Task[File]] = {
-    (thisProjectRef, baseDirectory, activatorDistDirectory, version, buildStructure, streams) map {
-      (project, projectBase, activatorDistDirectory, version, structure, s) => {
-        val directories = projectBase.listFiles(DirectoryFilter).filter(dir => (dir / "activator.properties").exists)
-        val rootGitignoreLines = IO.readLines(AkkaBuild.root.base / ".gitignore")
-        for (dir <- directories) {
-          val localGitignoreLines = if ((dir / ".gitignore").exists) IO.readLines(dir / ".gitignore") else Nil
-          val gitignoreFileFilter = (".gitignore" :: localGitignoreLines ::: rootGitignoreLines).
-            foldLeft[FileFilter](NothingFilter)((acc, x) => acc || x)
-          val filteredPathFinder = PathFinder(dir) descendantsExcept("*", gitignoreFileFilter) filter(_.isFile)
-          filteredPathFinder pair Path.rebase(dir, activatorDistDirectory / dir.name) map {
-            case (source, target) =>
-              s.log.info(s"copying: $source -> $target")
-              IO.copyFile(source, target, preserveLastModified = true)
-          }
-          val targetDir = activatorDistDirectory / dir.name
-          val targetFile = activatorDistDirectory / (dir.name + "-" + version + ".zip")
-          s.log.info(s"zipping: $targetDir -> $targetFile")
-          Dist.zip(targetDir, targetFile)
-        }
-
-        activatorDistDirectory
-      }
-    }
-  }
-
-}

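Note on the deleted task above: its exclusion logic folds every .gitignore line into a single sbt FileFilter, relying on sbt's implicit conversion from glob strings to name filters. A minimal standalone sketch of that fold, using a hypothetical pattern list (sbt 0.13 API, matching the deleted code):

    import sbt._

    // Each pattern string is implicitly converted to a glob filter and OR-ed
    // into one combined exclusion filter; NothingFilter (matches no file) is
    // the identity element of the fold.
    val patterns = List(".gitignore", "target", "*.log") // hypothetical patterns
    val excluded: FileFilter = patterns.foldLeft[FileFilter](NothingFilter)((acc, p) => acc || p)

    // As in the deleted activatorDistTask: walk a sample directory, keeping
    // only regular files that do not match the exclusion filter.
    def filesToPackage(dir: File): Seq[File] =
      (PathFinder(dir) descendantsExcept ("*", excluded)).filter(_.isFile).get
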
@@ -9,8 +9,6 @@ import java.io.InputStreamReader
 import java.util.Properties
 
 import akka.TestExtras.JUnitFileReporting
-import com.typesafe.sbt.S3Plugin.S3
-import com.typesafe.sbt.S3Plugin.s3Settings
 import com.typesafe.sbt.pgp.PgpKeys.publishSigned
 import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.MultiJvm
 import com.typesafe.tools.mima.plugin.MimaPlugin

@@ -38,17 +36,10 @@ object AkkaBuild extends Build {
   )
 
   lazy val rootSettings = parentSettings ++ Release.settings ++
-    SphinxDoc.akkaSettings ++ Dist.settings ++ s3Settings ++
+    SphinxDoc.akkaSettings ++
     UnidocRoot.akkaSettings ++
     Protobuf.settings ++ Seq(
-      parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean,
-      S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com",
-      S3.progress in S3.upload := true,
-      mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) =>
-        val downloads = d / "downloads"
-        val archivesPathFinder = downloads * s"*$v.zip"
-        archivesPathFinder.get.map(file => file -> ("akka/" + file.getName))
-      }
+      parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean
     )
 
   lazy val aggregatedProjects: Seq[ProjectReference] = Seq(

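The removed mappings setting was what tied local artifacts to S3 keys: it selected every versioned zip under the release downloads directory and mapped each file to a key under akka/ in the bucket. The same selection logic as a standalone sketch (hypothetical helper name; sbt 0.13 path-finder API):

    import sbt._

    // Find "*<version>.zip" under a downloads directory and pair each file
    // with the S3 key it would have been uploaded to.
    def uploadMappings(downloads: File, v: String): Seq[(File, String)] =
      (downloads * s"*$v.zip").get.map(f => f -> ("akka/" + f.getName))
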
@@ -290,7 +281,7 @@ object AkkaBuild extends Build {
     dependencies = Seq(remote, remoteTests % "test->test", cluster, clusterTools, persistence % "compile;test->provided")
   ).configs(MultiJvm)
 
-  lazy val samplesSettings = parentSettings ++ ActivatorDist.settings
+  lazy val samplesSettings = parentSettings
 
   lazy val samples = Project(
     id = "akka-samples",

@@ -357,8 +348,7 @@ object AkkaBuild extends Build {
   val dontPublishSettings = Seq(
     publishSigned := (),
     publish := (),
-    publishArtifact in Compile := false,
-    Dist.includeInDist := false
+    publishArtifact in Compile := false
   )
 
   val dontPublishDocsSettings = Seq(

@@ -474,9 +464,7 @@ object AkkaBuild extends Build {
 
     // -v Log "test run started" / "test started" / "test run finished" events on log level "info" instead of "debug".
     // -a Show stack traces and exception class name for AssertionErrors.
-    testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
-
-    Dist.includeInDist := true
+    testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a")
   ) ++
     mavenLocalResolverSettings ++
     JUnitFileReporting.settings ++

@@ -1,118 +0,0 @@
-/**
- * Copyright (C) 2016-2017 Lightbend Inc. <http://www.lightbend.com>
- */
-package akka
-
-import sbt._
-import sbt.Keys._
-import sbt.classpath.ClasspathUtilities
-import java.io.File
-import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
-import sbtunidoc.Plugin._
-
-object Dist {
-  case class DistSources(depJars: Seq[File], libJars: Seq[File], srcJars: Seq[File], docJars: Seq[File], api: File, docs: File)
-
-  val distDirectory = SettingKey[File]("dist-directory")
-  val distUnzipped = SettingKey[File]("dist-unzipped")
-  val distFile = SettingKey[File]("dist-file")
-
-  val distAllClasspaths = TaskKey[Seq[Classpath]]("dist-all-classpaths")
-  val distDependencies = TaskKey[Seq[File]]("dist-dependencies")
-  val distLibJars = TaskKey[Seq[File]]("dist-lib-jars")
-  val distSrcJars = TaskKey[Seq[File]]("dist-src-jars")
-  val distDocJars = TaskKey[Seq[File]]("dist-doc-jars")
-  val distSources = TaskKey[DistSources]("dist-sources")
-  val dist = TaskKey[File]("dist", "Create a zipped distribution of everything.")
-  val includeInDist = SettingKey[Boolean]("include-in-dist", "Include the artifact of this project in the standalone dist zip-file")
-
-  lazy val settings: Seq[Setting[_]] = Seq(
-    distAllClasspaths <<= (thisProjectRef, buildStructure) flatMap aggregated(dependencyClasspath in Compile),
-    distDependencies <<= distAllClasspaths map { _.flatten.map(_.data).filter(ClasspathUtilities.isArchive).distinct },
-    distLibJars <<= (thisProjectRef, buildStructure) flatMap aggregated(packageBin in Compile),
-    distSrcJars <<= (thisProjectRef, buildStructure) flatMap aggregated(packageSrc in Compile),
-    distDocJars <<= (thisProjectRef, buildStructure) flatMap aggregated(packageDoc in Compile),
-    distSources <<= (distDependencies, distLibJars, distSrcJars, distDocJars, doc in ScalaUnidoc, generate in Sphinx in docsProject) map DistSources,
-    distDirectory <<= crossTarget / "dist",
-    distUnzipped <<= distDirectory / "unzipped",
-    distFile <<= (distDirectory, version, scalaBinaryVersion) { (dir, v, sbv) =>
-      dir / ("akka_" + sbv + "-" + v + ".zip") },
-    dist <<= distTask
-  )
-
-  def docsProject: ProjectReference = LocalProject(AkkaBuild.docs.id)
-
-  def aggregated[T](task: TaskKey[T])(projectRef: ProjectRef, structure: BuildStructure): Task[Seq[T]] = {
-    val projects = aggregatedProjects(projectRef, structure, task.scope)
-    projects flatMap { task in _ get structure.data } join
-  }
-
-  def aggregatedProjects(projectRef: ProjectRef, structure: BuildStructure, scope: Scope): Seq[ProjectRef] = {
-    val aggregate = Project.getProject(projectRef, structure).toSeq.flatMap(_.aggregate)
-    aggregate flatMap { ref =>
-      if (!(includeInDist in ref in scope get structure.data getOrElse false)) Nil
-      else ref +: aggregatedProjects(ref, structure, scope)
-    }
-  }
-
-  def distTask: Def.Initialize[Task[File]] = {
-    (baseDirectory, distSources, distUnzipped, version, distFile, streams) map {
-      (projectBase, allSources, unzipped, version, zipFile, s) => {
-        val base = unzipped / ("akka-" + version)
-        val distBase = projectBase / "akka-kernel" / "src" / "main" / "dist"
-        val doc = base / "doc" / "akka"
-        val api = doc / "api"
-        val docs = doc / "docs"
-        val docJars = doc / "jars"
-        val libs = allSources.depJars ++ allSources.libJars
-        val (scalaLibs, akkaLibs) = libs partition (_.name.contains("scala-library"))
-        val lib = base / "lib"
-        val libAkka = lib / "akka"
-        val src = base / "src" / "akka"
-        IO.delete(unzipped)
-        copyDirectory(distBase, base, setExecutable = true)
-        copyDirectory(allSources.api, api)
-        copyDirectory(allSources.docs, docs)
-        copyFlat(allSources.docJars, docJars)
-        copyFlat(scalaLibs, lib)
-        copyFlat(akkaLibs, libAkka)
-        copyFlat(allSources.srcJars, src)
-        zip(unzipped, zipFile)
-      }
-    }
-  }
-
-  def copyDirectory(source: File, target: File, overwrite: Boolean = false, preserveLastModified: Boolean = false, setExecutable: Boolean = false): Set[File] = {
-    val sources = (source ***) pair rebase(source, target)
-    copyMapped(sources, overwrite, preserveLastModified, setExecutable)
-  }
-
-  def copyFlat(files: Seq[File], target: File, overwrite: Boolean = false, preserveLastModified: Boolean = false, setExecutable: Boolean = false): Set[File] = {
-    IO.createDirectory(target)
-    val sources = files map { f => (f, target / f.name) }
-    copyMapped(sources, overwrite, preserveLastModified, setExecutable)
-  }
-
-  def copyMapped(sources: Traversable[(File, File)], overwrite: Boolean, preserveLastModified: Boolean, setExecutable: Boolean): Set[File] = {
-    sources map { Function.tupled(copy(overwrite, preserveLastModified, setExecutable)) } toSet
-  }
-
-  def copy(overwrite: Boolean, preserveLastModified: Boolean, setExecutable: Boolean)(source: File, target: File): File = {
-    if (overwrite || !target.exists || source.lastModified > target.lastModified) {
-      if (source.isDirectory) IO.createDirectory(target)
-      else {
-        IO.createDirectory(target.getParentFile)
-        IO.copyFile(source, target, preserveLastModified)
-        if (setExecutable) target.setExecutable(source.canExecute, false)
-      }
-    }
-    target
-  }
-
-  def zip(source: File, target: File): File = {
-    val files = source ** -DirectoryFilter
-    val sources = files pair relativeTo(source)
-    IO.zip(sources, target)
-    target
-  }
-}

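The deleted file is written against sbt 0.13's <<= operator, which had already been superseded by := with .value by the time of this commit. For orientation only, an assumed translation of three of the removed settings into the macro-based syntax (not code from the commit):

    // Assumed modern-syntax equivalents of the deleted <<= settings.
    distDirectory := crossTarget.value / "dist"
    distUnzipped := distDirectory.value / "unzipped"
    distFile := distDirectory.value / ("akka_" + scalaBinaryVersion.value + "-" + version.value + ".zip")
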
@@ -8,7 +8,6 @@ import sbt.Keys._
 import java.io.File
 import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
 import com.typesafe.sbt.pgp.PgpKeys.publishSigned
-import com.typesafe.sbt.S3Plugin.S3
 import sbtunidoc.Plugin.UnidocKeys._
 
 object Release {

@@ -19,21 +18,18 @@ object Release {
   )
 
   lazy val commandSettings = Seq(
-    commands ++= Seq(buildReleaseCommand, uploadReleaseCommand)
+    commands += buildReleaseCommand
   )
 
   def buildReleaseCommand = Command.command("buildRelease") { state =>
     val extracted = Project.extract(state)
     val release = extracted.get(releaseDirectory)
-    val dist = extracted.get(Dist.distDirectory)
     val releaseVersion = extracted.get(version)
     val projectRef = extracted.get(thisProjectRef)
     val repo = extracted.get(Publish.defaultPublishTo)
     val state1 = extracted.runAggregated(publishSigned in projectRef, state)
     val (state2, Seq(api, japi)) = extracted.runTask(unidoc in Compile, state1)
     val (state3, docs) = extracted.runTask(generate in Sphinx, state2)
-    val (state4, _) = extracted.runTask(Dist.dist, state3)
-    val (state5, activatorDist) = extracted.runTask(ActivatorDist.activatorDist in LocalProject(AkkaBuild.samples.id), state4)
 
     IO.delete(release)
     IO.createDirectory(release)

@@ -42,20 +38,7 @@ object Release {
     IO.copyDirectory(japi, release / "japi" / "akka" / releaseVersion)
     IO.copyDirectory(docs, release / "docs" / "akka" / releaseVersion)
 
-    // copy all distributions from dist dir to downloads dir
-    // may contain distributions from cross-builds
-    for (f <- (dist * "akka_*.zip").get)
-      IO.copyFile(f, release / "downloads" / f.name)
-
-    for (f <- (activatorDist * "*.zip").get)
-      IO.copyFile(f, release / "downloads" / f.name)
-
-    state5
+    state3
   }
 
-  def uploadReleaseCommand = Command.command("uploadRelease") { state =>
-    val extracted = Project.extract(state)
-    val (state1, _) = extracted.runTask(S3.upload, state)
-    state1
-  }
 }

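The change from state5 to state3 above is forced by sbt's command pattern: each Extracted.runTask call returns an updated State that has to be threaded into the next call and ultimately returned, so dropping the Dist.dist and ActivatorDist steps makes state3 the last state produced. A sketch of that threading with a hypothetical command (sbt 0.13 API):

    import sbt._
    import sbt.Keys._

    // Each runTask returns a new State carrying the effects of the task just
    // run; returning a stale State would silently discard later effects.
    def demoCommand = Command.command("demo") { state =>
      val extracted = Project.extract(state)
      val (state1, _) = extracted.runTask(compile in Compile, state)
      val (state2, _) = extracted.runTask(packageBin in Compile, state1)
      state2
    }
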
@@ -19,8 +19,6 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.13")
 
 addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
 
-addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")
-
 addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.3")
 
 addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "0.2.2")

@@ -58,18 +58,8 @@
 # 3.2) Install your public ssh key to avoid typing in your password.
 #      From the command line:
 #      shell> cat ~/.ssh/id_rsa.pub | ssh akkarepo@repo.akka.io "cat >> ~/.ssh/authorized_keys"
 #
-# 4) You must have upload access to S3 bucket "downloads.typesafe.com"
-#
-# 4.1) Ask Akka team member for the AWS access key for 'akka.team' user.
-#
-# 4.2) Add your credentials to sbt by adding this to your global.sbt file
-#      credentials += Credentials("Amazon S3",
-#                                 "downloads.typesafe.com.s3.amazonaws.com",
-#                                 "<Access Key Id>",
-#                                 "<Secret Access Key>")
-#
-# 5) Have access to github.com/akka/akka. This should be a given.
+# 4) Have access to github.com/akka/akka. This should be a given.
 #
 # Now you should be all set to run the script
 #

@@ -362,11 +352,9 @@ echolog "Pushing ${release_dir} to ${publish_path} ..."
 if [ $dry_run ]; then
   echodry "Not actually pushing to server. Command:"
   echodry "  rsync -rlpvz --chmod=Dg+ws,Fg+w --exclude ${release_dir}/downloads ${release_dir}/ ${publish_path}/"
-  echodry "  sbt uploadRelease"
 else
   important ssh ${release_server} "cd ${release_path}/docs/akka; git add .; git commit -m 'before publishing version $version'; true"
   important rsync -rlpvz --chmod=Dg+ws,Fg+w --exclude ${release_dir}/downloads ${release_dir}/ ${publish_path}/
-  important sbt uploadRelease
   important ssh ${release_server} cp -v ${release_path}/docs/akka/${version}/_static/warnOldDocs.js ${release_path}/docs/akka
   important ssh ${release_server} ln -snvf ../../warnOldDocs.js ${release_path}/docs/akka/${version}/_static/warnOldDocs.js
   important ssh ${release_server} "cd ${release_path}/docs/akka; git add .; git commit -m 'publish version $version'"