#3015 Upload dist to S3 downloads.typesafe.com

This commit is contained in:
Patrik Nordwall 2013-08-21 09:22:46 +02:00
parent ffea36a8c8
commit a6f8b5516a
4 changed files with 38 additions and 5 deletions

View file

@@ -27,6 +27,7 @@ import java.util.Properties
import annotation.tailrec
import Unidoc.{ JavaDoc, javadocSettings, junidocSources, sunidoc, unidocExclude }
import scalabuff.ScalaBuffPlugin._
import com.typesafe.sbt.S3Plugin.{ S3, s3Settings }
object AkkaBuild extends Build {
System.setProperty("akka.mode", "test") // Is there better place for this?
@@ -50,7 +51,7 @@ object AkkaBuild extends Build {
id = "akka",
base = file("."),
settings = parentSettings ++ Release.settings ++ Unidoc.settings ++ Publish.versionSettings ++
SphinxSupport.settings ++ Dist.settings ++ mimaSettings ++ unidocScaladocSettings ++
SphinxSupport.settings ++ Dist.settings ++ s3Settings ++ mimaSettings ++ unidocScaladocSettings ++
inConfig(JavaDoc)(Defaults.configSettings) ++ Seq(
testMailbox in GlobalScope := System.getProperty("akka.testMailbox", "false").toBoolean,
parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean,
@@ -65,7 +66,18 @@ object AkkaBuild extends Build {
sphinxInputs in Sphinx <<= sphinxInputs in Sphinx in LocalProject(docs.id) map { inputs => inputs.copy(tags = inputs.tags :+ "online") },
// don't regenerate the pdf, just reuse the akka-docs version
generatedPdf in Sphinx <<= generatedPdf in Sphinx in LocalProject(docs.id) map identity,
generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(docs.id) map identity
generatedEpub in Sphinx <<= generatedEpub in Sphinx in LocalProject(docs.id) map identity,
S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com",
S3.progress in S3.upload := true,
mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) =>
def distMapping(extension: String): (File, String) = {
val file = d / "downloads" / ("akka-" + v + "." + extension)
file -> ("akka/" + file.getName)
}
Seq(distMapping("zip"), distMapping("tgz"))
}
),
aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor,
mailboxes, zeroMQ, kernel, akkaSbtPlugin, osgi, osgiAries, docs, contrib, samples, channels, channelsTests,

View file

@@ -5,6 +5,7 @@ import sbt.Keys._
import java.io.File
import com.typesafe.sbt.site.SphinxSupport.{ generate, Sphinx }
import com.typesafe.sbt.pgp.PgpKeys.publishSigned
import com.typesafe.sbt.S3Plugin.S3
object Release {
val releaseDirectory = SettingKey[File]("release-directory")
@@ -14,7 +15,7 @@ object Release {
)
lazy val commandSettings = Seq(
commands += buildReleaseCommand
commands ++= Seq(buildReleaseCommand, uploadReleaseCommand)
)
def buildReleaseCommand = Command.command("build-release") { state =>
@@ -36,4 +37,10 @@ object Release {
IO.copyFile(dist, release / "downloads" / dist.name)
state4
}
def uploadReleaseCommand = Command.command("upload-release") { state =>
val extracted = Project.extract(state)
val (state1, _) = extracted.runTask(S3.upload, state)
state1
}
}

View file

@@ -19,3 +19,5 @@ addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.2")
addSbtPlugin("com.github.sbt" %% "sbt-scalabuff" % "0.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8")
addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")

View file

@@ -75,7 +75,17 @@
# From the command line:
# shell> cat ~/.ssh/id_rsa.pub | ssh akkarepo@repo.akka.io "cat >> ~/.ssh/authorized_keys"
#
# 5) Have access to github.com/akka/akka. This should be a given.
# 5) You must have upload access to S3 bucket "downloads.typesafe.com"
#
# 5.1) Ask Akka team member for the AWS access key for 'akka.team' user.
#
# 5.2) Add your credentials to sbt by adding this to your global.sbt file
# credentials += Credentials("Amazon S3",
# "downloads.typesafe.com.s3.amazonaws.com",
# "<Access Key Id>",
# "<Secret Access Key>")
#
# 6) Have access to github.com/akka/akka. This should be a given.
#
# Now you should be all set to run the script
#
@@ -369,8 +379,10 @@ echolog "Pushing ${release_dir} to ${publish_path} ..."
if [ $dry_run ]; then
echodry "Not actually pushing to server. Command:"
echodry " rsync -rlpvz --chmod=Dg+ws,Fg+w ${release_dir}/ ${publish_path}/"
echodry " sbt upload-release"
else
important rsync -rlpvz --chmod=Dg+ws,Fg+w ${release_dir}/ ${publish_path}/
important rsync -rlpvz --chmod=Dg+ws,Fg+w --exclude ${release_dir}/downloads ${release_dir}/ ${publish_path}/
important sbt upload-release
fi
if [ $dry_run ]; then