Merge pull request #18893 from johanandren/wip-18471-metadata-plus-file-upload-johanandren

=doc WIP #18471 examples for file uploads
Konrad Malawski 2015-11-18 14:33:10 +01:00
commit 00a7ecd1f0
2 changed files with 118 additions and 0 deletions


@@ -0,0 +1,96 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server

import java.io.File

import akka.actor.ActorRef
import akka.http.scaladsl.model.Multipart
import akka.http.scaladsl.model.Multipart.FormData.BodyPart
import akka.stream.io.{ Framing, SynchronousFileSink }
import akka.stream.scaladsl._
import akka.util.ByteString

import scala.concurrent.Future
import scala.concurrent.duration._

class FileUploadExamplesSpec extends RoutingSpec {

  case class Video(file: File, title: String, author: String)

  object db {
    def create(video: Video): Future[Unit] = Future.successful(())
  }
"simple-upload" in {
val uploadVideo =
path("video") {
entity(as[Multipart.FormData]) { formData =>
// collect all parts of the multipart as it arrives into a map
val allPartsF: Future[Map[String, Any]] = formData.parts.mapAsync[(String, Any)](1) {
case b: BodyPart if b.name == "file" =>
// stream into a file as the chunks of it arrives and return a future
// file to where it got stored
val file = File.createTempFile("upload", "tmp")
b.entity.dataBytes.runWith(SynchronousFileSink(file)).map(_ =>
(b.name -> file))
case b: BodyPart =>
// collect form field values
b.toStrict(2.seconds).map(strict =>
(b.name -> strict.entity.data.utf8String))
}.runFold(Map.empty[String, Any])((map, tuple) => map + tuple)
val done = allPartsF.map { allParts =>
// You would have some better validation/unmarshalling here
db.create(Video(
file = allParts("file").asInstanceOf[File],
title = allParts("title").asInstanceOf[String],
author = allParts("author").asInstanceOf[String]))
}
// when processing have finished create a response for the user
onSuccess(allPartsF) { allParts =>
complete {
"ok!"
}
}
}
}
}

  object MetadataActor {
    case class Entry(id: Long, values: Seq[String])
  }

  // stand-in for an application specific actor that would process the metadata entries
  val metadataActor: ActorRef = system.deadLetters
"stream-csv-upload" in {
val splitLines = Framing.delimiter(ByteString("\n"), 256)
val csvUploads =
path("metadata" / LongNumber) { id =>
entity(as[Multipart.FormData]) { formData =>
val done = formData.parts.mapAsync(1) {
case b: BodyPart if b.filename.exists(_.endsWith(".csv")) =>
b.entity.dataBytes
.via(splitLines)
.map(_.utf8String.split(",").toVector)
.runForeach(csv =>
metadataActor ! MetadataActor.Entry(id, csv))
case _ => Future.successful(Unit)
}.runWith(Sink.ignore)
// when processing have finished create a response for the user
onSuccess(done) {
complete {
"ok!"
}
}
}
}
}
}


@@ -74,3 +74,25 @@ Failures and exceptions inside the Routing DSL
Exception handling within the Routing DSL is done by providing :class:`ExceptionHandler` s which are documented in-depth
in the :ref:`exception-handling-scala` section of the documentation. You can use them to transform exceptions into
:class:`HttpResponse` s with appropriate error codes and human-readable failure descriptions.
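
A minimal sketch of such a handler, assuming the standard ``Directives`` are in scope (the handler,
route and messages below are illustrative and not part of the example code in this commit):

.. code-block:: scala

   import akka.http.scaladsl.model.HttpResponse
   import akka.http.scaladsl.model.StatusCodes.InternalServerError
   import akka.http.scaladsl.server._
   import Directives._

   // turn arithmetic failures into a 500 with a human-readable message
   val myExceptionHandler = ExceptionHandler {
     case _: ArithmeticException =>
       complete(HttpResponse(InternalServerError, entity = "Something went wrong with the numbers"))
   }

   val route = handleExceptions(myExceptionHandler) {
     path("divide") {
       complete((1 / 0).toString) // throws, handled by the handler above
     }
   }
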
File uploads
^^^^^^^^^^^^

Handling a simple file upload from, for example, a browser form with a ``file`` input can be done
by accepting a ``Multipart.FormData`` entity. Note that the body parts are a ``Source`` rather than
all available right away, and so is the payload of each individual body part, so you will need to
consume those streams both for the file and for the form fields.

Here is a simple example which just dumps the uploaded file into a temporary file on disk, collects
some form fields and saves an entry to a fictitious database:

.. includecode2:: ../../code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala
   :snippet: simple-upload
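
If you want to exercise such a route from a routing test, one way is to build a strict
``Multipart.FormData`` and post it to the route. A minimal sketch, assuming the route test kit is
available as in the surrounding spec and that the part names, values and assertion are only
illustrative:

.. code-block:: scala

   import akka.http.scaladsl.model._

   // a strict multipart request, similar to what a browser form would send;
   // the part names must match what the route expects
   val multipartForm =
     Multipart.FormData(
       Multipart.FormData.BodyPart.Strict(
         "file",
         HttpEntity("2,3,5\n7,11,13"),
         Map("filename" -> "data.txt")),
       Multipart.FormData.BodyPart.Strict("title", HttpEntity("Primes")),
       Multipart.FormData.BodyPart.Strict("author", HttpEntity("Jane Doe")))

   Post("/video", multipartForm) ~> uploadVideo ~> check {
     responseAs[String] shouldEqual "ok!"
   }
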
You can transform the uploaded files as they arrive rather than storing them in a temporary file as
in the previous example. In this example we accept any number of ``.csv`` files, parse those into lines
and split each line before we send it to an actor for further processing:

.. includecode2:: ../../code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala
   :snippet: stream-csv-upload
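
As with the previous example, a quick way to try the route out is a routing test posting a small
CSV file part. Again just a sketch, with an illustrative file name and contents:

.. code-block:: scala

   import akka.http.scaladsl.model._

   // the route only looks at parts whose filename ends in ".csv"
   val csvPart =
     Multipart.FormData.BodyPart.Strict(
       "csv",
       HttpEntity("2,3,5\n7,11,13\n"),
       Map("filename" -> "primes.csv"))

   Post("/metadata/42", Multipart.FormData(csvPart)) ~> csvUploads ~> check {
     responseAs[String] shouldEqual "ok!"
   }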