diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala
index f95dad83d9..914f834f31 100644
--- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala
+++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala
@@ -22,14 +22,12 @@ class GzipSpec extends CoderSpec {
 
   override def extraTests(): Unit = {
     "decode concatenated compressions" in {
-      pending // FIXME: unbreak
       ourDecode(Seq(encode("Hello, "), encode("dear "), encode("User!")).join) should readAs("Hello, dear User!")
     }
     "provide a better compression ratio than the standard Gzip/Gunzip streams" in {
       ourEncode(largeTextBytes).length should be < streamEncode(largeTextBytes).length
     }
     "throw an error on truncated input" in {
-      pending // FIXME: unbreak
       val ex = the[RuntimeException] thrownBy ourDecode(streamEncode(smallTextBytes).dropRight(5))
       ex.getCause.getMessage should equal("Truncated GZIP stream")
     }
diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala
index 951328895b..e4227db256 100644
--- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala
+++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala
@@ -28,7 +28,7 @@ trait Decoder {
 
   def decoderFlow: Flow[ByteString, ByteString, Unit]
   def decode(input: ByteString)(implicit mat: Materializer): Future[ByteString] =
-    Source.single(input).via(decoderFlow).runWith(Sink.head)
+    Source.single(input).via(decoderFlow).runWith(Sink.fold(ByteString.empty)(_ ++ _))
 }
 object Decoder {
   val MaxBytesPerChunkDefault: Int = 65536