diff --git a/akka-docs-dev/rst/scala/code/docs/http/HttpServerExampleSpec.scala b/akka-docs-dev/rst/scala/code/docs/http/HttpServerExampleSpec.scala index dcc859f462..b9b4d3f137 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/HttpServerExampleSpec.scala +++ b/akka-docs-dev/rst/scala/code/docs/http/HttpServerExampleSpec.scala @@ -26,7 +26,7 @@ class HttpServerExampleSpec implicit val system = ActorSystem() import system.dispatcher - val materializer = FlowMaterializer(MaterializerSettings()) + implicit val materializer = FlowMaterializer(MaterializerSettings()) implicit val askTimeout: Timeout = 500.millis val bindingFuture = IO(Http) ? Http.Bind(interface = "localhost", port = 8080) @@ -37,7 +37,7 @@ class HttpServerExampleSpec println("Accepted new connection from " + remoteAddress) // handle connection here - }, materializer) + }) } //#bind-example } @@ -52,7 +52,7 @@ class HttpServerExampleSpec implicit val system = ActorSystem() import system.dispatcher - val materializer = FlowMaterializer(MaterializerSettings()) + implicit val materializer = FlowMaterializer(MaterializerSettings()) implicit val askTimeout: Timeout = 500.millis val bindingFuture = IO(Http) ? Http.Bind(interface = "localhost", port = 8080) @@ -78,8 +78,8 @@ class HttpServerExampleSpec case Http.IncomingConnection(remoteAddress, requestProducer, responseConsumer) ⇒ println("Accepted new connection from " + remoteAddress) - Flow(requestProducer).map(requestHandler).produceTo(responseConsumer, materializer) - }, materializer) + Flow(requestProducer).map(requestHandler).produceTo(responseConsumer) + }) } //#full-server-example } diff --git a/akka-http-core/src/main/scala/akka/http/HttpManager.scala b/akka-http-core/src/main/scala/akka/http/HttpManager.scala index 455e3f3504..394d79a760 100644 --- a/akka-http-core/src/main/scala/akka/http/HttpManager.scala +++ b/akka-http-core/src/main/scala/akka/http/HttpManager.scala @@ -61,11 +61,11 @@ private[http] class HttpManager(httpSettings: HttpExt#Settings) extends Actor wi tcpServerBindingFuture onComplete { case Success(StreamTcp.TcpServerBinding(localAddress, connectionStream)) ⇒ log.info("Bound to {}", endpoint) - val materializer = FlowMaterializer(materializerSettings) + implicit val materializer = FlowMaterializer(materializerSettings) val httpServerPipeline = new HttpServerPipeline(effectiveSettings, materializer, log) val httpConnectionStream = Flow(connectionStream) .map(httpServerPipeline) - .toPublisher(materializer) + .toPublisher() commander ! 
Http.ServerBinding(localAddress, httpConnectionStream) case Failure(error) ⇒ diff --git a/akka-http-core/src/main/scala/akka/http/client/HttpClientPipeline.scala b/akka-http-core/src/main/scala/akka/http/client/HttpClientPipeline.scala index 54825c080f..c73b36e5f4 100644 --- a/akka-http-core/src/main/scala/akka/http/client/HttpClientPipeline.scala +++ b/akka-http-core/src/main/scala/akka/http/client/HttpClientPipeline.scala @@ -38,7 +38,7 @@ private[http] class HttpClientPipeline(effectiveSettings: ClientConnectionSettin val requestMethodByPass = new RequestMethodByPass(tcpConn.remoteAddress) val (contextBypassSubscriber, contextBypassPublisher) = - Duct[(HttpRequest, Any)].map(_._2).build(materializer) + Duct[(HttpRequest, Any)].map(_._2).build()(materializer) val requestSubscriber = Duct[(HttpRequest, Any)] @@ -47,7 +47,7 @@ private[http] class HttpClientPipeline(effectiveSettings: ClientConnectionSettin .transform(responseRendererFactory.newRenderer) .flatten(FlattenStrategy.concat) .transform(errorLogger(log, "Outgoing request stream error")) - .produceTo(tcpConn.outputStream, materializer) + .produceTo(tcpConn.outputStream)(materializer) val responsePublisher = Flow(tcpConn.inputStream) @@ -59,7 +59,7 @@ private[http] class HttpClientPipeline(effectiveSettings: ClientConnectionSettin HttpResponse(statusCode, headers, createEntity(entityParts), protocol) } .zip(contextBypassPublisher) - .toPublisher(materializer) + .toPublisher()(materializer) val processor = HttpClientProcessor(requestSubscriber, responsePublisher) Http.OutgoingConnection(tcpConn.remoteAddress, tcpConn.localAddress, processor) diff --git a/akka-http-core/src/main/scala/akka/http/model/HttpEntity.scala b/akka-http-core/src/main/scala/akka/http/model/HttpEntity.scala index 715a77b260..999bdcec63 100644 --- a/akka-http-core/src/main/scala/akka/http/model/HttpEntity.scala +++ b/akka-http-core/src/main/scala/akka/http/model/HttpEntity.scala @@ -59,7 +59,7 @@ sealed trait HttpEntity extends japi.HttpEntity { throw new java.util.concurrent.TimeoutException( s"HttpEntity.toStrict timed out after $timeout while still waiting for outstanding data") }) - .toFuture(materializer) + .toFuture()(materializer) /** * Creates a copy of this HttpEntity with the `contentType` overridden with the given one. @@ -170,7 +170,7 @@ object HttpEntity { override def isChunked: Boolean = true def dataBytes(materializer: FlowMaterializer): Publisher[ByteString] = - Flow(chunks).map(_.data).filter(_.nonEmpty).toPublisher(materializer) + Flow(chunks).map(_.data).filter(_.nonEmpty).toPublisher()(materializer) def withContentType(contentType: ContentType): Chunked = if (contentType == this.contentType) this else copy(contentType = contentType) @@ -186,7 +186,7 @@ object HttpEntity { def apply(contentType: ContentType, chunks: Publisher[ByteString], materializer: FlowMaterializer): Chunked = Chunked(contentType, Flow(chunks).collect[ChunkStreamPart] { case b: ByteString if b.nonEmpty ⇒ Chunk(b) - }.toPublisher(materializer)) + }.toPublisher()(materializer)) } /** diff --git a/akka-http-core/src/main/scala/akka/http/model/MultipartContent.scala b/akka-http-core/src/main/scala/akka/http/model/MultipartContent.scala index 914a9dd1d7..34917c4e30 100644 --- a/akka-http-core/src/main/scala/akka/http/model/MultipartContent.scala +++ b/akka-http-core/src/main/scala/akka/http/model/MultipartContent.scala @@ -57,7 +57,7 @@ case class MultipartFormData(parts: Publisher[BodyPart]) extends MultipartParts * hint. 
*/ def toStrict(materializer: FlowMaterializer, maxFieldCount: Int = 1000)(implicit ec: ExecutionContext): Future[StrictMultipartFormData] = - Flow(parts).grouped(maxFieldCount).toFuture(materializer).map(new StrictMultipartFormData(_)) + Flow(parts).grouped(maxFieldCount).toFuture()(materializer).map(new StrictMultipartFormData(_)) } /** diff --git a/akka-http-core/src/main/scala/akka/http/parsing/BodyPartParser.scala b/akka-http-core/src/main/scala/akka/http/parsing/BodyPartParser.scala index 8f2686a6a1..7a741f59d1 100644 --- a/akka-http-core/src/main/scala/akka/http/parsing/BodyPartParser.scala +++ b/akka-http-core/src/main/scala/akka/http/parsing/BodyPartParser.scala @@ -145,7 +145,7 @@ private[http] final class BodyPartParser(defaultContentType: ContentType, emitPartChunk: (List[HttpHeader], ContentType, ByteString) ⇒ Unit = { (headers, ct, bytes) ⇒ emit(BodyPartStart(headers, entityParts ⇒ HttpEntity.CloseDelimited(ct, - Flow(entityParts).collect { case EntityPart(data) ⇒ data }.toPublisher(materializer)))) + Flow(entityParts).collect { case EntityPart(data) ⇒ data }.toPublisher()(materializer)))) emit(bytes) }, emitFinalPartChunk: (List[HttpHeader], ContentType, ByteString) ⇒ Unit = { diff --git a/akka-http-core/src/main/scala/akka/http/parsing/HttpMessageParser.scala b/akka-http-core/src/main/scala/akka/http/parsing/HttpMessageParser.scala index b9fbc2af29..73d089d936 100644 --- a/akka-http-core/src/main/scala/akka/http/parsing/HttpMessageParser.scala +++ b/akka-http-core/src/main/scala/akka/http/parsing/HttpMessageParser.scala @@ -240,13 +240,13 @@ private[http] abstract class HttpMessageParser[Output >: ParserOutput.MessageOut def defaultEntity(cth: Option[`Content-Type`], contentLength: Long, materializer: FlowMaterializer)(entityParts: Publisher[_ <: ParserOutput]): HttpEntity.Regular = { - val data = Flow(entityParts).collect { case ParserOutput.EntityPart(bytes) ⇒ bytes }.toPublisher(materializer) + val data = Flow(entityParts).collect { case ParserOutput.EntityPart(bytes) ⇒ bytes }.toPublisher()(materializer) HttpEntity.Default(contentType(cth), contentLength, data) } def chunkedEntity(cth: Option[`Content-Type`], materializer: FlowMaterializer)(entityChunks: Publisher[_ <: ParserOutput]): HttpEntity.Regular = { - val chunks = Flow(entityChunks).collect { case ParserOutput.EntityChunk(chunk) ⇒ chunk }.toPublisher(materializer) + val chunks = Flow(entityChunks).collect { case ParserOutput.EntityChunk(chunk) ⇒ chunk }.toPublisher()(materializer) HttpEntity.Chunked(contentType(cth), chunks) } } \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/parsing/HttpResponseParser.scala b/akka-http-core/src/main/scala/akka/http/parsing/HttpResponseParser.scala index 48c2a2c8d0..35b913985e 100644 --- a/akka-http-core/src/main/scala/akka/http/parsing/HttpResponseParser.scala +++ b/akka-http-core/src/main/scala/akka/http/parsing/HttpResponseParser.scala @@ -100,7 +100,7 @@ private[http] class HttpResponseParser(_settings: ParserSettings, } case None ⇒ emitResponseStart { entityParts ⇒ - val data = Flow(entityParts).collect { case ParserOutput.EntityPart(bytes) ⇒ bytes }.toPublisher(materializer) + val data = Flow(entityParts).collect { case ParserOutput.EntityPart(bytes) ⇒ bytes }.toPublisher()(materializer) HttpEntity.CloseDelimited(contentType(cth), data) } parseToCloseBody(input, bodyStart) diff --git a/akka-http-core/src/main/scala/akka/http/rendering/BodyPartRenderer.scala b/akka-http-core/src/main/scala/akka/http/rendering/BodyPartRenderer.scala index 
1a5b7162eb..f04e85b228 100644 --- a/akka-http-core/src/main/scala/akka/http/rendering/BodyPartRenderer.scala +++ b/akka-http-core/src/main/scala/akka/http/rendering/BodyPartRenderer.scala @@ -62,8 +62,8 @@ private[http] class BodyPartRenderer(boundary: String, } def bodyPartChunks(data: Publisher[ByteString]): List[Publisher[ChunkStreamPart]] = { - val entityChunks = Flow(data).map[ChunkStreamPart](Chunk(_)).toPublisher(materializer) - Flow[ChunkStreamPart](Chunk(r.get) :: Nil).concat(entityChunks).toPublisher(materializer) :: Nil + val entityChunks = Flow(data).map[ChunkStreamPart](Chunk(_)).toPublisher()(materializer) + Flow[ChunkStreamPart](Chunk(r.get) :: Nil).concat(entityChunks).toPublisher()(materializer) :: Nil } def completePartRendering(): List[Publisher[ChunkStreamPart]] = @@ -73,8 +73,8 @@ private[http] class BodyPartRenderer(boundary: String, case Default(_, _, data) ⇒ bodyPartChunks(data) case CloseDelimited(_, data) ⇒ bodyPartChunks(data) case Chunked(_, chunks) ⇒ - val entityChunks = Flow(chunks).filter(!_.isLastChunk).toPublisher(materializer) - Flow(Chunk(r.get) :: Nil).concat(entityChunks).toPublisher(materializer) :: Nil + val entityChunks = Flow(chunks).filter(!_.isLastChunk).toPublisher()(materializer) + Flow(Chunk(r.get) :: Nil).concat(entityChunks).toPublisher()(materializer) :: Nil } renderBoundary() diff --git a/akka-http-core/src/main/scala/akka/http/rendering/HttpRequestRendererFactory.scala b/akka-http-core/src/main/scala/akka/http/rendering/HttpRequestRendererFactory.scala index c4f6f8398b..95375b42ba 100644 --- a/akka-http-core/src/main/scala/akka/http/rendering/HttpRequestRendererFactory.scala +++ b/akka-http-core/src/main/scala/akka/http/rendering/HttpRequestRendererFactory.scala @@ -106,12 +106,12 @@ private[http] class HttpRequestRendererFactory(userAgentHeader: Option[headers.` case HttpEntity.Default(_, contentLength, data) ⇒ renderContentLength(contentLength) renderByteStrings(r, - Flow(data).transform(new CheckContentLengthTransformer(contentLength)).toPublisher(materializer), + Flow(data).transform(new CheckContentLengthTransformer(contentLength)).toPublisher()(materializer), materializer) case HttpEntity.Chunked(_, chunks) ⇒ r ~~ `Transfer-Encoding` ~~ ChunkedBytes ~~ CrLf ~~ CrLf - renderByteStrings(r, Flow(chunks).transform(new ChunkTransformer).toPublisher(materializer), materializer) + renderByteStrings(r, Flow(chunks).transform(new ChunkTransformer).toPublisher()(materializer), materializer) } renderRequestLine() diff --git a/akka-http-core/src/main/scala/akka/http/rendering/HttpResponseRendererFactory.scala b/akka-http-core/src/main/scala/akka/http/rendering/HttpResponseRendererFactory.scala index cf49ba1424..c5bf94508a 100644 --- a/akka-http-core/src/main/scala/akka/http/rendering/HttpResponseRendererFactory.scala +++ b/akka-http-core/src/main/scala/akka/http/rendering/HttpResponseRendererFactory.scala @@ -132,7 +132,7 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser renderHeaders(headers.toList) renderEntityContentType(r, entity) r ~~ `Content-Length` ~~ contentLength ~~ CrLf ~~ CrLf - byteStrings(Flow(data).transform(new CheckContentLengthTransformer(contentLength)).toPublisher(materializer)) + byteStrings(Flow(data).transform(new CheckContentLengthTransformer(contentLength)).toPublisher()(materializer)) case HttpEntity.CloseDelimited(_, data) ⇒ renderHeaders(headers.toList, alwaysClose = true) @@ -142,14 +142,14 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser case 
HttpEntity.Chunked(contentType, chunks) ⇒ if (ctx.requestProtocol == `HTTP/1.0`) - completeResponseRendering(HttpEntity.CloseDelimited(contentType, Flow(chunks).map(_.data).toPublisher(materializer))) + completeResponseRendering(HttpEntity.CloseDelimited(contentType, Flow(chunks).map(_.data).toPublisher()(materializer))) else { renderHeaders(headers.toList) renderEntityContentType(r, entity) if (!entity.isKnownEmpty || ctx.requestMethod == HttpMethods.HEAD) r ~~ `Transfer-Encoding` ~~ ChunkedBytes ~~ CrLf r ~~ CrLf - byteStrings(Flow(chunks).transform(new ChunkTransformer).toPublisher(materializer)) + byteStrings(Flow(chunks).transform(new ChunkTransformer).toPublisher()(materializer)) } } diff --git a/akka-http-core/src/main/scala/akka/http/rendering/RenderSupport.scala b/akka-http-core/src/main/scala/akka/http/rendering/RenderSupport.scala index 96f9684fe0..c6623c900e 100644 --- a/akka-http-core/src/main/scala/akka/http/rendering/RenderSupport.scala +++ b/akka-http-core/src/main/scala/akka/http/rendering/RenderSupport.scala @@ -38,7 +38,7 @@ private object RenderSupport { skipEntity: Boolean = false): List[Publisher[ByteString]] = { val messageStart = SynchronousPublisherFromIterable(r.get :: Nil) val messageBytes = - if (!skipEntity) Flow(messageStart).concat(entityBytes).toPublisher(materializer) + if (!skipEntity) Flow(messageStart).concat(entityBytes).toPublisher()(materializer) else messageStart messageBytes :: Nil } diff --git a/akka-http-core/src/main/scala/akka/http/server/HttpServerPipeline.scala b/akka-http-core/src/main/scala/akka/http/server/HttpServerPipeline.scala index a8809b27be..6513f45ef9 100644 --- a/akka-http-core/src/main/scala/akka/http/server/HttpServerPipeline.scala +++ b/akka-http-core/src/main/scala/akka/http/server/HttpServerPipeline.scala @@ -36,7 +36,7 @@ private[http] class HttpServerPipeline(settings: ServerSettings, val (applicationBypassSubscriber, applicationBypassPublisher) = Duct[(RequestOutput, Publisher[RequestOutput])] .collect[MessageStart with RequestOutput] { case (x: MessageStart, _) ⇒ x } - .build(materializer) + .build()(materializer) val requestPublisher = Flow(tcpConn.inputStream) @@ -53,7 +53,7 @@ private[http] class HttpServerPipeline(settings: ServerSettings, val effectiveUri = HttpRequest.effectiveUri(uri, headers, securedConnection = false, settings.defaultHostHeader) HttpRequest(method, effectiveUri, headers, createEntity(entityParts), protocol) } - .toPublisher(materializer) + .toPublisher()(materializer) val responseSubscriber = Duct[HttpResponse] @@ -62,7 +62,7 @@ private[http] class HttpServerPipeline(settings: ServerSettings, .transform(responseRendererFactory.newRenderer) .flatten(FlattenStrategy.concat) .transform(errorLogger(log, "Outgoing response stream error")) - .produceTo(tcpConn.outputStream, materializer) + .produceTo(tcpConn.outputStream)(materializer) Http.IncomingConnection(tcpConn.remoteAddress, requestPublisher, responseSubscriber) } diff --git a/akka-http-core/src/main/scala/akka/http/util/package.scala b/akka-http-core/src/main/scala/akka/http/util/package.scala index 79a8f4696d..1cc467e08b 100644 --- a/akka-http-core/src/main/scala/akka/http/util/package.scala +++ b/akka-http-core/src/main/scala/akka/http/util/package.scala @@ -31,7 +31,7 @@ package object util { private[http] implicit class FlowWithHeadAndTail[T](val underlying: Flow[Publisher[T]]) extends AnyVal { def headAndTail(materializer: FlowMaterializer): Flow[(T, Publisher[T])] = underlying.map { p ⇒ - Flow(p).prefixAndTail(1).map { case (prefix, tail) 
⇒ (prefix.head, tail) }.toPublisher(materializer) + Flow(p).prefixAndTail(1).map { case (prefix, tail) ⇒ (prefix.head, tail) }.toPublisher()(materializer) }.flatten(FlattenStrategy.Concat()) } diff --git a/akka-http-core/src/test/scala/akka/http/ClientServerSpec.scala b/akka-http-core/src/test/scala/akka/http/ClientServerSpec.scala index 3d52e2e306..83db54a9ef 100644 --- a/akka-http-core/src/test/scala/akka/http/ClientServerSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/ClientServerSpec.scala @@ -93,7 +93,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll { private val HttpRequest(POST, uri, List(`User-Agent`(_), Host(_, _), Accept(Vector(MediaRanges.`*/*`))), Chunked(`chunkedContentType`, chunkStream), HttpProtocols.`HTTP/1.1`) = serverIn.expectNext() uri shouldEqual Uri(s"http://$hostname:$port/chunked") - Await.result(Flow(chunkStream).grouped(4).toFuture(materializer), 100.millis) shouldEqual chunks + Await.result(Flow(chunkStream).grouped(4).toFuture()(materializer), 100.millis) shouldEqual chunks val serverOutSub = serverOut.expectSubscription() serverOutSub.sendNext(HttpResponse(206, List(RawHeader("Age", "42")), chunkedEntity)) @@ -102,7 +102,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll { clientInSub.request(1) val (HttpResponse(StatusCodes.PartialContent, List(Date(_), Server(_), RawHeader("Age", "42")), Chunked(`chunkedContentType`, chunkStream2), HttpProtocols.`HTTP/1.1`), 12345678) = clientIn.expectNext() - Await.result(Flow(chunkStream2).grouped(1000).toFuture(materializer), 100.millis) shouldEqual chunks + Await.result(Flow(chunkStream2).grouped(1000).toFuture()(materializer), 100.millis) shouldEqual chunks } } diff --git a/akka-http-core/src/test/scala/akka/http/TestClient.scala b/akka-http-core/src/test/scala/akka/http/TestClient.scala index 6dd9c76fc8..c9922b50d2 100644 --- a/akka-http-core/src/test/scala/akka/http/TestClient.scala +++ b/akka-http-core/src/test/scala/akka/http/TestClient.scala @@ -25,7 +25,7 @@ object TestClient extends App { implicit val system = ActorSystem("ServerTest", testConf) import system.dispatcher - val materializer = FlowMaterializer(MaterializerSettings()) + implicit val materializer = FlowMaterializer(MaterializerSettings()) implicit val askTimeout: Timeout = 500.millis val host = "spray.io" @@ -37,8 +37,8 @@ object TestClient extends App { } yield response.header[headers.Server] def sendRequest(request: HttpRequest, connection: Http.OutgoingConnection): Future[HttpResponse] = { - Flow(List(HttpRequest() -> 'NoContext)).produceTo(connection.processor, materializer) - Flow(connection.processor).map(_._1).toFuture(materializer) + Flow(List(HttpRequest() -> 'NoContext)).produceTo(connection.processor) + Flow(connection.processor).map(_._1).toFuture() } result onComplete { diff --git a/akka-http-core/src/test/scala/akka/http/TestServer.scala b/akka-http-core/src/test/scala/akka/http/TestServer.scala index 9e16fc292f..8f26eae6ad 100644 --- a/akka-http-core/src/test/scala/akka/http/TestServer.scala +++ b/akka-http-core/src/test/scala/akka/http/TestServer.scala @@ -30,17 +30,17 @@ object TestServer extends App { case _: HttpRequest ⇒ HttpResponse(404, entity = "Unknown resource!") } - val materializer = FlowMaterializer(MaterializerSettings()) + implicit val materializer = FlowMaterializer(MaterializerSettings()) implicit val askTimeout: Timeout = 500.millis val bindingFuture = IO(Http) ? 
Http.Bind(interface = "localhost", port = 8080) bindingFuture foreach { case Http.ServerBinding(localAddress, connectionStream) ⇒ - Flow(connectionStream).foreach({ + Flow(connectionStream).foreach { case Http.IncomingConnection(remoteAddress, requestPublisher, responseSubscriber) ⇒ println("Accepted new connection from " + remoteAddress) - Flow(requestPublisher).map(requestHandler).produceTo(responseSubscriber, materializer) - }, materializer) + Flow(requestPublisher).map(requestHandler).produceTo(responseSubscriber) + } } println(s"Server online at http://localhost:8080/\nPress RETURN to stop...") diff --git a/akka-http-core/src/test/scala/akka/http/model/HttpEntitySpec.scala b/akka-http-core/src/test/scala/akka/http/model/HttpEntitySpec.scala index 7ac0eb998c..9e94749e3f 100644 --- a/akka-http-core/src/test/scala/akka/http/model/HttpEntitySpec.scala +++ b/akka-http-core/src/test/scala/akka/http/model/HttpEntitySpec.scala @@ -78,7 +78,7 @@ class HttpEntitySpec extends FreeSpec with MustMatchers with BeforeAndAfterAll { } "Infinite data stream" in { val neverCompleted = Promise[ByteString]() - val stream: Publisher[ByteString] = Flow(neverCompleted.future).toPublisher(materializer) + val stream: Publisher[ByteString] = Flow(neverCompleted.future).toPublisher()(materializer) intercept[TimeoutException] { Await.result(Default(tpe, 42, stream).toStrict(100.millis, materializer), 150.millis) }.getMessage must be("HttpEntity.toStrict timed out after 100 milliseconds while still waiting for outstanding data") @@ -92,7 +92,7 @@ class HttpEntitySpec extends FreeSpec with MustMatchers with BeforeAndAfterAll { equal(bytes.toVector).matcher[Seq[ByteString]].compose { entity ⇒ val future = Flow(entity.dataBytes(materializer)) - .grouped(1000).toFuture(materializer) + .grouped(1000).toFuture()(materializer) Await.result(future, 250.millis) } diff --git a/akka-http-core/src/test/scala/akka/http/parsing/RequestParserSpec.scala b/akka-http-core/src/test/scala/akka/http/parsing/RequestParserSpec.scala index 41c8d6c7ac..82ee4064ca 100644 --- a/akka-http-core/src/test/scala/akka/http/parsing/RequestParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/parsing/RequestParserSpec.scala @@ -369,10 +369,10 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { case Right(request) ⇒ compactEntity(request.entity).map(x ⇒ Right(request.withEntity(x))) case Left(error) ⇒ Future.successful(Left(error)) } - }.toPublisher(materializer) + }.toPublisher()(materializer) } .flatten(FlattenStrategy.concat) - .grouped(1000).toFuture(materializer) + .grouped(1000).toFuture()(materializer) Await.result(future, 250.millis) } @@ -385,7 +385,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { } private def compactEntityChunks(data: Publisher[ChunkStreamPart]): Future[Publisher[ChunkStreamPart]] = - Flow(data).grouped(1000).toFuture(materializer) + Flow(data).grouped(1000).toFuture()(materializer) .map(publisher(_: _*)) .recover { case _: NoSuchElementException ⇒ publisher[ChunkStreamPart]() } diff --git a/akka-http-core/src/test/scala/akka/http/parsing/ResponseParserSpec.scala b/akka-http-core/src/test/scala/akka/http/parsing/ResponseParserSpec.scala index 15064177df..dca2c7458d 100644 --- a/akka-http-core/src/test/scala/akka/http/parsing/ResponseParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/parsing/ResponseParserSpec.scala @@ -227,10 +227,10 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { case 
Right(response) ⇒ compactEntity(response.entity).map(x ⇒ Right(response.withEntity(x))) case Left(error) ⇒ Future.successful(Left(error.info.formatPretty)) } - }.toPublisher(materializer) + }.toPublisher()(materializer) } .flatten(FlattenStrategy.concat) - .grouped(1000).toFuture(materializer) + .grouped(1000).toFuture()(materializer) Await.result(future, 250.millis) } @@ -247,7 +247,7 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { } private def compactEntityChunks(data: Publisher[ChunkStreamPart]): Future[Publisher[ChunkStreamPart]] = - Flow(data).grouped(1000).toFuture(materializer) + Flow(data).grouped(1000).toFuture()(materializer) .map(publisher(_: _*)) .recover { case _: NoSuchElementException ⇒ publisher[ChunkStreamPart]() diff --git a/akka-http-core/src/test/scala/akka/http/rendering/RequestRendererSpec.scala b/akka-http-core/src/test/scala/akka/http/rendering/RequestRendererSpec.scala index afb85976be..a7d11454dc 100644 --- a/akka-http-core/src/test/scala/akka/http/rendering/RequestRendererSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/rendering/RequestRendererSpec.scala @@ -190,7 +190,7 @@ class RequestRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll equal(expected.stripMarginWithNewline("\r\n")).matcher[String] compose { request ⇒ val renderer = newRenderer val byteStringPublisher :: Nil = renderer.onNext(RequestRenderingContext(request, serverAddress)) - val future = Flow(byteStringPublisher).grouped(1000).toFuture(materializer).map(_.reduceLeft(_ ++ _).utf8String) + val future = Flow(byteStringPublisher).grouped(1000).toFuture()(materializer).map(_.reduceLeft(_ ++ _).utf8String) Await.result(future, 250.millis) } } diff --git a/akka-http-core/src/test/scala/akka/http/rendering/ResponseRendererSpec.scala b/akka-http-core/src/test/scala/akka/http/rendering/ResponseRendererSpec.scala index a988aa7a4a..958ff59264 100644 --- a/akka-http-core/src/test/scala/akka/http/rendering/ResponseRendererSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/rendering/ResponseRendererSpec.scala @@ -337,7 +337,7 @@ class ResponseRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll equal(expected.stripMarginWithNewline("\r\n") -> close).matcher[(String, Boolean)] compose { ctx ⇒ val renderer = newRenderer val byteStringPublisher :: Nil = renderer.onNext(ctx) - val future = Flow(byteStringPublisher).grouped(1000).toFuture(materializer).map(_.reduceLeft(_ ++ _).utf8String) + val future = Flow(byteStringPublisher).grouped(1000).toFuture()(materializer).map(_.reduceLeft(_ ++ _).utf8String) Await.result(future, 250.millis) -> renderer.isComplete } diff --git a/akka-http/src/main/scala/akka/http/marshalling/MultipartMarshallers.scala b/akka-http/src/main/scala/akka/http/marshalling/MultipartMarshallers.scala index 52821c8b49..2e2df5f56b 100644 --- a/akka-http/src/main/scala/akka/http/marshalling/MultipartMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/marshalling/MultipartMarshallers.scala @@ -39,7 +39,7 @@ trait MultipartMarshallers { Marshaller.withOpenCharset(mediaTypeWithBoundary) { (value, charset) ⇒ val log = actorSystem(refFactory).log val bodyPartRenderer = new BodyPartRenderer(boundary, charset.nioCharset, partHeadersSizeHint = 128, fm, log) - val chunks = Flow(value.parts).transform(bodyPartRenderer).flatten(FlattenStrategy.concat).toPublisher(fm) + val chunks = Flow(value.parts).transform(bodyPartRenderer).flatten(FlattenStrategy.concat).toPublisher()(fm) 
HttpEntity.Chunked(ContentType(mediaTypeWithBoundary), chunks) } } diff --git a/akka-http/src/main/scala/akka/http/unmarshalling/MultipartUnmarshallers.scala b/akka-http/src/main/scala/akka/http/unmarshalling/MultipartUnmarshallers.scala index 39ea9e7c49..bc608b64ed 100644 --- a/akka-http/src/main/scala/akka/http/unmarshalling/MultipartUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/unmarshalling/MultipartUnmarshallers.scala @@ -47,7 +47,7 @@ trait MultipartUnmarshallers { case (BodyPartParser.BodyPartStart(headers, createEntity), entityParts) ⇒ BodyPart(createEntity(entityParts), headers) case (BodyPartParser.ParseError(errorInfo), _) ⇒ throw new ParsingException(errorInfo) - }.toPublisher(fm) + }.toPublisher()(fm) Unmarshalling.Success(create(bodyParts)) } } else Unmarshalling.UnsupportedContentType(ContentTypeRange(mediaRange) :: Nil) @@ -61,7 +61,7 @@ trait MultipartUnmarshallers { refFactory: ActorRefFactory): FromEntityUnmarshaller[MultipartFormData] = multipartPartsUnmarshaller(`multipart/form-data`, ContentTypes.`application/octet-stream`) { bodyParts ⇒ def verify(part: BodyPart): BodyPart = part // TODO - val parts = if (verifyIntegrity) Flow(bodyParts).map(verify).toPublisher(fm) else bodyParts + val parts = if (verifyIntegrity) Flow(bodyParts).map(verify).toPublisher()(fm) else bodyParts MultipartFormData(parts) } diff --git a/akka-http/src/main/scala/akka/http/unmarshalling/PredefinedFromEntityUnmarshallers.scala b/akka-http/src/main/scala/akka/http/unmarshalling/PredefinedFromEntityUnmarshallers.scala index e97340d23b..fa720f7b86 100644 --- a/akka-http/src/main/scala/akka/http/unmarshalling/PredefinedFromEntityUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/unmarshalling/PredefinedFromEntityUnmarshallers.scala @@ -19,7 +19,7 @@ trait PredefinedFromEntityUnmarshallers extends MultipartUnmarshallers { implicit def byteStringUnmarshaller(implicit fm: FlowMaterializer): FromEntityUnmarshaller[ByteString] = Unmarshaller { entity ⇒ if (entity.isKnownEmpty) Future.successful(Unmarshalling.Success(ByteString.empty)) - else Flow(entity.dataBytes(fm)).fold(ByteString.empty)(_ ++ _).map(Unmarshalling.Success(_)).toFuture(fm) + else Flow(entity.dataBytes(fm)).fold(ByteString.empty)(_ ++ _).map(Unmarshalling.Success(_)).toFuture()(fm) } implicit def byteArrayUnmarshaller(implicit fm: FlowMaterializer, diff --git a/akka-http/src/test/scala/akka/http/UnmarshallingSpec.scala b/akka-http/src/test/scala/akka/http/UnmarshallingSpec.scala index 88df8fbcee..ee33cd8779 100644 --- a/akka-http/src/test/scala/akka/http/UnmarshallingSpec.scala +++ b/akka-http/src/test/scala/akka/http/UnmarshallingSpec.scala @@ -86,7 +86,7 @@ class UnmarshallingSpec extends FreeSpec with Matchers with BeforeAndAfterAll { |-----""".stripMarginWithNewline("\r\n"))).to[MultipartContent] Await.result(future, 1.second) match { case Unmarshalling.Success(x) ⇒ - Await.result(Flow(x.parts).toFuture(materializer).failed, 1.second).getMessage shouldEqual + Await.result(Flow(x.parts).toFuture()(materializer).failed, 1.second).getMessage shouldEqual "multipart part must not contain more than one Content-Type header" case x ⇒ fail(x.toString) } @@ -172,7 +172,7 @@ class UnmarshallingSpec extends FreeSpec with Matchers with BeforeAndAfterAll { equal(parts).matcher[Seq[BodyPart]] compose { unmarshallingFuture ⇒ Await.result(unmarshallingFuture, 1.second) match { case Unmarshalling.Success(x) ⇒ - Await.result(Flow(x.parts).grouped(100).toFuture(materializer).recover { + 
Await.result(Flow(x.parts).grouped(100).toFuture()(materializer).recover { case _: NoSuchElementException ⇒ Nil }, 1.second) case x ⇒ fail(x.toString) @@ -183,7 +183,7 @@ class UnmarshallingSpec extends FreeSpec with Matchers with BeforeAndAfterAll { equal(fields).matcher[Seq[(String, BodyPart)]] compose { unmarshallingFuture ⇒ Await.result(unmarshallingFuture, 1.second) match { case Unmarshalling.Success(x) ⇒ - val partsSeq = Await.result(Flow(x.parts).grouped(100).toFuture(materializer).recover { + val partsSeq = Await.result(Flow(x.parts).grouped(100).toFuture()(materializer).recover { case _: NoSuchElementException ⇒ Nil }, 1.second) partsSeq map { part ⇒ diff --git a/akka-stream/src/main/scala/akka/stream/impl/FlowImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/FlowImpl.scala index 6f29f1ef56..f2023c5a37 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FlowImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FlowImpl.scala @@ -35,7 +35,7 @@ private[akka] case class FlowImpl[I, O](publisherNode: Ast.PublisherNode[I], ops override def appendJava[U](duct: akka.stream.javadsl.Duct[_ >: O, U]): Flow[U] = copy(ops = duct.ops ++: ops) - override def toFuture(materializer: FlowMaterializer): Future[O] = { + override def toFuture()(implicit materializer: FlowMaterializer): Future[O] = { val p = Promise[O]() transform(new Transformer[O, Unit] { var done = false @@ -43,14 +43,14 @@ private[akka] case class FlowImpl[I, O](publisherNode: Ast.PublisherNode[I], ops override def onError(e: Throwable) = { p failure e } override def isComplete = done override def onTermination(e: Option[Throwable]) = { p.tryFailure(new NoSuchElementException("empty stream")); Nil } - }).consume(materializer) + }).consume() p.future } - override def consume(materializer: FlowMaterializer): Unit = - produceTo(new BlackholeSubscriber[Any](materializer.settings.maximumInputBufferSize), materializer) + override def consume()(implicit materializer: FlowMaterializer): Unit = + produceTo(new BlackholeSubscriber[Any](materializer.settings.maximumInputBufferSize)) - override def onComplete(callback: Try[Unit] ⇒ Unit, materializer: FlowMaterializer): Unit = + override def onComplete(callback: Try[Unit] ⇒ Unit)(implicit materializer: FlowMaterializer): Unit = transform(new Transformer[O, Unit] { override def onNext(in: O) = Nil override def onError(e: Throwable) = { @@ -61,15 +61,15 @@ private[akka] case class FlowImpl[I, O](publisherNode: Ast.PublisherNode[I], ops callback(Builder.SuccessUnit) Nil } - }).consume(materializer) + }).consume() - override def toPublisher[U >: O](materializer: FlowMaterializer): Publisher[U] = materializer.toPublisher(publisherNode, ops) + override def toPublisher[U >: O]()(implicit materializer: FlowMaterializer): Publisher[U] = materializer.toPublisher(publisherNode, ops) - override def produceTo(subscriber: Subscriber[_ >: O], materializer: FlowMaterializer): Unit = - toPublisher(materializer).subscribe(subscriber.asInstanceOf[Subscriber[O]]) + override def produceTo(subscriber: Subscriber[_ >: O])(implicit materializer: FlowMaterializer): Unit = + toPublisher().subscribe(subscriber.asInstanceOf[Subscriber[O]]) - override def foreach(c: O ⇒ Unit, materializer: FlowMaterializer): Future[Unit] = - foreachTransform(c).toFuture(materializer) + override def foreach(c: O ⇒ Unit)(implicit materializer: FlowMaterializer): Future[Unit] = + foreachTransform(c).toFuture() } /** @@ -88,13 +88,13 @@ private[akka] case class DuctImpl[In, Out](ops: List[Ast.AstNode]) extends Duct[ 
override def appendJava[U](duct: akka.stream.javadsl.Duct[_ >: Out, U]): Duct[In, U] = copy(ops = duct.ops ++: ops) - override def produceTo[U >: Out](subscriber: Subscriber[U], materializer: FlowMaterializer): Subscriber[In] = + override def produceTo[U >: Out](subscriber: Subscriber[U])(implicit materializer: FlowMaterializer): Subscriber[In] = materializer.ductProduceTo(subscriber, ops) - override def consume(materializer: FlowMaterializer): Subscriber[In] = - produceTo(new BlackholeSubscriber[Any](materializer.settings.maximumInputBufferSize), materializer) + override def consume()(implicit materializer: FlowMaterializer): Subscriber[In] = + produceTo(new BlackholeSubscriber[Any](materializer.settings.maximumInputBufferSize)) - override def onComplete(callback: Try[Unit] ⇒ Unit, materializer: FlowMaterializer): Subscriber[In] = + override def onComplete(callback: Try[Unit] ⇒ Unit)(implicit materializer: FlowMaterializer): Subscriber[In] = transform(new Transformer[Out, Unit] { override def onNext(in: Out) = Nil override def onError(e: Throwable) = { @@ -105,17 +105,17 @@ private[akka] case class DuctImpl[In, Out](ops: List[Ast.AstNode]) extends Duct[ callback(Builder.SuccessUnit) Nil } - }).consume(materializer) + }).consume() - override def build[U >: Out](materializer: FlowMaterializer): (Subscriber[In], Publisher[U]) = + override def build[U >: Out]()(implicit materializer: FlowMaterializer): (Subscriber[In], Publisher[U]) = materializer.ductBuild(ops) - override def foreach(c: Out ⇒ Unit, materializer: FlowMaterializer): (Subscriber[In], Future[Unit]) = { + override def foreach(c: Out ⇒ Unit)(implicit materializer: FlowMaterializer): (Subscriber[In], Future[Unit]) = { val p = Promise[Unit]() - val s = foreachTransform(c).onComplete({ + val s = foreachTransform(c).onComplete { case Success(_) ⇒ p.success(()) case Failure(e) ⇒ p.failure(e) - }, materializer) + } (s, p.future) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Duct.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Duct.scala index 0d85591a37..7163e55b51 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Duct.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Duct.scala @@ -418,25 +418,24 @@ private[akka] class DuctAdapter[In, T](delegate: SDuct[In, T]) extends Duct[In, new DuctAdapter(delegate.appendJava(duct)) override def produceTo(subscriber: Subscriber[T], materializer: FlowMaterializer): Subscriber[In] = - delegate.produceTo(subscriber, materializer) + delegate.produceTo(subscriber)(materializer) override def consume(materializer: FlowMaterializer): Subscriber[In] = - delegate.consume(materializer) + delegate.consume()(materializer) override def onComplete(callback: OnCompleteCallback, materializer: FlowMaterializer): Subscriber[In] = - delegate.onComplete({ - + delegate.onComplete { case Success(_) ⇒ callback.onComplete(null) case Failure(e) ⇒ callback.onComplete(e) - }, materializer) + }(materializer) override def build(materializer: FlowMaterializer): Pair[Subscriber[In], Publisher[T]] = { - val (in, out) = delegate.build(materializer) + val (in, out) = delegate.build()(materializer) Pair(in, out) } override def foreach(c: Procedure[T], materializer: FlowMaterializer): Pair[Subscriber[In], Future[Void]] = { - val (in, fut) = delegate.foreach(elem ⇒ c.apply(elem), materializer) + val (in, fut) = delegate.foreach(elem ⇒ c.apply(elem))(materializer) implicit val ec = ExecutionContexts.sameThreadExecutionContext val voidFut = fut.map(_ ⇒ null).mapTo[Void] Pair(in, voidFut) diff 
--git a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala index 28bb10e134..a635bf9b7b 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala @@ -484,26 +484,26 @@ private[akka] class FlowAdapter[T](delegate: SFlow[T]) extends Flow[T] { new FlowAdapter(delegate.appendJava(duct)) override def toFuture(materializer: FlowMaterializer): Future[T] = - delegate.toFuture(materializer) + delegate.toFuture()(materializer) override def consume(materializer: FlowMaterializer): Unit = - delegate.consume(materializer) + delegate.consume()(materializer) override def onComplete(callback: OnCompleteCallback, materializer: FlowMaterializer): Unit = - delegate.onComplete({ + delegate.onComplete { case Success(_) ⇒ callback.onComplete(null) case Failure(e) ⇒ callback.onComplete(e) - }, materializer) + }(materializer) override def toPublisher(materializer: FlowMaterializer): Publisher[T] = - delegate.toPublisher(materializer) + delegate.toPublisher()(materializer) override def produceTo(subsriber: Subscriber[_ >: T], materializer: FlowMaterializer): Unit = - delegate.produceTo(subsriber, materializer) + delegate.produceTo(subsriber)(materializer) override def foreach(c: Procedure[T], materializer: FlowMaterializer): Future[Void] = { implicit val ec = ExecutionContexts.sameThreadExecutionContext - delegate.foreach(elem ⇒ c.apply(elem), materializer).map(_ ⇒ null).mapTo[Void] + delegate.foreach(elem ⇒ c.apply(elem))(materializer).map(_ ⇒ null).mapTo[Void] } } \ No newline at end of file diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Duct.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Duct.scala index 6c7708fbc9..da22908a63 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Duct.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Duct.scala @@ -33,6 +33,8 @@ object Duct { * methods on it and then attach the `Subscriber` representing the input side of the `Duct` to an * upstream `Publisher`. * + * Use [[ImplicitFlowMaterializer]] to define an implicit [[akka.stream.FlowMaterializer]] + * inside an [[akka.actor.Actor]]. */ trait Duct[In, +Out] { /** @@ -271,7 +273,7 @@ trait Duct[In, +Out] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def produceTo[U >: Out](subscriber: Subscriber[U], materializer: FlowMaterializer): Subscriber[In] + def produceTo[U >: Out](subscriber: Subscriber[U])(implicit materializer: FlowMaterializer): Subscriber[In] /** * Attaches a subscriber to this stream which will just discard all received @@ -283,7 +285,7 @@ trait Duct[In, +Out] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. 
*/ - def consume(materializer: FlowMaterializer): Subscriber[In] + def consume()(implicit materializer: FlowMaterializer): Subscriber[In] /** * When this flow is completed, either through an error or normal @@ -293,7 +295,7 @@ trait Duct[In, +Out] { * * *This operation materializes the flow and initiates its execution.* */ - def onComplete(callback: Try[Unit] ⇒ Unit, materializer: FlowMaterializer): Subscriber[In] + def onComplete(callback: Try[Unit] ⇒ Unit)(implicit materializer: FlowMaterializer): Subscriber[In] /** * Materialize this `Duct` into a `Subscriber` representing the input side of the `Duct` @@ -307,7 +309,7 @@ trait Duct[In, +Out] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def build[U >: Out](materializer: FlowMaterializer): (Subscriber[In], Publisher[U]) + def build[U >: Out]()(implicit materializer: FlowMaterializer): (Subscriber[In], Publisher[U]) /** * Invoke the given procedure for each received element. @@ -325,7 +327,7 @@ trait Duct[In, +Out] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def foreach(c: Out ⇒ Unit, materializer: FlowMaterializer): (Subscriber[In], Future[Unit]) + def foreach(c: Out ⇒ Unit)(implicit materializer: FlowMaterializer): (Subscriber[In], Future[Unit]) /** * INTERNAL API diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala index ab1aed659c..98a52de972 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala @@ -98,6 +98,9 @@ object Flow { * by those methods that materialize the Flow into a series of * [[org.reactivestreams.Processor]] instances. The returned reactive stream * is fully started and active. + * + * Use [[ImplicitFlowMaterializer]] to define an implicit [[akka.stream.FlowMaterializer]] + * inside an [[akka.actor.Actor]]. */ trait Flow[+T] { @@ -340,7 +343,7 @@ trait Flow[+T] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def toFuture(materializer: FlowMaterializer): Future[T] + def toFuture()(implicit materializer: FlowMaterializer): Future[T] /** * Attaches a subscriber to this stream which will just discard all received @@ -349,7 +352,7 @@ trait Flow[+T] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def consume(materializer: FlowMaterializer): Unit + def consume()(implicit materializer: FlowMaterializer): Unit /** * When this flow is completed, either through an error or normal @@ -358,7 +361,7 @@ trait Flow[+T] { * * *This operation materializes the flow and initiates its execution.* */ - def onComplete(callback: Try[Unit] ⇒ Unit, materializer: FlowMaterializer): Unit + def onComplete(callback: Try[Unit] ⇒ Unit)(implicit materializer: FlowMaterializer): Unit /** * Materialize this flow and return the downstream-most @@ -370,7 +373,7 @@ trait Flow[+T] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def toPublisher[U >: T](materializer: FlowMaterializer): Publisher[U] + def toPublisher[U >: T]()(implicit materializer: FlowMaterializer): Publisher[U] /** * Attaches a subscriber to this stream. 
@@ -380,7 +383,7 @@ trait Flow[+T] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def produceTo(subscriber: Subscriber[_ >: T], materializer: FlowMaterializer): Unit + def produceTo(subscriber: Subscriber[_ >: T])(implicit materializer: FlowMaterializer): Unit /** * Invoke the given procedure for each received element. Returns a [[scala.concurrent.Future]] @@ -392,7 +395,7 @@ trait Flow[+T] { * The given FlowMaterializer decides how the flow’s logical structure is * broken down into individual processing steps. */ - def foreach(c: T ⇒ Unit, materializer: FlowMaterializer): Future[Unit] + def foreach(c: T ⇒ Unit)(implicit materializer: FlowMaterializer): Future[Unit] } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitFlowMaterializer.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitFlowMaterializer.scala new file mode 100644 index 0000000000..5e5645d6de --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitFlowMaterializer.scala @@ -0,0 +1,25 @@ +/** + * Copyright (C) 2014 Typesafe Inc. + */ +package akka.stream.scaladsl + +import akka.actor.Actor +import akka.stream.FlowMaterializer +import akka.stream.MaterializerSettings + +/** + * Mix this trait into your [[akka.actor.Actor]] if you need an implicit + * [[akka.stream.FlowMaterializer]] in scope. + * + * Subclass may override [[#flowMaterializerSettings]] to define custom + * [[akka.stream.MaterializerSettings]] for the `FlowMaterializer`. + */ +trait ImplicitFlowMaterializer { this: Actor ⇒ + /** + * Subclass may override this to define custom + * [[akka.stream.MaterializerSettings]] for the `FlowMaterializer`. + */ + def flowMaterializerSettings: MaterializerSettings = MaterializerSettings() + + final implicit val flowMaterializer: FlowMaterializer = FlowMaterializer(flowMaterializerSettings) +} \ No newline at end of file diff --git a/akka-stream/src/test/scala/akka/persistence/stream/PersistentPublisherSpec.scala b/akka-stream/src/test/scala/akka/persistence/stream/PersistentPublisherSpec.scala index d6e00efd1f..b6e1819c44 100644 --- a/akka-stream/src/test/scala/akka/persistence/stream/PersistentPublisherSpec.scala +++ b/akka-stream/src/test/scala/akka/persistence/stream/PersistentPublisherSpec.scala @@ -30,7 +30,7 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", val numMessages = 10 val publisherSettings = PersistentPublisherSettings(idle = Some(100.millis)) - val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) + implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) var processor1: ActorRef = _ var processor2: ActorRef = _ @@ -69,9 +69,9 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "pull existing messages from a processor's journal" in { val streamProbe = TestProbe() - PersistentFlow.fromProcessor(processorId(1), publisherSettings).foreach({ + PersistentFlow.fromProcessor(processorId(1), publisherSettings).foreach { case Persistent(payload, sequenceNr) ⇒ streamProbe.ref ! 
s"${payload}-${sequenceNr}" - }, materializer) + } 1 to numMessages foreach { i ⇒ streamProbe.expectMsg(s"a-${i}") @@ -80,9 +80,9 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "pull existing messages and new from a processor's journal" in { val streamProbe = TestProbe() - PersistentFlow.fromProcessor(processorId(1), publisherSettings).foreach({ + PersistentFlow.fromProcessor(processorId(1), publisherSettings).foreach { case Persistent(payload, sequenceNr) ⇒ streamProbe.ref ! s"${payload}-${sequenceNr}" - }, materializer) + } 1 to numMessages foreach { i ⇒ streamProbe.expectMsg(s"a-${i}") @@ -98,9 +98,9 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", val streamProbe = TestProbe() val fromSequenceNr = 5L - PersistentFlow.fromProcessor(processorId(1), publisherSettings.copy(fromSequenceNr = fromSequenceNr)).foreach({ + PersistentFlow.fromProcessor(processorId(1), publisherSettings.copy(fromSequenceNr = fromSequenceNr)).foreach { case Persistent(payload, sequenceNr) ⇒ streamProbe.ref ! s"${payload}-${sequenceNr}" - }, materializer) + } fromSequenceNr to numMessages foreach { i ⇒ streamProbe.expectMsg(s"a-${i}") @@ -113,11 +113,11 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", val streamProbe1 = TestProbe() val streamProbe2 = TestProbe() - val publisher = PersistentFlow.fromProcessor(processorId(1), publisherSettings).toPublisher(materializer) + val publisher = PersistentFlow.fromProcessor(processorId(1), publisherSettings).toPublisher() - Flow(publisher).foreach({ + Flow(publisher).foreach { case Persistent(payload, sequenceNr) ⇒ streamProbe1.ref ! s"${payload}-${sequenceNr}" - }, materializer) + } // let subscriber consume all existing messages 1 to numMessages foreach { i ⇒ @@ -125,9 +125,9 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", } // subscribe another subscriber - Flow(publisher).foreach({ + Flow(publisher).foreach { case Persistent(payload, sequenceNr) ⇒ streamProbe2.ref ! s"${payload}-${sequenceNr}" - }, materializer) + } // produce new messages and let both subscribers handle them 1 to 2 foreach { i ⇒ @@ -146,13 +146,13 @@ class PersistentPublisherSpec extends AkkaSpec(PersistenceSpec.config("leveldb", val fromSequenceNr1 = 7L val fromSequenceNr2 = 3L - val publisher1 = PersistentFlow.fromProcessor(processorId(1), publisherSettings.copy(fromSequenceNr = fromSequenceNr1)).toPublisher(materializer) - val publisher2 = PersistentFlow.fromProcessor(processorId(2), publisherSettings.copy(fromSequenceNr = fromSequenceNr2)).toPublisher(materializer) + val publisher1 = PersistentFlow.fromProcessor(processorId(1), publisherSettings.copy(fromSequenceNr = fromSequenceNr1)).toPublisher() + val publisher2 = PersistentFlow.fromProcessor(processorId(2), publisherSettings.copy(fromSequenceNr = fromSequenceNr2)).toPublisher() - Flow(publisher1).merge(publisher2).foreach({ + Flow(publisher1).merge(publisher2).foreach { case Persistent(payload: String, sequenceNr) if (payload.startsWith("a")) ⇒ streamProbe1.ref ! s"${payload}-${sequenceNr}" case Persistent(payload: String, sequenceNr) if (payload.startsWith("b")) ⇒ streamProbe2.ref ! 
s"${payload}-${sequenceNr}" - }, materializer) + } 1 to numMessages foreach { i ⇒ if (i >= fromSequenceNr1) streamProbe1.expectMsg(s"a-${i}") diff --git a/akka-stream/src/test/scala/akka/stream/DuctSpec.scala b/akka-stream/src/test/scala/akka/stream/DuctSpec.scala index 7b97dbc21b..1e32cc9e39 100644 --- a/akka-stream/src/test/scala/akka/stream/DuctSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/DuctSpec.scala @@ -22,18 +22,18 @@ object DuctSpec { class DuctSpec extends AkkaSpec { import DuctSpec._ - val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) + implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) "A Duct" must { "materialize into Publisher/Subscriber" in { val duct: Duct[String, String] = Duct[String] - val (ductIn: Subscriber[String], ductOut: Publisher[String]) = duct.build(materializer) + val (ductIn: Subscriber[String], ductOut: Publisher[String]) = duct.build() val c1 = StreamTestKit.SubscriberProbe[String]() ductOut.subscribe(c1) - val source: Publisher[String] = Flow(List("1", "2", "3")).toPublisher(materializer) + val source: Publisher[String] = Flow(List("1", "2", "3")).toPublisher() source.subscribe(ductIn) val sub1 = c1.expectSubscription @@ -46,7 +46,7 @@ class DuctSpec extends AkkaSpec { "materialize into Publisher/Subscriber and transformation processor" in { val duct: Duct[Int, String] = Duct[Int].map((i: Int) ⇒ i.toString) - val (ductIn: Subscriber[Int], ductOut: Publisher[String]) = duct.build(materializer) + val (ductIn: Subscriber[Int], ductOut: Publisher[String]) = duct.build() val c1 = StreamTestKit.SubscriberProbe[String]() ductOut.subscribe(c1) @@ -54,7 +54,7 @@ class DuctSpec extends AkkaSpec { sub1.request(3) c1.expectNoMsg(200.millis) - val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher(materializer) + val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher() source.subscribe(ductIn) c1.expectNext("1") @@ -65,7 +65,7 @@ class DuctSpec extends AkkaSpec { "materialize into Publisher/Subscriber and multiple transformation processors" in { val duct = Duct[Int].map(_.toString).map("elem-" + _) - val (ductIn, ductOut) = duct.build(materializer) + val (ductIn, ductOut) = duct.build() val c1 = StreamTestKit.SubscriberProbe[String]() ductOut.subscribe(c1) @@ -73,7 +73,7 @@ class DuctSpec extends AkkaSpec { sub1.request(3) c1.expectNoMsg(200.millis) - val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher(materializer) + val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher() source.subscribe(ductIn) c1.expectNext("elem-1") @@ -85,8 +85,8 @@ class DuctSpec extends AkkaSpec { "subscribe Subscriber" in { val duct: Duct[String, String] = Duct[String] val c1 = StreamTestKit.SubscriberProbe[String]() - val c2: Subscriber[String] = duct.produceTo(c1, materializer) - val source: Publisher[String] = Flow(List("1", "2", "3")).toPublisher(materializer) + val c2: Subscriber[String] = duct.produceTo(c1) + val source: Publisher[String] = Flow(List("1", "2", "3")).toPublisher() source.subscribe(c2) val sub1 = c1.expectSubscription @@ -99,9 +99,9 @@ class DuctSpec extends AkkaSpec { "perform transformation operation" in { val duct = Duct[Int].map(i ⇒ { testActor ! 
 i.toString; i.toString })
-      val c = duct.consume(materializer)
+      val c = duct.consume()
-      val source = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val source = Flow(List(1, 2, 3)).toPublisher()
       source.subscribe(c)
       expectMsg("1")
@@ -110,9 +110,9 @@ class DuctSpec extends AkkaSpec {
     }
     "perform multiple transformation operations" in {
-      val (in, fut) = Duct[Int].map(_.toString).map("elem-" + _).foreach(testActor ! _, materializer)
+      val (in, fut) = Duct[Int].map(_.toString).map("elem-" + _).foreach(testActor ! _)
-      val source = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val source = Flow(List(1, 2, 3)).toPublisher()
       source.subscribe(in)
       expectMsg("elem-1")
@@ -123,13 +123,13 @@ class DuctSpec extends AkkaSpec {
     "perform transformation operation and subscribe Subscriber" in {
       val duct = Duct[Int].map(_.toString)
       val c1 = StreamTestKit.SubscriberProbe[String]()
-      val c2: Subscriber[Int] = duct.produceTo(c1, materializer)
+      val c2: Subscriber[Int] = duct.produceTo(c1)
       val sub1 = c1.expectSubscription
       sub1.request(3)
       c1.expectNoMsg(200.millis)
-      val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher()
       source.subscribe(c2)
       c1.expectNext("1")
@@ -141,13 +141,13 @@ class DuctSpec extends AkkaSpec {
     "perform multiple transformation operations and subscribe Subscriber" in {
       val duct = Duct[Int].map(_.toString).map("elem-" + _)
       val c1 = StreamTestKit.SubscriberProbe[String]()
-      val c2 = duct.produceTo(c1, materializer)
+      val c2 = duct.produceTo(c1)
       val sub1 = c1.expectSubscription
       sub1.request(3)
       c1.expectNoMsg(200.millis)
-      val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val source: Publisher[Int] = Flow(List(1, 2, 3)).toPublisher()
       source.subscribe(c2)
       c1.expectNext("elem-1")
@@ -158,12 +158,12 @@ class DuctSpec extends AkkaSpec {
     "call onComplete callback when done" in {
       val duct = Duct[Int].map(i ⇒ { testActor ! i.toString; i.toString })
-      val c = duct.onComplete({
+      val c = duct.onComplete {
         case Success(_) ⇒ testActor ! "DONE"
         case Failure(e) ⇒ testActor ! e
-      }, materializer)
+      }
-      val source = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val source = Flow(List(1, 2, 3)).toPublisher()
       source.subscribe(c)
       expectMsg("1")
@@ -175,7 +175,7 @@ class DuctSpec extends AkkaSpec {
     "be appendable to a Flow" in {
       val c = StreamTestKit.SubscriberProbe[String]()
       val duct = Duct[Int].map(_ + 10).map(_.toString)
-      Flow(List(1, 2, 3)).map(_ * 2).append(duct).map((s: String) ⇒ "elem-" + s).produceTo(c, materializer)
+      Flow(List(1, 2, 3)).map(_ * 2).append(duct).map((s: String) ⇒ "elem-" + s).produceTo(c)
       val sub = c.expectSubscription
       sub.request(3)
@@ -192,9 +192,9 @@ class DuctSpec extends AkkaSpec {
         .map { i ⇒ (i * 2).toString }
         .append(duct1)
         .map { i ⇒ "elem-" + (i + 10) }
-        .produceTo(c, materializer)
+        .produceTo(c)
-      Flow(List(1, 2, 3)).produceTo(ductInSubscriber, materializer)
+      Flow(List(1, 2, 3)).produceTo(ductInSubscriber)
       val sub = c.expectSubscription
       sub.request(3)
@@ -209,8 +209,8 @@ class DuctSpec extends AkkaSpec {
       val d2: Duct[String, (Boolean, Publisher[Fruit])] = Duct[String].map(_ ⇒ new Apple).groupBy(_ ⇒ true)
       val d3: Duct[String, (immutable.Seq[Apple], Publisher[Fruit])] = Duct[String].map(_ ⇒ new Apple).prefixAndTail(1)
       val s1: Subscriber[Fruit] = StreamTestKit.SubscriberProbe[Fruit]()
-      val s2: Subscriber[String] = Duct[String].map(_ ⇒ new Apple).produceTo(s1, materializer)
-      val t: Tuple2[Subscriber[String], Publisher[Fruit]] = Duct[String].map(_ ⇒ new Apple).build(materializer)
+      val s2: Subscriber[String] = Duct[String].map(_ ⇒ new Apple).produceTo(s1)
+      val t: Tuple2[Subscriber[String], Publisher[Fruit]] = Duct[String].map(_ ⇒ new Apple).build()
     }
   }
diff --git a/akka-stream/src/test/scala/akka/stream/FlowBufferSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowBufferSpec.scala
index a9a63961e7..a4393562b4 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowBufferSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowBufferSpec.scala
@@ -11,7 +11,7 @@ import OverflowStrategy._
 class FlowBufferSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 1,
     maximumInputBufferSize = 1,
     initialFanOutBufferSize = 1,
@@ -21,12 +21,12 @@ class FlowBufferSpec extends AkkaSpec {
   "Buffer" must {
     "pass elements through normally in backpressured mode" in {
-      val future = Flow((1 to 1000).iterator).buffer(100, overflowStrategy = OverflowStrategy.backpressure).grouped(1001).toFuture(materializer)
+      val future = Flow((1 to 1000).iterator).buffer(100, overflowStrategy = OverflowStrategy.backpressure).grouped(1001).toFuture()
       Await.result(future, 3.seconds) should be(1 to 1000)
     }
     "pass elements through normally in backpressured mode with buffer size one" in {
-      val future = Flow((1 to 1000).iterator).buffer(1, overflowStrategy = OverflowStrategy.backpressure).grouped(1001).toFuture(materializer)
+      val future = Flow((1 to 1000).iterator).buffer(1, overflowStrategy = OverflowStrategy.backpressure).grouped(1001).toFuture()
       Await.result(future, 3.seconds) should be(1 to 1000)
     }
@@ -39,7 +39,7 @@ class FlowBufferSpec extends AkkaSpec {
         .buffer(5, overflowStrategy = OverflowStrategy.backpressure)
         .buffer(128, overflowStrategy = OverflowStrategy.backpressure)
         .grouped(1001)
-        .toFuture(materializer)
+        .toFuture()
       Await.result(future, 3.seconds) should be(1 to 1000)
     }
@@ -47,7 +47,7 @@ class FlowBufferSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.backpressure).produceTo(subscriber, materializer)
+      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.backpressure).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -67,7 +67,7 @@ class FlowBufferSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropHead).produceTo(subscriber, materializer)
+      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropHead).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -95,7 +95,7 @@ class FlowBufferSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropTail).produceTo(subscriber, materializer)
+      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropTail).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -126,7 +126,7 @@ class FlowBufferSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropBuffer).produceTo(subscriber, materializer)
+      Flow(publisher).buffer(100, overflowStrategy = OverflowStrategy.dropBuffer).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -157,7 +157,7 @@ class FlowBufferSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).buffer(1, overflowStrategy = strategy).produceTo(subscriber, materializer)
+      Flow(publisher).buffer(1, overflowStrategy = strategy).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowConcatAllSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowConcatAllSpec.scala
index 20298158bf..e03a4e37b4 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowConcatAllSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowConcatAllSpec.scala
@@ -12,7 +12,7 @@ import scala.util.control.NoStackTrace
 class FlowConcatAllSpec extends AkkaSpec {
-  val m = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -24,27 +24,27 @@ class FlowConcatAllSpec extends AkkaSpec {
     val testException = new Exception("test") with NoStackTrace
     "work in the happy case" in {
-      val s1 = Flow((1 to 2).iterator).toPublisher(m)
-      val s2 = Flow(List.empty[Int]).toPublisher(m)
-      val s3 = Flow(List(3)).toPublisher(m)
-      val s4 = Flow((4 to 6).iterator).toPublisher(m)
-      val s5 = Flow((7 to 10).iterator).toPublisher(m)
+      val s1 = Flow((1 to 2).iterator).toPublisher()
+      val s2 = Flow(List.empty[Int]).toPublisher()
+      val s3 = Flow(List(3)).toPublisher()
+      val s4 = Flow((4 to 6).iterator).toPublisher()
+      val s5 = Flow((7 to 10).iterator).toPublisher()
       val main: Flow[Publisher[Int]] = Flow(List(s1, s2, s3, s4, s5))
-      Await.result(main.flatten(FlattenStrategy.concat).grouped(10).toFuture(m), 3.seconds) should be(1 to 10)
+      Await.result(main.flatten(FlattenStrategy.concat).grouped(10).toFuture(), 3.seconds) should be(1 to 10)
     }
     "work together with SplitWhen" in {
       Await.result(
-        Flow((1 to 10).iterator).splitWhen(_ % 2 == 0).flatten(FlattenStrategy.concat).grouped(10).toFuture(m),
+        Flow((1 to 10).iterator).splitWhen(_ % 2 == 0).flatten(FlattenStrategy.concat).grouped(10).toFuture(),
         3.seconds) should be(1 to 10)
     }
     "on onError on master stream cancel the current open substream and signal error" in {
       val publisher = StreamTestKit.PublisherProbe[Publisher[Int]]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber, m)
+      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -63,7 +63,7 @@ class FlowConcatAllSpec extends AkkaSpec {
     "on onError on open substream, cancel the master stream and signal error " in {
       val publisher = StreamTestKit.PublisherProbe[Publisher[Int]]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber, m)
+      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -82,7 +82,7 @@ class FlowConcatAllSpec extends AkkaSpec {
     "on cancellation cancel the current open substream and the master stream" in {
      val publisher = StreamTestKit.PublisherProbe[Publisher[Int]]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber, m)
+      Flow(publisher).flatten(FlattenStrategy.concat).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowConcatSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowConcatSpec.scala
index 056351b04e..b835cbb7ea 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowConcatSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowConcatSpec.scala
@@ -16,10 +16,10 @@ class FlowConcatSpec extends TwoStreamsSetup {
   "Concat" must {
     "work in the happy case" in {
-      val source0 = Flow(List.empty[Int].iterator).toPublisher(materializer)
-      val source1 = Flow((1 to 4).iterator).toPublisher(materializer)
-      val source2 = Flow((5 to 10).iterator).toPublisher(materializer)
-      val p = Flow(source0).concat(source1).concat(source2).toPublisher(materializer)
+      val source0 = Flow(List.empty[Int].iterator).toPublisher()
+      val source1 = Flow((1 to 4).iterator).toPublisher()
+      val source2 = Flow((5 to 10).iterator).toPublisher()
+      val p = Flow(source0).concat(source1).concat(source2).toPublisher()
       val probe = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(probe)
@@ -93,9 +93,9 @@ class FlowConcatSpec extends TwoStreamsSetup {
     "correctly handle async errors in secondary upstream" in {
       val promise = Promise[Int]()
-      val flow = Flow(List(1, 2, 3)).concat(Flow(promise.future).toPublisher(materializer))
+      val flow = Flow(List(1, 2, 3)).concat(Flow(promise.future).toPublisher())
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      flow.produceTo(subscriber, materializer)
+      flow.produceTo(subscriber)
       val subscription = subscriber.expectSubscription()
       subscription.request(4)
       subscriber.expectNext(1)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowConflateSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowConflateSpec.scala
index 5475013c7c..659c899217 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowConflateSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowConflateSpec.scala
@@ -11,7 +11,7 @@ import scala.concurrent.duration._
 class FlowConflateSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -24,7 +24,7 @@ class FlowConflateSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber, materializer)
+      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -42,7 +42,7 @@ class FlowConflateSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber, materializer)
+      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -61,7 +61,7 @@ class FlowConflateSpec extends AkkaSpec {
         .conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i)
         .map { i ⇒ if (ThreadLocalRandom.current().nextBoolean()) Thread.sleep(10); i }
         .fold(0)(_ + _)
-        .toFuture(materializer)
+        .toFuture()
       Await.result(future, 10.seconds) should be(500500)
     }
@@ -70,7 +70,7 @@ class FlowConflateSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber, materializer)
+      Flow(publisher).conflate[Int](seed = i ⇒ i, aggregate = (sum, i) ⇒ sum + i).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowDispatcherSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowDispatcherSpec.scala
index 9961a9629b..e2f3022b94 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowDispatcherSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowDispatcherSpec.scala
@@ -10,14 +10,14 @@ import akka.testkit.TestProbe
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowDispatcherSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
+  implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
   "Flow with dispatcher setting" must {
     "use the specified dispatcher" in {
       val probe = TestProbe()
       val p = Flow(List(1, 2, 3)).map(i ⇒ { probe.ref ! Thread.currentThread().getName(); i }).
-        consume(materializer)
+        consume()
       probe.receiveN(3) foreach {
         case s: String ⇒ s should startWith(system.name + "-akka.test.stream-dispatcher")
       }
diff --git a/akka-stream/src/test/scala/akka/stream/FlowDropWithinSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowDropWithinSpec.scala
index fe01dc529b..cd336c7594 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowDropWithinSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowDropWithinSpec.scala
@@ -11,7 +11,7 @@ import akka.stream.testkit.StreamTestKit
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowDropWithinSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     dispatcher = "akka.test.stream-dispatcher"))
   "A DropWithin" must {
@@ -20,7 +20,7 @@ class FlowDropWithinSpec extends AkkaSpec {
       val input = Iterator.from(1)
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[Int]()
-      Flow(p).dropWithin(1.second).produceTo(c, materializer)
+      Flow(p).dropWithin(1.second).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(100)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowExpandSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowExpandSpec.scala
index 0c8f51d0de..2df3c392c3 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowExpandSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowExpandSpec.scala
@@ -11,7 +11,7 @@ import scala.concurrent.duration._
 class FlowExpandSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -25,7 +25,7 @@ class FlowExpandSpec extends AkkaSpec {
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       // Simply repeat the last element as an extrapolation step
-      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber, materializer)
+      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -45,7 +45,7 @@ class FlowExpandSpec extends AkkaSpec {
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       // Simply repeat the last element as an extrapolation step
-      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber, materializer)
+      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
@@ -69,7 +69,7 @@ class FlowExpandSpec extends AkkaSpec {
         .map { i ⇒ if (ThreadLocalRandom.current().nextBoolean()) Thread.sleep(10); i }
         .expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i))
         .fold(Set.empty[Int])(_ + _)
-        .toFuture(materializer)
+        .toFuture()
       Await.result(future, 10.seconds) should be(Set.empty[Int] ++ (1 to 100))
     }
@@ -78,7 +78,7 @@ class FlowExpandSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber, materializer)
+      Flow(publisher).expand[Int, Int](seed = i ⇒ i, extrapolate = i ⇒ (i, i)).produceTo(subscriber)
       val autoPublisher = new StreamTestKit.AutoPublisher(publisher)
       val sub = subscriber.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowFilterSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowFilterSpec.scala
index e94173c470..11d1227176 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowFilterSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowFilterSpec.scala
@@ -26,7 +26,7 @@ class FlowFilterSpec extends AkkaSpec with ScriptedTest {
     }
     "not blow up with high request counts" in {
-      val materializer = FlowMaterializer(MaterializerSettings(
+      implicit val materializer = FlowMaterializer(MaterializerSettings(
         initialInputBufferSize = 1,
         maximumInputBufferSize = 1,
         initialFanOutBufferSize = 1,
@@ -35,7 +35,7 @@ class FlowFilterSpec extends AkkaSpec with ScriptedTest {
       val probe = StreamTestKit.SubscriberProbe[Int]()
       Flow(Iterator.fill(1000)(0) ++ List(1)).filter(_ != 0).
-        toPublisher(materializer).subscribe(probe)
+        toPublisher().subscribe(probe)
       val subscription = probe.expectSubscription()
       for (_ ← 1 to 10000) {
diff --git a/akka-stream/src/test/scala/akka/stream/FlowForeachSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowForeachSpec.scala
index c868e7e2c2..7da645d13a 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowForeachSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowForeachSpec.scala
@@ -12,13 +12,13 @@ import scala.util.control.NoStackTrace
 class FlowForeachSpec extends AkkaSpec {
-  val mat = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
+  implicit val mat = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
   import system.dispatcher
   "A Foreach" must {
     "call the procedure for each element" in {
-      Flow(1 to 3).foreach(testActor ! _, mat).onSuccess {
+      Flow(1 to 3).foreach(testActor ! _).onSuccess {
         case _ ⇒ testActor ! "done"
       }
       expectMsg(1)
@@ -28,7 +28,7 @@ class FlowForeachSpec extends AkkaSpec {
     }
     "complete the future for an empty stream" in {
-      Flow(Nil).foreach(testActor ! _, mat).onSuccess {
+      Flow(Nil).foreach(testActor ! _).onSuccess {
        case _ ⇒ testActor ! "done"
       }
       expectMsg("done")
@@ -36,7 +36,7 @@ class FlowForeachSpec extends AkkaSpec {
     "yield the first error" in {
       val p = StreamTestKit.PublisherProbe[Int]()
-      Flow(p).foreach(testActor ! _, mat).onFailure {
+      Flow(p).foreach(testActor ! _).onFailure {
         case ex ⇒ testActor ! ex
       }
       val proc = p.expectSubscription
diff --git a/akka-stream/src/test/scala/akka/stream/FlowFromFutureSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowFromFutureSpec.scala
index 6bd81abcd6..624f45fcb9 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowFromFutureSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowFromFutureSpec.scala
@@ -12,11 +12,11 @@ import scala.concurrent.duration._
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowFromFutureSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
+  implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
   "A Flow based on a Future" must {
     "produce one element from already successful Future" in {
-      val p = Flow(Future.successful(1)).toPublisher(materializer)
+      val p = Flow(Future.successful(1)).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -28,7 +28,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "produce error from already failed Future" in {
       val ex = new RuntimeException("test")
-      val p = Flow(Future.failed[Int](ex)).toPublisher(materializer)
+      val p = Flow(Future.failed[Int](ex)).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       c.expectError(ex)
@@ -36,7 +36,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "produce one element when Future is completed" in {
       val promise = Promise[Int]()
-      val p = Flow(promise.future).toPublisher(materializer)
+      val p = Flow(promise.future).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -50,7 +50,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "produce one element when Future is completed but not before request" in {
       val promise = Promise[Int]()
-      val p = Flow(promise.future).toPublisher(materializer)
+      val p = Flow(promise.future).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -63,7 +63,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "produce elements with multiple subscribers" in {
       val promise = Promise[Int]()
-      val p = Flow(promise.future).toPublisher(materializer)
+      val p = Flow(promise.future).toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c1)
@@ -81,7 +81,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "produce elements to later subscriber" in {
       val promise = Promise[Int]()
-      val p = Flow(promise.future).toPublisher(materializer)
+      val p = Flow(promise.future).toPublisher()
       val keepAlive = StreamTestKit.SubscriberProbe[Int]()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
@@ -102,7 +102,7 @@ class FlowFromFutureSpec extends AkkaSpec {
     "allow cancel before receiving element" in {
       val promise = Promise[Int]()
-      val p = Flow(promise.future).toPublisher(materializer)
+      val p = Flow(promise.future).toPublisher()
       val keepAlive = StreamTestKit.SubscriberProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(keepAlive)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowGroupBySpec.scala b/akka-stream/src/test/scala/akka/stream/FlowGroupBySpec.scala
index 7104a80bbe..6b599f5ea2 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowGroupBySpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowGroupBySpec.scala
@@ -12,7 +12,7 @@ import scala.util.control.NoStackTrace
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowGroupBySpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -33,8 +33,8 @@ class FlowGroupBySpec extends AkkaSpec {
   }
   class SubstreamsSupport(groupCount: Int = 2, elementCount: Int = 6) {
-    val source = Flow((1 to elementCount).iterator).toPublisher(materializer)
-    val groupStream = Flow(source).groupBy(_ % groupCount).toPublisher(materializer)
+    val source = Flow((1 to elementCount).iterator).toPublisher()
+    val groupStream = Flow(source).groupBy(_ % groupCount).toPublisher()
     val masterSubscriber = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
     groupStream.subscribe(masterSubscriber)
@@ -108,7 +108,7 @@ class FlowGroupBySpec extends AkkaSpec {
     "accept cancellation of master stream when not consumed anything" in {
       val publisherProbeProbe = StreamTestKit.PublisherProbe[Int]()
-      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher(materializer)
+      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
       publisher.subscribe(subscriber)
@@ -157,8 +157,8 @@ class FlowGroupBySpec extends AkkaSpec {
     }
     "work with fanout on master stream" in {
-      val source = Flow((1 to 4).iterator).toPublisher(materializer)
-      val groupStream = Flow(source).groupBy(_ % 2).toPublisher(materializer)
+      val source = Flow((1 to 4).iterator).toPublisher()
+      val groupStream = Flow(source).groupBy(_ % 2).toPublisher()
       val masterSubscriber1 = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
       val masterSubscriber2 = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
@@ -200,7 +200,7 @@ class FlowGroupBySpec extends AkkaSpec {
     }
     "work with empty input stream" in {
-      val publisher = Flow(List.empty[Int]).groupBy(_ % 2).toPublisher(materializer)
+      val publisher = Flow(List.empty[Int]).groupBy(_ % 2).toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
       publisher.subscribe(subscriber)
@@ -209,7 +209,7 @@ class FlowGroupBySpec extends AkkaSpec {
     "abort on onError from upstream" in {
       val publisherProbeProbe = StreamTestKit.PublisherProbe[Int]()
-      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher(materializer)
+      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
       publisher.subscribe(subscriber)
@@ -226,7 +226,7 @@ class FlowGroupBySpec extends AkkaSpec {
     "abort on onError from upstream when substreams are running" in {
       val publisherProbeProbe = StreamTestKit.PublisherProbe[Int]()
-      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher(materializer)
+      val publisher = Flow(publisherProbeProbe).groupBy(_ % 2).toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[(Int, Publisher[Int])]()
       publisher.subscribe(subscriber)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowGroupedWithinSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowGroupedWithinSpec.scala
index 97c3d27e8f..ec4ac86457 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowGroupedWithinSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowGroupedWithinSpec.scala
@@ -15,7 +15,7 @@ import akka.stream.testkit.ScriptedTest
 class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
   val settings = MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")
-  val materializer = FlowMaterializer(settings)
+  implicit val materializer = FlowMaterializer(settings)
   "A GroupedWithin" must {
@@ -23,7 +23,7 @@ class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
       val input = Iterator.from(1)
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[immutable.Seq[Int]]()
-      Flow(p).groupedWithin(1000, 1.second).produceTo(c, materializer)
+      Flow(p).groupedWithin(1000, 1.second).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(100)
@@ -48,7 +48,7 @@ class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
     "deliver bufferd elements onComplete before the timeout" in {
       val c = StreamTestKit.SubscriberProbe[immutable.Seq[Int]]()
-      Flow(1 to 3).groupedWithin(1000, 10.second).produceTo(c, materializer)
+      Flow(1 to 3).groupedWithin(1000, 10.second).produceTo(c)
       val cSub = c.expectSubscription
       cSub.request(100)
       c.expectNext((1 to 3).toList)
@@ -60,7 +60,7 @@ class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
       val input = Iterator.from(1)
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[immutable.Seq[Int]]()
-      Flow(p).groupedWithin(1000, 1.second).produceTo(c, materializer)
+      Flow(p).groupedWithin(1000, 1.second).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(1)
@@ -80,7 +80,7 @@ class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
     "drop empty groups" in {
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[immutable.Seq[Int]]()
-      Flow(p).groupedWithin(1000, 500.millis).produceTo(c, materializer)
+      Flow(p).groupedWithin(1000, 500.millis).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(2)
@@ -102,7 +102,7 @@ class FlowGroupedWithinSpec extends AkkaSpec with ScriptedTest {
       val input = Iterator.from(1)
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[immutable.Seq[Int]]()
-      Flow(p).groupedWithin(3, 2.second).produceTo(c, materializer)
+      Flow(p).groupedWithin(3, 2.second).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(4)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowIterableSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowIterableSpec.scala
index 93639889aa..b053a1c620 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowIterableSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowIterableSpec.scala
@@ -12,13 +12,13 @@ import scala.concurrent.duration._
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowIterableSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     maximumInputBufferSize = 512,
     dispatcher = "akka.test.stream-dispatcher"))
   "A Flow based on an iterable" must {
     "produce elements" in {
-      val p = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3)).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -32,7 +32,7 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "complete empty" in {
-      val p = Flow(List.empty[Int]).toPublisher(materializer)
+      val p = Flow(List.empty[Int]).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       c.expectComplete()
@@ -44,7 +44,7 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "produce elements with multiple subscribers" in {
-      val p = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3)).toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c1)
@@ -68,7 +68,7 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "produce elements to later subscriber" in {
-      val p = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3)).toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c1)
@@ -94,7 +94,7 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "produce elements with one transformation step" in {
-      val p = Flow(List(1, 2, 3)).map(_ * 2).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3)).map(_ * 2).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -106,7 +106,7 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "produce elements with two transformation steps" in {
-      val p = Flow(List(1, 2, 3, 4)).filter(_ % 2 == 0).map(_ * 2).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3, 4)).filter(_ % 2 == 0).map(_ * 2).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -118,7 +118,7 @@ class FlowIterableSpec extends AkkaSpec {
     "allow cancel before receiving all elements" in {
       val count = 100000
-      val p = Flow(1 to count).toPublisher(materializer)
+      val p = Flow(1 to count).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -134,19 +134,19 @@ class FlowIterableSpec extends AkkaSpec {
     }
     "have value equality of publisher" in {
-      val p1 = Flow(List(1, 2, 3)).toPublisher(materializer)
-      val p2 = Flow(List(1, 2, 3)).toPublisher(materializer)
+      val p1 = Flow(List(1, 2, 3)).toPublisher()
+      val p2 = Flow(List(1, 2, 3)).toPublisher()
       p1 should be(p2)
       p2 should be(p1)
-      val p3 = Flow(List(1, 2, 3, 4)).toPublisher(materializer)
+      val p3 = Flow(List(1, 2, 3, 4)).toPublisher()
       p1 should not be (p3)
       p3 should not be (p1)
-      val p4 = Flow(Vector.empty[String]).toPublisher(materializer)
-      val p5 = Flow(Set.empty[String]).toPublisher(materializer)
+      val p4 = Flow(Vector.empty[String]).toPublisher()
+      val p5 = Flow(Set.empty[String]).toPublisher()
       p1 should not be (p4)
       p4 should be(p5)
       p5 should be(p4)
-      val p6 = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p6 = Flow(List(1, 2, 3).iterator).toPublisher()
       p1 should not be (p6)
       p6 should not be (p1)
     }
diff --git a/akka-stream/src/test/scala/akka/stream/FlowIteratorSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowIteratorSpec.scala
index 4792dea6d6..c42f20588c 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowIteratorSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowIteratorSpec.scala
@@ -14,7 +14,7 @@ import akka.stream.scaladsl.Flow
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowIteratorSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 4,
@@ -23,7 +23,7 @@ class FlowIteratorSpec extends AkkaSpec {
   "A Flow based on an iterator" must {
     "produce elements" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -37,7 +37,7 @@ class FlowIteratorSpec extends AkkaSpec {
     }
     "complete empty" in {
-      val p = Flow(List.empty[Int].iterator).toPublisher(materializer)
+      val p = Flow(List.empty[Int].iterator).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       c.expectComplete()
@@ -49,7 +49,7 @@ class FlowIteratorSpec extends AkkaSpec {
     }
     "produce elements with multiple subscribers" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c1)
@@ -73,7 +73,7 @@ class FlowIteratorSpec extends AkkaSpec {
     }
     "produce elements to later subscriber" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c1)
@@ -96,7 +96,7 @@ class FlowIteratorSpec extends AkkaSpec {
     }
     "produce elements with one transformation step" in {
-      val p = Flow(List(1, 2, 3).iterator).map(_ * 2).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).map(_ * 2).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -108,7 +108,7 @@ class FlowIteratorSpec extends AkkaSpec {
     }
     "produce elements with two transformation steps" in {
-      val p = Flow(List(1, 2, 3, 4).iterator).filter(_ % 2 == 0).map(_ * 2).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3, 4).iterator).filter(_ % 2 == 0).map(_ * 2).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
@@ -120,7 +120,7 @@ class FlowIteratorSpec extends AkkaSpec {
     "allow cancel before receiving all elements" in {
       val count = 100000
-      val p = Flow((1 to count).iterator).toPublisher(materializer)
+      val p = Flow((1 to count).iterator).toPublisher()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(c)
       val sub = c.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowMapFutureSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowMapFutureSpec.scala
index e954c356bb..259eb78344 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowMapFutureSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowMapFutureSpec.scala
@@ -17,7 +17,7 @@ import scala.concurrent.Await
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowMapFutureSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     dispatcher = "akka.test.stream-dispatcher"))
   "A Flow with mapFuture" must {
@@ -25,7 +25,7 @@ class FlowMapFutureSpec extends AkkaSpec {
     "produce future elements" in {
       val c = StreamTestKit.SubscriberProbe[Int]()
       implicit val ec = system.dispatcher
-      val p = Flow(1 to 3).mapFuture(n ⇒ Future(n)).produceTo(c, materializer)
+      val p = Flow(1 to 3).mapFuture(n ⇒ Future(n)).produceTo(c)
       val sub = c.expectSubscription()
       sub.request(2)
       c.expectNext(1)
@@ -42,7 +42,7 @@ class FlowMapFutureSpec extends AkkaSpec {
       val p = Flow(1 to 50).mapFuture(n ⇒ Future {
         Thread.sleep(ThreadLocalRandom.current().nextInt(1, 10))
         n
-      }).produceTo(c, materializer)
+      }).produceTo(c)
       val sub = c.expectSubscription()
       sub.request(1000)
       for (n ← 1 to 50) c.expectNext(n)
@@ -56,7 +56,7 @@ class FlowMapFutureSpec extends AkkaSpec {
       val p = Flow(1 to 20).mapFuture(n ⇒ Future {
         probe.ref ! n
         n
-      }).produceTo(c, materializer)
+      }).produceTo(c)
       val sub = c.expectSubscription()
       // nothing before requested
       probe.expectNoMsg(500.millis)
@@ -84,7 +84,7 @@ class FlowMapFutureSpec extends AkkaSpec {
           Await.ready(latch, 10.seconds)
           n
         }
-      }).produceTo(c, materializer)
+      }).produceTo(c)
       val sub = c.expectSubscription()
       sub.request(10)
       c.expectError.getMessage should be("err1")
@@ -103,7 +103,7 @@ class FlowMapFutureSpec extends AkkaSpec {
            n
          }
        }).
-        produceTo(c, materializer)
+        produceTo(c)
       val sub = c.expectSubscription()
       sub.request(10)
       c.expectError.getMessage should be("err2")
diff --git a/akka-stream/src/test/scala/akka/stream/FlowMapSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowMapSpec.scala
index 3e21d88459..a9ea38aa3a 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowMapSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowMapSpec.scala
@@ -18,7 +18,7 @@ class FlowMapSpec extends AkkaSpec with ScriptedTest {
     maxFanOutBufferSize = 16,
     dispatcher = "akka.test.stream-dispatcher")
-  val gen = FlowMaterializer(settings)
+  implicit val materializer = FlowMaterializer(settings)
   "A Map" must {
@@ -31,7 +31,7 @@ class FlowMapSpec extends AkkaSpec with ScriptedTest {
       val probe = StreamTestKit.SubscriberProbe[Int]()
       Flow(List(1).iterator).
         map(_ + 1).map(_ + 1).map(_ + 1).map(_ + 1).map(_ + 1).
-        toPublisher(gen).subscribe(probe)
+        toPublisher().subscribe(probe)
       val subscription = probe.expectSubscription()
       for (_ ← 1 to 10000) {
diff --git a/akka-stream/src/test/scala/akka/stream/FlowMergeSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowMergeSpec.scala
index 6afac6d0cb..5f59df319f 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowMergeSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowMergeSpec.scala
@@ -19,10 +19,10 @@ class FlowMergeSpec extends TwoStreamsSetup {
     "work in the happy case" in {
       // Different input sizes (4 and 6)
-      val source1 = Flow((1 to 4).iterator).toPublisher(materializer)
-      val source2 = Flow((5 to 10).iterator).toPublisher(materializer)
-      val source3 = Flow(List.empty[Int].iterator).toPublisher(materializer)
-      val p = Flow(source1).merge(source2).merge(source3).toPublisher(materializer)
+      val source1 = Flow((1 to 4).iterator).toPublisher()
+      val source2 = Flow((5 to 10).iterator).toPublisher()
+      val source3 = Flow(List.empty[Int].iterator).toPublisher()
+      val p = Flow(source1).merge(source2).merge(source3).toPublisher()
       val probe = StreamTestKit.SubscriberProbe[Int]()
       p.subscribe(probe)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowOnCompleteSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowOnCompleteSpec.scala
index 3d945a4006..7fd33b06df 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowOnCompleteSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowOnCompleteSpec.scala
@@ -19,7 +19,7 @@ import scala.util.control.NoStackTrace
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 16,
     initialFanOutBufferSize = 1,
@@ -31,7 +31,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest {
     "invoke callback on normal completion" in {
       val onCompleteProbe = TestProbe()
       val p = StreamTestKit.PublisherProbe[Int]()
-      Flow(p).onComplete({ onCompleteProbe.ref ! _ }, materializer)
+      Flow(p).onComplete { onCompleteProbe.ref ! _ }
       val proc = p.expectSubscription
       proc.expectRequest()
       proc.sendNext(42)
@@ -43,7 +43,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest {
     "yield the first error" in {
       val onCompleteProbe = TestProbe()
       val p = StreamTestKit.PublisherProbe[Int]()
-      Flow(p).onComplete({ onCompleteProbe.ref ! _ }, materializer)
+      Flow(p).onComplete { onCompleteProbe.ref ! _ }
       val proc = p.expectSubscription
       proc.expectRequest()
       val ex = new RuntimeException("ex") with NoStackTrace
@@ -55,7 +55,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest {
     "invoke callback for an empty stream" in {
       val onCompleteProbe = TestProbe()
       val p = StreamTestKit.PublisherProbe[Int]()
-      Flow(p).onComplete({ onCompleteProbe.ref ! _ }, materializer)
+      Flow(p).onComplete { onCompleteProbe.ref ! _ }
       val proc = p.expectSubscription
       proc.expectRequest()
       proc.sendComplete()
@@ -70,9 +70,9 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest {
       Flow(p).map { x ⇒
         onCompleteProbe.ref ! ("map-" + x)
         x
-      }.foreach({
+      }.foreach {
         x ⇒ onCompleteProbe.ref ! ("foreach-" + x)
-      }, materializer).onComplete { onCompleteProbe.ref ! _ }
+      }.onComplete { onCompleteProbe.ref ! _ }
       val proc = p.expectSubscription
       proc.expectRequest()
       proc.sendNext(42)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowPrefixAndTailSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowPrefixAndTailSpec.scala
index 9ce4fcfc03..2a96960f8a 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowPrefixAndTailSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowPrefixAndTailSpec.scala
@@ -14,7 +14,7 @@ import scala.util.control.NoStackTrace
 class FlowPrefixAndTailSpec extends AkkaSpec {
-  val m = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -26,30 +26,30 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
     val testException = new Exception("test") with NoStackTrace
     "work on empty input" in {
-      Await.result(Flow(Nil).prefixAndTail(10).toFuture(m), 3.seconds) should be((Nil, EmptyPublisher))
+      Await.result(Flow(Nil).prefixAndTail(10).toFuture(), 3.seconds) should be((Nil, EmptyPublisher))
     }
     "work on short input" in {
-      Await.result(Flow(List(1, 2, 3)).prefixAndTail(10).toFuture(m), 3.seconds) should be((List(1, 2, 3), EmptyPublisher))
+      Await.result(Flow(List(1, 2, 3)).prefixAndTail(10).toFuture(), 3.seconds) should be((List(1, 2, 3), EmptyPublisher))
     }
     "work on longer inputs" in {
-      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(5).toFuture(m), 3.seconds)
+      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(5).toFuture(), 3.seconds)
       takes should be(1 to 5)
-      Await.result(Flow(tail).grouped(6).toFuture(m), 3.seconds) should be(6 to 10)
+      Await.result(Flow(tail).grouped(6).toFuture(), 3.seconds) should be(6 to 10)
     }
     "handle zero take count" in {
-      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(0).toFuture(m), 3.seconds)
+      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(0).toFuture(), 3.seconds)
       takes should be(Nil)
-      Await.result(Flow(tail).grouped(11).toFuture(m), 3.seconds) should be(1 to 10)
+      Await.result(Flow(tail).grouped(11).toFuture(), 3.seconds) should be(1 to 10)
     }
     "work if size of take is equals to stream size" in {
-      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(10).toFuture(m), 3.seconds)
+      val (takes, tail) = Await.result(Flow((1 to 10).iterator).prefixAndTail(10).toFuture(), 3.seconds)
       takes should be(1 to 10)
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(tail).produceTo(subscriber, m)
+      Flow(tail).produceTo(subscriber)
       subscriber.expectCompletedOrSubscriptionFollowedByComplete()
     }
@@ -57,7 +57,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[(Seq[Int], Publisher[Int])]()
-      Flow(publisher).prefixAndTail(3).produceTo(subscriber, m)
+      Flow(publisher).prefixAndTail(3).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -75,7 +75,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[(Seq[Int], Publisher[Int])]()
-      Flow(publisher).prefixAndTail(1).produceTo(subscriber, m)
+      Flow(publisher).prefixAndTail(1).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -90,7 +90,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       subscriber.expectComplete()
       val substreamSubscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(tail).produceTo(substreamSubscriber, m)
+      Flow(tail).produceTo(substreamSubscriber)
       substreamSubscriber.expectSubscription()
       upstream.sendError(testException)
@@ -102,7 +102,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[(Seq[Int], Publisher[Int])]()
-      Flow(publisher).prefixAndTail(3).produceTo(subscriber, m)
+      Flow(publisher).prefixAndTail(3).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -120,7 +120,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       val publisher = StreamTestKit.PublisherProbe[Int]()
       val subscriber = StreamTestKit.SubscriberProbe[(Seq[Int], Publisher[Int])]()
-      Flow(publisher).prefixAndTail(1).produceTo(subscriber, m)
+      Flow(publisher).prefixAndTail(1).produceTo(subscriber)
       val upstream = publisher.expectSubscription()
       val downstream = subscriber.expectSubscription()
@@ -135,7 +135,7 @@ class FlowPrefixAndTailSpec extends AkkaSpec {
       subscriber.expectComplete()
       val substreamSubscriber = StreamTestKit.SubscriberProbe[Int]()
-      Flow(tail).produceTo(substreamSubscriber, m)
+      Flow(tail).produceTo(substreamSubscriber)
       substreamSubscriber.expectSubscription().cancel()
       upstream.expectCancellation()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowProduceToSubscriberSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowProduceToSubscriberSpec.scala
index 012022e213..b0b0909c2f 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowProduceToSubscriberSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowProduceToSubscriberSpec.scala
@@ -9,13 +9,13 @@ import akka.stream.testkit.StreamTestKit
 class FlowProduceToSubscriberSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
+  implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
   "A Flow with toPublisher" must {
     "produce elements to the subscriber" in {
       val c = StreamTestKit.SubscriberProbe[Int]()
-      Flow(List(1, 2, 3)).produceTo(c, materializer)
+      Flow(List(1, 2, 3)).produceTo(c)
       val s = c.expectSubscription()
       s.request(3)
       c.expectNext(1)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowSpec.scala
index 501f5ee373..f5698fcd88 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowSpec.scala
@@ -27,7 +27,7 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece
     initialFanOutBufferSize = 1,
     maxFanOutBufferSize = 16,
     dispatcher = "akka.test.stream-dispatcher")
-  val mat = FlowMaterializer(settings)
+  implicit val mat = FlowMaterializer(settings)
   val identity: Flow[Any] ⇒ Flow[Any] = in ⇒ in.map(e ⇒ e)
   val identity2: Flow[Any] ⇒ Flow[Any] = in ⇒ identity(in)
@@ -321,7 +321,7 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece
     "be covariant" in {
       val f1: Flow[Fruit] = Flow(() ⇒ new Apple)
-      val p1: Publisher[Fruit] = Flow(() ⇒ new Apple).toPublisher(mat)
+      val p1: Publisher[Fruit] = Flow(() ⇒ new Apple).toPublisher()
       val f2: Flow[Publisher[Fruit]] = Flow(() ⇒ new Apple).splitWhen(_ ⇒ true)
       val f3: Flow[(Boolean, Publisher[Fruit])] = Flow(() ⇒ new Apple).groupBy(_ ⇒ true)
       val f4: Flow[(immutable.Seq[Apple], Publisher[Fruit])] = Flow(() ⇒ new Apple).prefixAndTail(1)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowSplitWhenSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowSplitWhenSpec.scala
index 962af313ad..8ea77c70da 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowSplitWhenSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowSplitWhenSpec.scala
@@ -12,7 +12,7 @@ import akka.stream.scaladsl.Flow
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowSplitWhenSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -32,8 +32,8 @@ class FlowSplitWhenSpec extends AkkaSpec {
   }
   class SubstreamsSupport(splitWhen: Int = 3, elementCount: Int = 6) {
-    val source = Flow((1 to elementCount).iterator).toPublisher(materializer)
-    val groupStream = Flow(source).splitWhen(_ == splitWhen).toPublisher(materializer)
+    val source = Flow((1 to elementCount).iterator).toPublisher()
+    val groupStream = Flow(source).splitWhen(_ == splitWhen).toPublisher()
     val masterSubscriber = StreamTestKit.SubscriberProbe[Publisher[Int]]()
     groupStream.subscribe(masterSubscriber)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowTakeWithinSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowTakeWithinSpec.scala
index b423fb2777..69c7e5cfc7 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowTakeWithinSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowTakeWithinSpec.scala
@@ -11,7 +11,7 @@ import akka.stream.scaladsl.Flow
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowTakeWithinSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     dispatcher = "akka.test.stream-dispatcher"))
   "A TakeWithin" must {
@@ -20,7 +20,7 @@ class FlowTakeWithinSpec extends AkkaSpec {
       val input = Iterator.from(1)
       val p = StreamTestKit.PublisherProbe[Int]()
       val c = StreamTestKit.SubscriberProbe[Int]()
-      Flow(p).takeWithin(1.second).produceTo(c, materializer)
+      Flow(p).takeWithin(1.second).produceTo(c)
       val pSub = p.expectSubscription
       val cSub = c.expectSubscription
       cSub.request(100)
@@ -40,7 +40,7 @@
     "deliver bufferd elements onComplete before the timeout" in {
       val c = StreamTestKit.SubscriberProbe[Int]()
-      Flow(1 to 3).takeWithin(1.second).produceTo(c, materializer)
+      Flow(1 to 3).takeWithin(1.second).produceTo(c)
       val cSub = c.expectSubscription
       c.expectNoMsg(200.millis)
       cSub.request(100)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowTeeSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowTeeSpec.scala
index d95450b791..c0bad11047 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowTeeSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowTeeSpec.scala
@@ -11,7 +11,7 @@ import akka.stream.testkit.StreamTestKit
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class FlowBroadcastSpec extends AkkaSpec {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 16,
     initialFanOutBufferSize = 1,
@@ -25,7 +25,7 @@ class FlowBroadcastSpec extends AkkaSpec {
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       val p = Flow(List(1, 2, 3)).
         broadcast(c2).
-        toPublisher(materializer)
+        toPublisher()
       p.subscribe(c1)
       val sub1 = c1.expectSubscription()
       val sub2 = c2.expectSubscription()
@@ -50,7 +50,7 @@ class FlowBroadcastSpec extends AkkaSpec {
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       val p = Flow(List(1, 2, 3)).
         broadcast(c2).
-        toPublisher(materializer)
+        toPublisher()
       p.subscribe(c1)
       val sub1 = c1.expectSubscription()
       sub1.cancel()
@@ -67,7 +67,7 @@ class FlowBroadcastSpec extends AkkaSpec {
       val c2 = StreamTestKit.SubscriberProbe[Int]()
       val p = Flow(List(1, 2, 3)).
         broadcast(c1).
-        toPublisher(materializer)
+        toPublisher()
       p.subscribe(c2)
       val sub1 = c1.expectSubscription()
       sub1.cancel()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowTimerTransformerSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowTimerTransformerSpec.scala
index 39bdf78729..9b142086ad 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowTimerTransformerSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowTimerTransformerSpec.scala
@@ -18,7 +18,7 @@ class FlowTimerTransformerSpec extends AkkaSpec {
   import system.dispatcher
-  val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
+  implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher"))
   "A Flow with TimerTransformer operations" must {
     "produce scheduled ticks as expected" in {
@@ -35,7 +35,7 @@ class FlowTimerTransformerSpec extends AkkaSpec {
           }
           override def isComplete: Boolean = !isTimerActive("tick")
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -61,7 +61,7 @@ class FlowTimerTransformerSpec extends AkkaSpec {
          }
          override def isComplete: Boolean = !isTimerActive("tick")
        }).
-        consume(materializer)
+        consume()
       val pSub = p.expectSubscription
       expectMsg("tick-1")
      expectMsg("tick-2")
@@ -79,7 +79,7 @@ class FlowTimerTransformerSpec extends AkkaSpec {
           def onNext(element: Int) = Nil
           override def onTimer(timerKey: Any) =
             throw exception
-        }).toPublisher(materializer)
+        }).toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
diff --git a/akka-stream/src/test/scala/akka/stream/FlowToFutureSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowToFutureSpec.scala
index 7bbec76b01..6c1a4c83cb 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowToFutureSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowToFutureSpec.scala
@@ -14,7 +14,7 @@ import akka.stream.scaladsl.Flow
 class FlowToFutureSpec extends AkkaSpec with ScriptedTest {
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 16,
     initialFanOutBufferSize = 1,
@@ -25,7 +25,7 @@ class FlowToFutureSpec extends AkkaSpec with ScriptedTest {
     "yield the first value" in {
       val p = StreamTestKit.PublisherProbe[Int]()
-      val f = Flow(p).toFuture(materializer)
+      val f = Flow(p).toFuture()
       val proc = p.expectSubscription
       proc.expectRequest()
       proc.sendNext(42)
@@ -35,7 +35,7 @@ class FlowToFutureSpec extends AkkaSpec with ScriptedTest {
     "yield the first error" in {
       val p = StreamTestKit.PublisherProbe[Int]()
-      val f = Flow(p).toFuture(materializer)
+      val f = Flow(p).toFuture()
       val proc = p.expectSubscription
       proc.expectRequest()
       val ex = new RuntimeException("ex")
@@ -46,7 +46,7 @@ class FlowToFutureSpec extends AkkaSpec with ScriptedTest {
     "yield NoSuchElementExcption for empty stream" in {
       val p = StreamTestKit.PublisherProbe[Int]()
-      val f = Flow(p).toFuture(materializer)
+      val f = Flow(p).toFuture()
       val proc = p.expectSubscription
       proc.expectRequest()
       proc.sendComplete()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowTransformRecoverSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowTransformRecoverSpec.scala
index 5d761aea51..5837de84f6 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowTransformRecoverSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowTransformRecoverSpec.scala
@@ -31,7 +31,7 @@ object FlowTransformRecoverSpec {
 class FlowTransformRecoverSpec extends AkkaSpec {
   import FlowTransformRecoverSpec._
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -40,7 +40,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
   "A Flow with transformRecover operations" must {
     "produce one-to-one transformation as expected" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -54,7 +54,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
             case Some(_) ⇒ List(-1)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -68,7 +68,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "produce one-to-several transformation as expected" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -82,7 +82,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
             case Some(_) ⇒ List(-1)
          }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -99,7 +99,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "produce dropping transformation as expected" in {
-      val p = Flow(List(1, 2, 3, 4).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3, 4).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -113,7 +113,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
             case Some(_) ⇒ List(-1)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -127,7 +127,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "produce multi-step transformation as expected" in {
-      val p = Flow(List("a", "bc", "def").iterator).toPublisher(materializer)
+      val p = Flow(List("a", "bc", "def").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new TryRecoveryTransformer[String, Int] {
           var concat = ""
@@ -148,7 +148,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
            case Some(_) ⇒ List(-1)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(c1)
       val sub1 = c1.expectSubscription()
@@ -171,7 +171,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "invoke onComplete when done" in {
-      val p = Flow(List("a").iterator).toPublisher(materializer)
+      val p = Flow(List("a").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new TryRecoveryTransformer[String, String] {
           var s = ""
@@ -181,7 +181,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
           }
           override def onTermination(e: Option[Throwable]) = List(s + "B")
         }).
-        toPublisher(materializer)
+        toPublisher()
       val c = StreamTestKit.SubscriberProbe[String]()
       p2.subscribe(c)
       val s = c.expectSubscription()
@@ -201,7 +201,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
           }
           override def isComplete = s == "Success(1)"
         }).
-        toPublisher(materializer)
+        toPublisher()
       val proc = p.expectSubscription
       val c = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(c)
@@ -226,7 +226,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
           override def isComplete = s == "Success(1)"
           override def onTermination(e: Option[Throwable]) = List(s.length + 10)
         }).
-        toPublisher(materializer)
+        toPublisher()
       val proc = p.expectSubscription
       val c = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(c)
@@ -241,7 +241,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "report error when exception is thrown" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           override def onNext(elem: Int) = {
@@ -250,7 +250,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
           }
           override def onError(e: Throwable) = List(-1)
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -280,7 +280,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
            case Some(_) ⇒ List(-1, -2, -3)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -333,7 +333,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
             }
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val proc = p.expectSubscription()
       val c = StreamTestKit.SubscriberProbe[String]()
       p2.subscribe(c)
@@ -354,7 +354,7 @@ class FlowTransformRecoverSpec extends AkkaSpec {
           override def onNext(in: Int) = List(in)
           override def onError(e: Throwable) = throw e
         }).
-        toPublisher(materializer)
+        toPublisher()
       val proc = p.expectSubscription()
       val c = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(c)
@@ -367,13 +367,13 @@ class FlowTransformRecoverSpec extends AkkaSpec {
     }
     "support cancel as expected" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           override def onNext(elem: Int) = List(elem, elem)
           override def onError(e: Throwable) = List(-1)
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
diff --git a/akka-stream/src/test/scala/akka/stream/FlowTransformSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowTransformSpec.scala
index 3266ca7acf..7c67033310 100644
--- a/akka-stream/src/test/scala/akka/stream/FlowTransformSpec.scala
+++ b/akka-stream/src/test/scala/akka/stream/FlowTransformSpec.scala
@@ -18,7 +18,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
   import system.dispatcher
-  val materializer = FlowMaterializer(MaterializerSettings(
+  implicit val materializer = FlowMaterializer(MaterializerSettings(
     initialInputBufferSize = 2,
     maximumInputBufferSize = 2,
     initialFanOutBufferSize = 2,
@@ -27,7 +27,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
   "A Flow with transform operations" must {
     "produce one-to-one transformation as expected" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -36,7 +36,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
            List(tot)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -50,7 +50,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     }
     "produce one-to-several transformation as expected" in {
-      val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -59,7 +59,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
            Vector.fill(elem)(tot)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -76,7 +76,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     }
     "produce dropping transformation as expected" in {
-      val p = Flow(List(1, 2, 3, 4).iterator).toPublisher(materializer)
+      val p = Flow(List(1, 2, 3, 4).iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[Int, Int] {
           var tot = 0
@@ -85,7 +85,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
            if (elem % 2 == 0) Nil else List(tot)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val subscriber = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(subscriber)
       val subscription = subscriber.expectSubscription()
@@ -99,7 +99,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     }
     "produce multi-step transformation as expected" in {
-      val p = Flow(List("a", "bc", "def").iterator).toPublisher(materializer)
+      val p = Flow(List("a", "bc", "def").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[String, Int] {
           var concat = ""
@@ -115,7 +115,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
            List(tot)
           }
         }).
-        toPublisher(materializer)
+        toPublisher()
       val c1 = StreamTestKit.SubscriberProbe[Int]()
       p2.subscribe(c1)
       val sub1 = c1.expectSubscription()
@@ -138,7 +138,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     }
     "invoke onComplete when done" in {
-      val p = Flow(List("a").iterator).toPublisher(materializer)
+      val p = Flow(List("a").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[String, String] {
           var s = ""
@@ -148,7 +148,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
           }
           override def onTermination(e: Option[Throwable]) = List(s + "B")
         }).
-        toPublisher(materializer)
+        toPublisher()
       val c = StreamTestKit.SubscriberProbe[String]()
       p2.subscribe(c)
       val s = c.expectSubscription()
@@ -159,7 +159,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     "invoke cleanup when done" in {
       val cleanupProbe = TestProbe()
-      val p = Flow(List("a").iterator).toPublisher(materializer)
+      val p = Flow(List("a").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[String, String] {
           var s = ""
@@ -170,7 +170,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
           override def onTermination(e: Option[Throwable]) = List(s + "B")
           override def cleanup() = cleanupProbe.ref ! s
         }).
-        toPublisher(materializer)
+        toPublisher()
       val c = StreamTestKit.SubscriberProbe[String]()
       p2.subscribe(c)
       val s = c.expectSubscription()
@@ -182,7 +182,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
     "invoke cleanup when done consume" in {
       val cleanupProbe = TestProbe()
-      val p = Flow(List("a").iterator).toPublisher(materializer)
+      val p = Flow(List("a").iterator).toPublisher()
       Flow(p).
         transform(new Transformer[String, String] {
           var s = "x"
@@ -192,13 +192,13 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
           }
           override def cleanup() = cleanupProbe.ref ! s
         }).
-        consume(materializer)
+        consume()
       cleanupProbe.expectMsg("a")
     }
     "invoke cleanup when done after error" in {
       val cleanupProbe = TestProbe()
-      val p = Flow(List("a", "b", "c").iterator).toPublisher(materializer)
+      val p = Flow(List("a", "b", "c").iterator).toPublisher()
       val p2 = Flow(p).
         transform(new Transformer[String, String] {
           var s = ""
@@ -213,7 +213,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d
           override def onTermination(e: Option[Throwable]) = List(s + "B")
           override def cleanup() = cleanupProbe.ref ! s
         }).
- toPublisher(materializer) + toPublisher() val c = StreamTestKit.SubscriberProbe[String]() p2.subscribe(c) val s = c.expectSubscription() @@ -235,7 +235,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d } override def isComplete = s == "1" }). - toPublisher(materializer) + toPublisher() val proc = p.expectSubscription val c = StreamTestKit.SubscriberProbe[Int]() p2.subscribe(c) @@ -262,7 +262,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d override def onTermination(e: Option[Throwable]) = List(s.length + 10) override def cleanup() = cleanupProbe.ref ! s }). - toPublisher(materializer) + toPublisher() val proc = p.expectSubscription val c = StreamTestKit.SubscriberProbe[Int]() p2.subscribe(c) @@ -278,7 +278,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d } "report error when exception is thrown" in { - val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer) + val p = Flow(List(1, 2, 3).iterator).toPublisher() val p2 = Flow(p). transform(new Transformer[Int, Int] { override def onNext(elem: Int) = { @@ -286,7 +286,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d else List(elem, elem) } }). - toPublisher(materializer) + toPublisher() val subscriber = StreamTestKit.SubscriberProbe[Int]() p2.subscribe(subscriber) val subscription = subscriber.expectSubscription() @@ -300,12 +300,12 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d } "support cancel as expected" in { - val p = Flow(List(1, 2, 3).iterator).toPublisher(materializer) + val p = Flow(List(1, 2, 3).iterator).toPublisher() val p2 = Flow(p). transform(new Transformer[Int, Int] { override def onNext(elem: Int) = List(elem, elem) }). - toPublisher(materializer) + toPublisher() val subscriber = StreamTestKit.SubscriberProbe[Int]() p2.subscribe(subscriber) val subscription = subscriber.expectSubscription() @@ -319,13 +319,13 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d } "support producing elements from empty inputs" in { - val p = Flow(List.empty[Int].iterator).toPublisher(materializer) + val p = Flow(List.empty[Int].iterator).toPublisher() val p2 = Flow(p). transform(new Transformer[Int, Int] { override def onNext(elem: Int) = Nil override def onTermination(e: Option[Throwable]) = List(1, 2, 3) }). 
- toPublisher(materializer) + toPublisher() val subscriber = StreamTestKit.SubscriberProbe[Int]() p2.subscribe(subscriber) val subscription = subscriber.expectSubscription() @@ -359,7 +359,7 @@ class FlowTransformSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.d case _ ⇒ Nil } } - }).produceTo(subscriber, materializer) + }).produceTo(subscriber) val subscription = subscriber.expectSubscription() subscription.request(10) diff --git a/akka-stream/src/test/scala/akka/stream/FlowZipSpec.scala b/akka-stream/src/test/scala/akka/stream/FlowZipSpec.scala index 0a0d478f31..32c32a5aca 100644 --- a/akka-stream/src/test/scala/akka/stream/FlowZipSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/FlowZipSpec.scala @@ -16,9 +16,9 @@ class FlowZipSpec extends TwoStreamsSetup { "work in the happy case" in { // Different input sizes (4 and 6) - val source1 = Flow((1 to 4).iterator).toPublisher(materializer) - val source2 = Flow(List("A", "B", "C", "D", "E", "F").iterator).toPublisher(materializer) - val p = Flow(source1).zip(source2).toPublisher(materializer) + val source1 = Flow((1 to 4).iterator).toPublisher() + val source2 = Flow(List("A", "B", "C", "D", "E", "F").iterator).toPublisher() + val p = Flow(source1).zip(source2).toPublisher() val probe = StreamTestKit.SubscriberProbe[(Int, String)]() p.subscribe(probe) diff --git a/akka-stream/src/test/scala/akka/stream/ImplicitFlowMaterializerSpec.scala b/akka-stream/src/test/scala/akka/stream/ImplicitFlowMaterializerSpec.scala new file mode 100644 index 0000000000..d21955d19c --- /dev/null +++ b/akka-stream/src/test/scala/akka/stream/ImplicitFlowMaterializerSpec.scala @@ -0,0 +1,43 @@ +/** + * Copyright (C) 2014 Typesafe Inc. + */ +package akka.stream + +import akka.actor.Actor +import akka.actor.Props +import akka.pattern.pipe +import akka.stream.scaladsl.ImplicitFlowMaterializer +import akka.stream.scaladsl.Flow +import akka.stream.testkit.AkkaSpec +import akka.testkit._ + +object ImplicitFlowMaterializerSpec { + class SomeActor(input: List[String]) extends Actor with ImplicitFlowMaterializer { + + override def flowMaterializerSettings = MaterializerSettings(dispatcher = "akka.test.stream-dispatcher") + + val flow = Flow(input).map(_.toUpperCase()).fold("")(_ + _) + + def receive = { + case "run" ⇒ + // toFuture takes an implicit FlowMaterializer parameter, which is provided by ImplicitFlowMaterializer + val futureResult = flow.toFuture() + import context.dispatcher + futureResult pipeTo sender() + } + } +} + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class ImplicitFlowMaterializerSpec extends AkkaSpec with ImplicitSender { + import ImplicitFlowMaterializerSpec._ + + "An ImplicitFlowMaterializer" must { + + "provide implicit FlowMaterializer" in { + val actor = system.actorOf(Props(classOf[SomeActor], List("a", "b", "c")).withDispatcher("akka.test.stream-dispatcher")) + actor ! 
"run" + expectMsg("ABC") + } + } +} \ No newline at end of file diff --git a/akka-stream/src/test/scala/akka/stream/TickPublisherSpec.scala b/akka-stream/src/test/scala/akka/stream/TickPublisherSpec.scala index 9988fe9f33..b64971686a 100644 --- a/akka-stream/src/test/scala/akka/stream/TickPublisherSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/TickPublisherSpec.scala @@ -12,14 +12,14 @@ import scala.util.control.NoStackTrace @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class TickPublisherSpec extends AkkaSpec { - val materializer = FlowMaterializer(MaterializerSettings( + implicit val materializer = FlowMaterializer(MaterializerSettings( dispatcher = "akka.test.stream-dispatcher")) "A Flow based on tick publisher" must { "produce ticks" in { val tickGen = Iterator from 1 val c = StreamTestKit.SubscriberProbe[String]() - Flow(1.second, 500.millis, () ⇒ "tick-" + tickGen.next()).produceTo(c, materializer) + Flow(1.second, 500.millis, () ⇒ "tick-" + tickGen.next()).produceTo(c) val sub = c.expectSubscription() sub.request(3) c.expectNoMsg(600.millis) @@ -35,7 +35,7 @@ class TickPublisherSpec extends AkkaSpec { "drop ticks when not requested" in { val tickGen = Iterator from 1 val c = StreamTestKit.SubscriberProbe[String]() - Flow(1.second, 1.second, () ⇒ "tick-" + tickGen.next()).produceTo(c, materializer) + Flow(1.second, 1.second, () ⇒ "tick-" + tickGen.next()).produceTo(c) val sub = c.expectSubscription() sub.request(2) c.expectNext("tick-1") @@ -52,7 +52,7 @@ class TickPublisherSpec extends AkkaSpec { "produce ticks with multiple subscribers" in { val tickGen = Iterator from 1 - val p = Flow(1.second, 1.second, () ⇒ "tick-" + tickGen.next()).toPublisher(materializer) + val p = Flow(1.second, 1.second, () ⇒ "tick-" + tickGen.next()).toPublisher() val c1 = StreamTestKit.SubscriberProbe[String]() val c2 = StreamTestKit.SubscriberProbe[String]() p.subscribe(c1) @@ -76,7 +76,7 @@ class TickPublisherSpec extends AkkaSpec { "signal onError when tick closure throws" in { val c = StreamTestKit.SubscriberProbe[String]() - Flow(1.second, 1.second, () ⇒ throw new RuntimeException("tick err") with NoStackTrace).produceTo(c, materializer) + Flow(1.second, 1.second, () ⇒ throw new RuntimeException("tick err") with NoStackTrace).produceTo(c) val sub = c.expectSubscription() sub.request(3) c.expectError.getMessage should be("tick err") @@ -84,8 +84,8 @@ class TickPublisherSpec extends AkkaSpec { "be usable with zip for a simple form of rate limiting" in { val c = StreamTestKit.SubscriberProbe[Int]() - val rate = Flow(1.second, 1.second, () ⇒ "tick").toPublisher(materializer) - Flow(1 to 100).zip(rate).map { case (n, _) ⇒ n }.produceTo(c, materializer) + val rate = Flow(1.second, 1.second, () ⇒ "tick").toPublisher() + Flow(1 to 100).zip(rate).map { case (n, _) ⇒ n }.produceTo(c) val sub = c.expectSubscription() sub.request(1000) c.expectNext(1) diff --git a/akka-stream/src/test/scala/akka/stream/TwoStreamsSetup.scala b/akka-stream/src/test/scala/akka/stream/TwoStreamsSetup.scala index e454d4775c..934f172657 100644 --- a/akka-stream/src/test/scala/akka/stream/TwoStreamsSetup.scala +++ b/akka-stream/src/test/scala/akka/stream/TwoStreamsSetup.scala @@ -11,7 +11,7 @@ import scala.util.control.NoStackTrace abstract class TwoStreamsSetup extends AkkaSpec { - val materializer = FlowMaterializer(MaterializerSettings( + implicit val materializer = FlowMaterializer(MaterializerSettings( initialInputBufferSize = 2, maximumInputBufferSize = 2, initialFanOutBufferSize = 2, @@ -28,7 +28,7 
@@ abstract class TwoStreamsSetup extends AkkaSpec { def setup(p1: Publisher[Int], p2: Publisher[Int]) = { val subscriber = StreamTestKit.SubscriberProbe[Outputs]() - operationUnderTest(Flow(p1), p2).toPublisher(materializer).subscribe(subscriber) + operationUnderTest(Flow(p1), p2).toPublisher().subscribe(subscriber) subscriber } @@ -36,7 +36,7 @@ abstract class TwoStreamsSetup extends AkkaSpec { def completedPublisher[T]: Publisher[T] = StreamTestKit.emptyPublisher[T] - def nonemptyPublisher[T](elems: Iterator[T]): Publisher[T] = Flow(elems).toPublisher(materializer) + def nonemptyPublisher[T](elems: Iterator[T]): Publisher[T] = Flow(elems).toPublisher() def soonToFailPublisher[T]: Publisher[T] = StreamTestKit.lazyErrorPublisher[T](TestException) diff --git a/akka-stream/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala b/akka-stream/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala index ce381b1071..c37bdb4861 100644 --- a/akka-stream/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala @@ -226,13 +226,13 @@ class ActorPublisherSpec extends AkkaSpec with ImplicitSender { } "work together with Flow and ActorSubscriber" in { - val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) + implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) val probe = TestProbe() val snd = system.actorOf(senderProps) val rcv = system.actorOf(receiverProps(probe.ref)) Flow(ActorPublisher[Int](snd)).collect { case n if n % 2 == 0 ⇒ "elem-" + n - }.produceTo(ActorSubscriber(rcv), materializer) + }.produceTo(ActorSubscriber(rcv)) (1 to 3) foreach { snd ! _ } probe.expectMsg("elem-2") diff --git a/akka-stream/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala b/akka-stream/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala index d5803cb223..6540f42561 100644 --- a/akka-stream/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala @@ -100,13 +100,13 @@ class ActorSubscriberSpec extends AkkaSpec with ImplicitSender { import ActorSubscriberSpec._ import ActorSubscriber._ - val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) + implicit val materializer = FlowMaterializer(MaterializerSettings(dispatcher = "akka.test.stream-dispatcher")) "An ActorSubscriber" must { "receive requested elements" in { val ref = system.actorOf(manualSubscriberProps(testActor)) - Flow(List(1, 2, 3)).produceTo(ActorSubscriber(ref), materializer) + Flow(List(1, 2, 3)).produceTo(ActorSubscriber(ref)) expectNoMsg(200.millis) ref ! "ready" // requesting 2 expectMsg(OnNext(1)) @@ -120,14 +120,14 @@ class ActorSubscriberSpec extends AkkaSpec with ImplicitSender { "signal error" in { val ref = system.actorOf(manualSubscriberProps(testActor)) val e = new RuntimeException("simulated") with NoStackTrace - Flow(() ⇒ throw e).produceTo(ActorSubscriber(ref), materializer) + Flow(() ⇒ throw e).produceTo(ActorSubscriber(ref)) ref ! "ready" expectMsg(OnError(e)) } "remember requested after restart" in { val ref = system.actorOf(manualSubscriberProps(testActor)) - Flow(1 to 7).produceTo(ActorSubscriber(ref), materializer) + Flow(1 to 7).produceTo(ActorSubscriber(ref)) ref ! 
"ready" expectMsg(OnNext(1)) expectMsg(OnNext(2)) @@ -145,7 +145,7 @@ class ActorSubscriberSpec extends AkkaSpec with ImplicitSender { "not deliver more after cancel" in { val ref = system.actorOf(manualSubscriberProps(testActor)) - Flow(1 to 5).produceTo(ActorSubscriber(ref), materializer) + Flow(1 to 5).produceTo(ActorSubscriber(ref)) ref ! "ready" expectMsg(OnNext(1)) expectMsg(OnNext(2)) @@ -155,14 +155,14 @@ class ActorSubscriberSpec extends AkkaSpec with ImplicitSender { "work with OneByOneRequestStrategy" in { val ref = system.actorOf(requestStrategySubscriberProps(testActor, OneByOneRequestStrategy)) - Flow(1 to 17).produceTo(ActorSubscriber(ref), materializer) + Flow(1 to 17).produceTo(ActorSubscriber(ref)) for (n ← 1 to 17) expectMsg(OnNext(n)) expectMsg(OnComplete) } "work with WatermarkRequestStrategy" in { val ref = system.actorOf(requestStrategySubscriberProps(testActor, WatermarkRequestStrategy(highWatermark = 10))) - Flow(1 to 17).produceTo(ActorSubscriber(ref), materializer) + Flow(1 to 17).produceTo(ActorSubscriber(ref)) for (n ← 1 to 17) expectMsg(OnNext(n)) expectMsg(OnComplete) } @@ -170,7 +170,7 @@ class ActorSubscriberSpec extends AkkaSpec with ImplicitSender { "suport custom max in flight request strategy with child workers" in { val ref = system.actorOf(streamerProps) val N = 117 - Flow(1 to N).map(Msg(_, testActor)).produceTo(ActorSubscriber(ref), materializer) + Flow(1 to N).map(Msg(_, testActor)).produceTo(ActorSubscriber(ref)) receiveN(N).toSet should be((1 to N).map(Done(_)).toSet) } diff --git a/akka-stream/src/test/scala/akka/stream/extra/FlowTimedSpec.scala b/akka-stream/src/test/scala/akka/stream/extra/FlowTimedSpec.scala index 0067b42b15..fbfd613eec 100644 --- a/akka-stream/src/test/scala/akka/stream/extra/FlowTimedSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/extra/FlowTimedSpec.scala @@ -22,7 +22,7 @@ class FlowTimedSpec extends AkkaSpec with ScriptedTest { lazy val metricsConfig = system.settings.config - val materializer = FlowMaterializer(settings) + implicit val materializer = FlowMaterializer(settings) "Timed Flow" must { @@ -83,9 +83,9 @@ class FlowTimedSpec extends AkkaSpec with ScriptedTest { val duct: Duct[Int, Long] = Duct[Int].map(_.toLong).timedIntervalBetween(in ⇒ in % 2 == 1, d ⇒ probe.ref ! d) val c1 = StreamTestKit.SubscriberProbe[Long]() - val c2: Subscriber[Int] = duct.produceTo(c1, materializer) + val c2: Subscriber[Int] = duct.produceTo(c1) - val p = Flow(List(1, 2, 3)).toPublisher(materializer) + val p = Flow(List(1, 2, 3)).toPublisher() p.subscribe(c2) val s = c1.expectSubscription() @@ -111,12 +111,12 @@ class FlowTimedSpec extends AkkaSpec with ScriptedTest { map(_.toString), duration ⇒ probe.ref ! duration). map { s: String ⇒ s + "!" 
} - val (ductIn: Subscriber[Int], ductOut: Publisher[String]) = duct.build(materializer) + val (ductIn: Subscriber[Int], ductOut: Publisher[String]) = duct.build() val c1 = StreamTestKit.SubscriberProbe[String]() val c2 = ductOut.subscribe(c1) - val p = Flow(0 to 100).toPublisher(materializer) + val p = Flow(0 to 100).toPublisher() p.subscribe(ductIn) val s = c1.expectSubscription() diff --git a/akka-stream/src/test/scala/akka/stream/io/TcpFlowSpec.scala b/akka-stream/src/test/scala/akka/stream/io/TcpFlowSpec.scala index a8e161d992..a4fe85a8d3 100644 --- a/akka-stream/src/test/scala/akka/stream/io/TcpFlowSpec.scala +++ b/akka-stream/src/test/scala/akka/stream/io/TcpFlowSpec.scala @@ -116,7 +116,7 @@ class TcpFlowSpec extends AkkaSpec { maxFanOutBufferSize = 2, dispatcher = "akka.test.stream-dispatcher") - val materializer = FlowMaterializer(settings) + implicit val materializer = FlowMaterializer(settings) // FIXME: get it from TestUtil def temporaryServerAddress: InetSocketAddress = { @@ -202,9 +202,9 @@ class TcpFlowSpec extends AkkaSpec { } def echoServer(serverAddress: InetSocketAddress = temporaryServerAddress): Future[Unit] = - Flow(bind(serverAddress).connectionStream).foreach({ conn ⇒ + Flow(bind(serverAddress).connectionStream).foreach { conn ⇒ conn.inputStream.subscribe(conn.outputStream) - }, materializer) + } "Outgoing TCP stream" must { @@ -239,9 +239,9 @@ class TcpFlowSpec extends AkkaSpec { val expectedOutput = ByteString(Array.tabulate(256)(_.asInstanceOf[Byte])) serverConnection.read(256) - Flow(tcpProcessor).consume(materializer) + Flow(tcpProcessor).consume() - Flow(testInput).toPublisher(materializer).subscribe(tcpProcessor) + Flow(testInput).toPublisher().subscribe(tcpProcessor) serverConnection.waitRead() should be(expectedOutput) } @@ -256,7 +256,7 @@ class TcpFlowSpec extends AkkaSpec { for (in ← testInput) serverConnection.write(in) new TcpWriteProbe(tcpProcessor) // Just register an idle upstream - val resultFuture = Flow(tcpProcessor).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture(materializer) + val resultFuture = Flow(tcpProcessor).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture() serverConnection.confirmedClose() Await.result(resultFuture, 3.seconds) should be(expectedOutput) @@ -349,8 +349,8 @@ class TcpFlowSpec extends AkkaSpec { val testInput = Iterator.range(0, 256).map(ByteString(_)) val expectedOutput = ByteString(Array.tabulate(256)(_.asInstanceOf[Byte])) - Flow(testInput).toPublisher(materializer).subscribe(conn.outputStream) - val resultFuture = Flow(conn.inputStream).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture(materializer) + Flow(testInput).toPublisher().subscribe(conn.outputStream) + val resultFuture = Flow(conn.inputStream).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture() Await.result(resultFuture, 3.seconds) should be(expectedOutput) @@ -368,10 +368,10 @@ class TcpFlowSpec extends AkkaSpec { val testInput = Iterator.range(0, 256).map(ByteString(_)) val expectedOutput = ByteString(Array.tabulate(256)(_.asInstanceOf[Byte])) - Flow(testInput).toPublisher(materializer).subscribe(conn1.outputStream) + Flow(testInput).toPublisher().subscribe(conn1.outputStream) conn1.inputStream.subscribe(conn2.outputStream) conn2.inputStream.subscribe(conn3.outputStream) - val resultFuture = Flow(conn3.inputStream).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture(materializer) + val resultFuture = Flow(conn3.inputStream).fold(ByteString.empty)((acc, in) ⇒ acc ++ in).toFuture() Await.result(resultFuture, 3.seconds) should 
be(expectedOutput) diff --git a/akka-stream/src/test/scala/akka/stream/testkit/ChainSetup.scala b/akka-stream/src/test/scala/akka/stream/testkit/ChainSetup.scala index 3928616c1f..a53af89f38 100644 --- a/akka-stream/src/test/scala/akka/stream/testkit/ChainSetup.scala +++ b/akka-stream/src/test/scala/akka/stream/testkit/ChainSetup.scala @@ -12,7 +12,7 @@ class ChainSetup[I, O](stream: Flow[I] ⇒ Flow[O], val settings: MaterializerSe val downstream = StreamTestKit.SubscriberProbe[O]() private val s = stream(Flow(upstream)) - val publisher = s.toPublisher(FlowMaterializer(settings)) + val publisher = s.toPublisher()(FlowMaterializer(settings)) val upstreamSubscription = upstream.expectSubscription() publisher.subscribe(downstream) val downstreamSubscription = downstream.expectSubscription()
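
The pattern exercised throughout these hunks: once a FlowMaterializer is declared implicit, terminal stream operations such as toPublisher(), toFuture(), produceTo(...) and consume() pick it up from implicit scope, while an explicit materializer can still be supplied through a second parameter list, as HttpClientPipeline and ChainSetup do. A minimal sketch of that calling convention, not part of the patch; the object name, ActorSystem name and settings values are illustrative assumptions:

import akka.actor.ActorSystem
import akka.stream.{ FlowMaterializer, MaterializerSettings }
import akka.stream.scaladsl.Flow

object ImplicitMaterializerSketch extends App {
  // An ActorSystem is required to create the actor-based materializer.
  implicit val system = ActorSystem("sketch")

  // Declaring the materializer implicit is what lets the calls below omit it.
  implicit val materializer = FlowMaterializer(MaterializerSettings())

  // Materializer resolved from implicit scope -- no trailing argument needed.
  val sumFuture = Flow(List(1, 2, 3)).map(_ * 2).fold(0)(_ + _).toFuture()

  // Still overridable through the second parameter list, mirroring the
  // toPublisher()(materializer) calls in ChainSetup and HttpClientPipeline.
  val publisher = Flow(List(1, 2, 3)).toPublisher()(materializer)
}

For code running inside an actor, the new ImplicitFlowMaterializer trait (exercised by ImplicitFlowMaterializerSpec above) supplies the same implicit without declaring a val by hand.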
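
The same convention applies to Duct, whose build() now materializes the (Subscriber, Publisher) pair without an explicit materializer argument, as FlowTimedSpec and HttpClientPipeline show. A sketch under the assumptions of the previous snippet (implicit materializer already in scope) and that Duct lives alongside Flow in akka.stream.scaladsl with the reactive-streams Subscriber/Publisher types:

import org.reactivestreams.{ Publisher, Subscriber }
import akka.stream.scaladsl.Duct

// build() uses the implicit FlowMaterializer in scope to materialize the duct.
val duct: Duct[Int, String] = Duct[Int].map(_.toString)
val (ductIn: Subscriber[Int], ductOut: Publisher[String]) = duct.build()

// An explicit materializer can still be passed, as HttpClientPipeline does:
// duct.build()(materializer)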