diff --git a/akka-actor/src/main/scala/akka/Done.scala b/akka-actor/src/main/scala/akka/Done.scala index 99cc1aeaf0..f372eeb2c1 100644 --- a/akka-actor/src/main/scala/akka/Done.scala +++ b/akka-actor/src/main/scala/akka/Done.scala @@ -14,6 +14,5 @@ case object Done extends Done { /** * Java API: the singleton instance */ - def getInstance() = this + def getInstance(): Done = this } - diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala index f753967487..823369f308 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala @@ -11,6 +11,8 @@ import java.lang.{ Iterable ⇒ JIterable } import java.util.{ LinkedList ⇒ JLinkedList } import java.util.concurrent.{ Executor, ExecutorService, ExecutionException, Callable, TimeoutException } import scala.util.{ Try, Success, Failure } +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture /** * ExecutionContexts is the Java API for ExecutionContexts @@ -111,6 +113,15 @@ object Futures { */ def successful[T](result: T): Future[T] = Future.successful(result) + /** + * Creates an already completed CompletionStage with the specified exception + */ + def failedCompletionStage[T](ex: Throwable): CompletionStage[T] = { + val f = CompletableFuture.completedFuture[T](null.asInstanceOf[T]) + f.obtrudeException(ex) + f + } + /** * Returns a Future that will hold the optional result of the first Future with a result that matches the predicate */ diff --git a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala index fbec79d4b2..86161184be 100644 --- a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala @@ -8,6 +8,10 @@ import scala.concurrent.{ ExecutionContext, Promise, Future } import akka.actor._ import scala.util.control.NonFatal import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture +import akka.dispatch.Futures +import java.util.function.BiConsumer trait FutureTimeoutSupport { /** @@ -22,4 +26,29 @@ trait FutureTimeoutSupport { using.scheduleOnce(duration) { p completeWith { try value catch { case NonFatal(t) ⇒ Future.failed(t) } } } p.future } + + /** + * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value + * after the specified duration. 
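+ *
+ * Note: the result is surfaced as a [[java.util.concurrent.CompletionStage]]; this method backs the
+ * Java-facing `PatternsCS.after` overloads, and if evaluating `value` throws, the returned stage is
+ * completed exceptionally instead.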
+ */ + def afterCompletionStage[T](duration: FiniteDuration, using: Scheduler)(value: ⇒ CompletionStage[T])(implicit ec: ExecutionContext): CompletionStage[T] = + if (duration.isFinite() && duration.length < 1) { + try value catch { case NonFatal(t) ⇒ Futures.failedCompletionStage(t) } + } else { + val p = new CompletableFuture[T] + using.scheduleOnce(duration) { + try { + val future = value + future.whenComplete(new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable): Unit = { + if (t != null) p.complete(t) + if (ex != null) p.completeExceptionally(ex) + } + }) + } catch { + case NonFatal(ex) ⇒ p.completeExceptionally(ex) + } + } + p + } } diff --git a/akka-actor/src/main/scala/akka/pattern/Patterns.scala b/akka-actor/src/main/scala/akka/pattern/Patterns.scala index 17d2814134..bb5d302cfb 100644 --- a/akka-actor/src/main/scala/akka/pattern/Patterns.scala +++ b/akka-actor/src/main/scala/akka/pattern/Patterns.scala @@ -7,6 +7,8 @@ import akka.actor.{ ActorSelection, Scheduler } import java.util.concurrent.{ Callable, TimeUnit } import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ object Patterns { import akka.japi @@ -250,9 +252,262 @@ object Patterns { scalaAfter(duration, scheduler)(value.call())(context) /** - * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value + * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided Callable * after the specified duration. */ def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Future[T]): Future[T] = scalaAfter(duration, scheduler)(value)(context) } + +object PatternsCS { + import akka.japi + import akka.actor.{ ActorRef, ActorSystem } + import akka.pattern.{ ask ⇒ scalaAsk, pipe ⇒ scalaPipe, gracefulStop ⇒ scalaGracefulStop, after ⇒ scalaAfter } + import akka.util.Timeout + import scala.concurrent.Future + import scala.concurrent.duration._ + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target actor + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. 
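+ *
+ * With a [[java.util.concurrent.CompletionStage]] a non-blocking callback can also be attached with the
+ * standard `thenAccept` combinator, for example (a sketch reusing the illustrative names from the
+ * example below):
+ *
+ * {{{
+ * PatternsCS.ask(worker, request, timeout)
+ *   .thenAccept(o -> nextActor.tell(new EnrichedResult(request, o), ActorRef.noSender()));
+ * }}}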
+ * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(actor: ActorRef, message: Any, timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(actor, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * worker, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(actor: ActorRef, messageFactory: japi.Function[ActorRef, Any], timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(actor, messageFactory.apply _)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target actor + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(actor: ActorRef, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(actor, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * worker, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(actor: ActorRef, messageFactory: japi.Function[ActorRef, Any], timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(actor, messageFactory.apply _)(Timeout(timeoutMillis.millis)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target [[akka.actor.ActorSelection]] + * needs to send the result to the `sender` reference provided. 
The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(selection, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(selection: ActorSelection, message: Any, timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(selection, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target [[akka.actor.ActorSelection]] + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(selection, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(selection: ActorSelection, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(selection, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * selection, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(selection: ActorSelection, messageFactory: japi.Function[ActorRef, Any], timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(selection, messageFactory.apply _)(Timeout(timeoutMillis.millis)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Register an onComplete callback on this [[java.util.concurrent.CompletionStage]] to send + * the result to the given [[akka.actor.ActorRef]] or [[akka.actor.ActorSelection]]. 
+ * Returns the original CompletionStage to allow method chaining. + * If the future was completed with failure it is sent as a [[akka.actor.Status.Failure]] + * to the recipient. + * + * Recommended usage example: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * // apply some transformation (i.e. enrich with request info) + * final CompletionStage transformed = f.map(new akka.japi.Function() { ... }); + * // send it on to the next stage + * Patterns.pipe(transformed).to(nextActor); + * }}} + */ + def pipe[T](future: CompletionStage[T], context: ExecutionContext): PipeableCompletionStage[T] = pipeCompletionStage(future)(context) + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when + * existing messages of the target actor has been processed and the actor has been + * terminated. + * + * Useful when you need to wait for termination or compose ordered termination of several actors. + * + * If the target actor isn't terminated within the timeout the [[java.util.concurrent.CompletionStage]] + * is completed with failure [[akka.pattern.AskTimeoutException]]. + */ + def gracefulStop(target: ActorRef, timeout: FiniteDuration): CompletionStage[java.lang.Boolean] = + scalaGracefulStop(target, timeout).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]] + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when + * existing messages of the target actor has been processed and the actor has been + * terminated. + * + * Useful when you need to wait for termination or compose ordered termination of several actors. + * + * If you want to invoke specialized stopping logic on your target actor instead of PoisonPill, you can pass your + * stop command as `stopMessage` parameter + * + * If the target actor isn't terminated within the timeout the [[java.util.concurrent.CompletionStage]] + * is completed with failure [[akka.pattern.AskTimeoutException]]. + */ + def gracefulStop(target: ActorRef, timeout: FiniteDuration, stopMessage: Any): CompletionStage[java.lang.Boolean] = + scalaGracefulStop(target, timeout, stopMessage).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]] + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the success or failure of the provided Callable + * after the specified duration. + */ + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Callable[CompletionStage[T]]): CompletionStage[T] = + afterCompletionStage(duration, scheduler)(value.call())(context) + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the success or failure of the provided value + * after the specified duration. 
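+ *
+ * For example (a sketch; `system` and the `reply` value are illustrative):
+ *
+ * {{{
+ * final CompletionStage<String> delayed = PatternsCS.after(
+ *   Duration.create(200, TimeUnit.MILLISECONDS),
+ *   system.scheduler(), system.dispatcher(),
+ *   CompletableFuture.completedFuture(reply));
+ * }}}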
+ */ + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: CompletionStage[T]): CompletionStage[T] = + afterCompletionStage(duration, scheduler)(value)(context) +} diff --git a/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala b/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala index de34e22b77..d84b8caec7 100644 --- a/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala @@ -8,23 +8,23 @@ import scala.concurrent.{ Future, ExecutionContext } import scala.util.{ Failure, Success } import akka.actor.{ Status, ActorRef, Actor } import akka.actor.ActorSelection +import java.util.concurrent.CompletionStage +import java.util.function.BiConsumer trait PipeToSupport { final class PipeableFuture[T](val future: Future[T])(implicit executionContext: ExecutionContext) { def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): Future[T] = { - future onComplete { + future andThen { case Success(r) ⇒ recipient ! r case Failure(f) ⇒ recipient ! Status.Failure(f) } - future } def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): Future[T] = { - future onComplete { + future andThen { case Success(r) ⇒ recipient ! r case Failure(f) ⇒ recipient ! Status.Failure(f) } - future } def to(recipient: ActorRef): PipeableFuture[T] = to(recipient, Actor.noSender) def to(recipient: ActorRef, sender: ActorRef): PipeableFuture[T] = { @@ -38,6 +38,35 @@ trait PipeToSupport { } } + final class PipeableCompletionStage[T](val future: CompletionStage[T])(implicit executionContext: ExecutionContext) { + def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = { + future whenComplete new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable) { + if (t != null) recipient ! t + if (ex != null) recipient ! Status.Failure(ex) + } + } + } + def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = { + future whenComplete new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable) { + if (t != null) recipient ! t + if (ex != null) recipient ! Status.Failure(ex) + } + } + } + def to(recipient: ActorRef): PipeableCompletionStage[T] = to(recipient, Actor.noSender) + def to(recipient: ActorRef, sender: ActorRef): PipeableCompletionStage[T] = { + pipeTo(recipient)(sender) + this + } + def to(recipient: ActorSelection): PipeableCompletionStage[T] = to(recipient, Actor.noSender) + def to(recipient: ActorSelection, sender: ActorRef): PipeableCompletionStage[T] = { + pipeToSelection(recipient)(sender) + this + } + } + /** * Import this implicit conversion to gain the `pipeTo` method on [[scala.concurrent.Future]]: * @@ -56,4 +85,23 @@ trait PipeToSupport { * the failure is sent in a [[akka.actor.Status.Failure]] to the recipient. */ implicit def pipe[T](future: Future[T])(implicit executionContext: ExecutionContext): PipeableFuture[T] = new PipeableFuture(future) + + /** + * Import this implicit conversion to gain the `pipeTo` method on [[scala.concurrent.Future]]: + * + * {{{ + * import akka.pattern.pipe + * + * Future { doExpensiveCalc() } pipeTo nextActor + * + * or + * + * pipe(someFuture) to nextActor + * + * }}} + * + * The successful result of the future is sent as a message to the recipient, or + * the failure is sent in a [[akka.actor.Status.Failure]] to the recipient. 
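+ *
+ * From Java the same functionality is exposed through `PatternsCS.pipe`, for example (a sketch;
+ * `someCompletionStage`, `ec` and `nextActor` are illustrative):
+ *
+ * {{{
+ * PatternsCS.pipe(someCompletionStage, ec).to(nextActor);
+ * }}}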
+ */ + implicit def pipeCompletionStage[T](future: CompletionStage[T])(implicit executionContext: ExecutionContext): PipeableCompletionStage[T] = new PipeableCompletionStage(future) } diff --git a/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java b/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java index aaf2ebbe94..da59cc79e1 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java @@ -22,7 +22,9 @@ import akka.http.javadsl.Http; import scala.util.Try; import static akka.http.javadsl.ConnectHttp.toHost; -import static akka.pattern.Patterns.*; +import static akka.pattern.PatternsCS.*; + +import java.util.concurrent.CompletionStage; @SuppressWarnings("unused") public class HttpClientExampleDocTest { @@ -34,9 +36,9 @@ public class HttpClientExampleDocTest { final ActorSystem system = ActorSystem.create(); final ActorMaterializer materializer = ActorMaterializer.create(system); - final Flow> connectionFlow = + final Flow> connectionFlow = Http.get(system).outgoingConnection(toHost("akka.io", 80)); - final Future responseFuture = + final CompletionStage responseFuture = Source.single(HttpRequest.create("/")) .via(connectionFlow) .runWith(Sink.head(), materializer); @@ -58,7 +60,7 @@ public class HttpClientExampleDocTest { // construct a pool client flow with context type `Integer` - final Future, Integer>> responseFuture = + final CompletionStage, Integer>> responseFuture = Source .single(Pair.create(HttpRequest.create("/"), 42)) .via(poolClientFlow) @@ -72,7 +74,7 @@ public class HttpClientExampleDocTest { final ActorSystem system = ActorSystem.create(); final Materializer materializer = ActorMaterializer.create(system); - final Future responseFuture = + final CompletionStage responseFuture = Http.get(system) .singleRequest(HttpRequest.create("http://akka.io"), materializer); //#single-request-example @@ -92,7 +94,7 @@ public class HttpClientExampleDocTest { }).build()); } - Future fetch(String url) { + CompletionStage fetch(String url) { return http.singleRequest(HttpRequest.create(url), materializer); } } diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java index 7b0a750a74..cb509dcac9 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java @@ -6,32 +6,25 @@ package docs.http.javadsl.server; //#binding-failure-high-level-example import akka.actor.ActorSystem; -import akka.dispatch.OnFailure; -import akka.http.javadsl.model.ContentTypes; -import akka.http.javadsl.server.*; -import akka.http.javadsl.server.values.Parameters; import akka.http.scaladsl.Http; -import scala.concurrent.Future; import java.io.IOException; +import java.util.concurrent.CompletionStage; -@SuppressWarnings("unchecked") public class HighLevelServerBindFailureExample { public static void main(String[] args) throws IOException { // boot up server using the route as defined below final ActorSystem system = ActorSystem.create(); // HttpApp.bindRoute expects a route being provided by HttpApp.createRoute - Future bindingFuture = + CompletionStage bindingFuture = new HighLevelServerExample().bindRoute("localhost", 8080, system); - bindingFuture.onFailure(new OnFailure() { - @Override - public void 
onFailure(Throwable failure) throws Throwable { + bindingFuture.exceptionally(failure -> { System.err.println("Something very bad happened! " + failure.getMessage()); system.terminate(); - } - }, system.dispatcher()); + return null; + }); system.terminate(); } diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java index 624b34e441..8c6b3beb27 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java @@ -13,6 +13,10 @@ import akka.http.javadsl.server.values.BasicCredentials; import akka.http.javadsl.server.values.HttpBasicAuthenticator; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.scaladsl.model.headers.Authorization; + +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; import scala.Option; import scala.concurrent.Future; @@ -27,7 +31,7 @@ public class HttpBasicAuthenticatorExample extends JUnitRouteTest { private final String hardcodedPassword = "correcthorsebatterystaple"; - public Future> authenticate(BasicCredentials credentials) { + public CompletionStage> authenticate(BasicCredentials credentials) { // this is where your actual authentication logic would go if (credentials.available() && // no anonymous access credentials.verify(hardcodedPassword)) { diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java index c737b21095..83e7608ff2 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java @@ -30,13 +30,10 @@ import akka.stream.stage.PushStage; import akka.stream.stage.SyncDirective; import akka.stream.stage.TerminationDirective; import akka.util.ByteString; -import scala.concurrent.Await; -import scala.concurrent.Future; -import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; @SuppressWarnings("unused") @@ -47,20 +44,17 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... 
and then actually handle the connection + } + )).run(materializer); //#binding-example - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void bindingFailureExample() throws Exception { @@ -68,27 +62,21 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 80, materializer); - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... and then actually handle the connection + } + )).run(materializer); - serverBindingFuture.onFailure(new OnFailure() { - @Override - public void onFailure(Throwable failure) throws Throwable { + serverBindingFuture.whenCompleteAsync((binding, failure) -> { // possibly report the failure somewhere... - } }, system.dispatcher()); //#binding-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void connectionSourceFailureExample() throws Exception { @@ -96,7 +84,7 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); Flow failureDetection = @@ -114,19 +102,16 @@ public class HttpServerExampleDocTest { } }); - Future serverBindingFuture = + CompletionStage serverBindingFuture = serverSource .via(failureDetection) // feed signals through our custom stage - .to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + .to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... 
and then actually handle the connection + })) + .run(materializer); //#incoming-connections-source-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void connectionStreamFailureExample() throws Exception { @@ -134,7 +119,7 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); Flow failureDetection = @@ -163,14 +148,14 @@ public class HttpServerExampleDocTest { .withEntity(entity); }); - Future serverBindingFuture = - serverSource.to(Sink.foreach(con -> { - System.out.println("Accepted new connection from " + con.remoteAddress()); - con.handleWith(httpEcho, materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(conn -> { + System.out.println("Accepted new connection from " + conn.remoteAddress()); + conn.handleWith(httpEcho, materializer); } )).run(materializer); //#connection-stream-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void fullServerExample() throws Exception { @@ -181,7 +166,7 @@ public class HttpServerExampleDocTest { //#full-server-example final Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); //#request-handler @@ -219,21 +204,17 @@ public class HttpServerExampleDocTest { }; //#request-handler - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); - connection.handleWithSyncHandler(requestHandler, materializer); - // this is equivalent to - //connection.handleWith(Flow.of(HttpRequest.class).map(requestHandler), materializer); - } + connection.handleWithSyncHandler(requestHandler, materializer); + // this is equivalent to + //connection.handleWith(Flow.of(HttpRequest.class).map(requestHandler), materializer); })).run(materializer); //#full-server-example - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); // will throw if binding fails + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); // will throw if binding fails System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java index 3152179791..17f4973daa 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java @@ -16,6 +16,10 @@ import akka.http.javadsl.server.values.OAuth2Authenticator; import akka.http.javadsl.server.values.OAuth2Credentials; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.scaladsl.model.headers.Authorization; + +import 
java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; import scala.Option; import scala.concurrent.Future; @@ -31,7 +35,7 @@ public class OAuth2AuthenticatorExample extends JUnitRouteTest { private final String hardcodedToken = "token"; @Override - public Future> authenticate(OAuth2Credentials credentials) { + public CompletionStage> authenticate(OAuth2Credentials credentials) { // this is where your actual authentication logic would go, looking up the user // based on the token or something in that direction if (credentials.available() && // no anonymous access diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java index 2b162a70d9..fad2fad273 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java @@ -7,6 +7,7 @@ package docs.http.javadsl.server; //#websocket-example-using-core import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -50,7 +51,7 @@ public class WebSocketCoreExample { try { final Materializer materializer = ActorMaterializer.create(system); - Future serverBindingFuture = + CompletionStage serverBindingFuture = Http.get(system).bindAndHandleSync( new Function() { public HttpResponse apply(HttpRequest request) throws Exception { @@ -59,7 +60,7 @@ public class WebSocketCoreExample { }, "localhost", 8080, materializer); // will throw if binding fails - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java index 220b8ec1fc..906ffec871 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java @@ -4,7 +4,7 @@ package docs.persistence; -import static akka.pattern.Patterns.ask; +import static akka.pattern.PatternsCS.ask; import java.util.HashSet; import java.util.Set; import java.util.Iterator; @@ -42,6 +42,7 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public class PersistenceQueryDocTest { @@ -343,7 +344,7 @@ public class PersistenceQueryDocTest { //#projection-into-different-store-simple-classes class ExampleStore { - Future save(Object any) { + CompletionStage save(Object any) { // ... //#projection-into-different-store-simple-classes return null; @@ -379,13 +380,13 @@ public class PersistenceQueryDocTest { this.name = name; } - public Future saveProgress(long offset) { + public CompletionStage saveProgress(long offset) { // ... //#projection-into-different-store return null; //#projection-into-different-store } - public Future latestOffset() { + public CompletionStage latestOffset() { // ... 
//#projection-into-different-store return null; @@ -412,17 +413,13 @@ public class PersistenceQueryDocTest { final Props writerProps = Props.create(TheOneWhoWritesToQueryJournal.class, "bid"); final ActorRef writer = system.actorOf(writerProps, "bid-projection-writer"); - long startFromOffset = Await.result(bidProjection.latestOffset(), timeout.duration()); + long startFromOffset = bidProjection.latestOffset().toCompletableFuture().get(3, TimeUnit.SECONDS); readJournal .eventsByTag("bid", startFromOffset) - .mapAsync(8, envelope -> { - final Future f = ask(writer, envelope.event(), timeout); - return f.map(new Mapper() { - @Override public Long apply(Object in) { - return envelope.offset(); - } - }, system.dispatcher()); + .mapAsync(8, envelope -> { + final CompletionStage f = ask(writer, envelope.event(), timeout); + return f.thenApplyAsync(in -> envelope.offset(), system.dispatcher()); }) .mapAsync(1, offset -> bidProjection.saveProgress(offset)) .runWith(Sink.ignore(), mat); diff --git a/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java index f8896a9322..884d5f5fb3 100644 --- a/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java @@ -6,6 +6,7 @@ package docs.stream; import java.nio.ByteOrder; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -221,16 +222,15 @@ public class BidiFlowDocTest { ); final Flow flow = stack.atop(stack.reversed()).join(pingpong); - final Future> result = Source + final CompletionStage> result = Source .from(Arrays.asList(0, 1, 2)) . map(id -> new Ping(id)) .via(flow) .grouped(10) .runWith(Sink.> head(), mat); - final FiniteDuration oneSec = Duration.create(1, TimeUnit.SECONDS); assertArrayEquals( new Message[] { new Pong(0), new Pong(1), new Pong(2) }, - Await.result(result, oneSec).toArray(new Message[0])); + result.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(new Message[0])); //#compose } } diff --git a/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java b/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java index cf24b43786..6f4e5234c9 100644 --- a/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java @@ -5,6 +5,8 @@ package docs.stream; import java.util.Arrays; import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; import akka.NotUsed; import akka.stream.ClosedShape; @@ -215,27 +217,23 @@ public class CompositionDocTest { //#mat-combine-4a static class MyClass { - private Promise> p; + private CompletableFuture> p; private OutgoingConnection conn; - public MyClass(Promise> p, OutgoingConnection conn) { + public MyClass(CompletableFuture> p, OutgoingConnection conn) { this.p = p; this.conn = conn; } public void close() { - p.success(Optional.empty()); + p.complete(Optional.empty()); } } static class Combiner { - static Future f(Promise> p, - Pair, Future> rest) { - return rest.first().map(new Mapper() { - public MyClass apply(OutgoingConnection c) { - return new MyClass(p, c); - } - }, system.dispatcher()); + static CompletionStage f(CompletableFuture> p, + Pair, CompletionStage> rest) { + return rest.first().thenApply(c -> new MyClass(p, c)); } } //#mat-combine-4a @@ -244,13 +242,13 @@ public class CompositionDocTest { public void materializedValues() throws 
Exception { //#mat-combine-1 // Materializes to Promise (red) - final Source>> source = Source.maybe(); + final Source>> source = Source.maybe(); // Materializes to BoxedUnit (black) final Flow flow1 = Flow.of(Integer.class).take(100); // Materializes to Promise> (red) - final Source>> nestedSource = + final Source>> nestedSource = source.viaMat(flow1, Keep.left()).named("nestedSource"); //#mat-combine-1 @@ -260,27 +258,27 @@ public class CompositionDocTest { .map(i -> ByteString.fromString(i.toString())); // Materializes to Future (yellow) - final Flow> flow3 = + final Flow> flow3 = Tcp.get(system).outgoingConnection("localhost", 8080); // Materializes to Future (yellow) - final Flow> nestedFlow = + final Flow> nestedFlow = flow2.viaMat(flow3, Keep.right()).named("nestedFlow"); //#mat-combine-2 //#mat-combine-3 // Materializes to Future (green) - final Sink> sink = Sink - .fold("", (acc, i) -> acc + i.utf8String()); + final Sink> sink = + Sink. fold("", (acc, i) -> acc + i.utf8String()); // Materializes to Pair, Future> (blue) - final Sink, Future>> nestedSink = + final Sink, CompletionStage>> nestedSink = nestedFlow.toMat(sink, Keep.both()); //#mat-combine-3 //#mat-combine-4b // Materializes to Future (purple) - final RunnableGraph> runnableGraph = + final RunnableGraph> runnableGraph = nestedSource.toMat(nestedSink, Combiner::f); //#mat-combine-4b } diff --git a/akka-docs/rst/java/code/docs/stream/FlowDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowDocTest.java index 2397b4fe8f..10b7ce4831 100644 --- a/akka-docs/rst/java/code/docs/stream/FlowDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/FlowDocTest.java @@ -6,6 +6,8 @@ package docs.stream; import static org.junit.Assert.assertEquals; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; @@ -55,15 +57,12 @@ public class FlowDocTest { // returns new Source, with `map()` appended final Source zeroes = source.map(x -> 0); - final Sink> fold = - Sink.fold(0, (agg, next) -> agg + next); + final Sink> fold = + Sink. fold(0, (agg, next) -> agg + next); zeroes.runWith(fold, mat); // 0 //#source-immutable - int result = Await.result( - zeroes.runWith(fold, mat), - Duration.create(3, TimeUnit.SECONDS) - ); + int result = zeroes.runWith(fold, mat).toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(0, result); } @@ -73,18 +72,18 @@ public class FlowDocTest { final Source source = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); // note that the Future is scala.concurrent.Future - final Sink> sink = - Sink.fold(0, (aggr, next) -> aggr + next); + final Sink> sink = + Sink. fold(0, (aggr, next) -> aggr + next); // connect the Source to the Sink, obtaining a RunnableFlow - final RunnableGraph> runnable = + final RunnableGraph> runnable = source.toMat(sink, Keep.right()); // materialize the flow - final Future sum = runnable.run(mat); + final CompletionStage sum = runnable.run(mat); //#materialization-in-steps - int result = Await.result(sum, Duration.create(3, TimeUnit.SECONDS)); + int result = sum.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(55, result); } @@ -93,14 +92,14 @@ public class FlowDocTest { //#materialization-runWith final Source source = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); - final Sink> sink = - Sink.fold(0, (aggr, next) -> aggr + next); + final Sink> sink = + Sink. 
fold(0, (aggr, next) -> aggr + next); // materialize the flow, getting the Sinks materialized value - final Future sum = source.runWith(sink, mat); + final CompletionStage sum = source.runWith(sink, mat); //#materialization-runWith - int result = Await.result(sum, Duration.create(3, TimeUnit.SECONDS)); + int result = sum.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(55, result); } @@ -108,21 +107,21 @@ public class FlowDocTest { public void materializedMapUnique() throws Exception { //#stream-reuse // connect the Source to the Sink, obtaining a RunnableGraph - final Sink> sink = - Sink.fold(0, (aggr, next) -> aggr + next); - final RunnableGraph> runnable = + final Sink> sink = + Sink. fold(0, (aggr, next) -> aggr + next); + final RunnableGraph> runnable = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)).toMat(sink, Keep.right()); // get the materialized value of the FoldSink - final Future sum1 = runnable.run(mat); - final Future sum2 = runnable.run(mat); + final CompletionStage sum1 = runnable.run(mat); + final CompletionStage sum2 = runnable.run(mat); // sum1 and sum2 are different Futures! //#stream-reuse - int result1 = Await.result(sum1, Duration.create(3, TimeUnit.SECONDS)); + int result1 = sum1.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(55, result1); - int result2 = Await.result(sum2, Duration.create(3, TimeUnit.SECONDS)); + int result2 = sum2.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(55, result2); } @@ -222,40 +221,40 @@ public class FlowDocTest { //#flow-mat-combine // An empty source that can be shut down explicitly from the outside - Source>> source = Source.maybe(); + Source>> source = Source.maybe(); // A flow that internally throttles elements to 1/second, and returns a Cancellable // which can be used to shut down the stream Flow flow = throttler; // A sink that returns the first element of a stream in the returned Future - Sink> sink = Sink.head(); + Sink> sink = Sink.head(); // By default, the materialized value of the leftmost stage is preserved - RunnableGraph>> r1 = source.via(flow).to(sink); + RunnableGraph>> r1 = source.via(flow).to(sink); // Simple selection of materialized values by using Keep.right RunnableGraph r2 = source.viaMat(flow, Keep.right()).to(sink); - RunnableGraph> r3 = source.via(flow).toMat(sink, Keep.right()); + RunnableGraph> r3 = source.via(flow).toMat(sink, Keep.right()); // Using runWith will always give the materialized values of the stages added // by runWith() itself - Future r4 = source.via(flow).runWith(sink, mat); - Promise> r5 = flow.to(sink).runWith(source, mat); - Pair>, Future> r6 = flow.runWith(source, sink, mat); + CompletionStage r4 = source.via(flow).runWith(sink, mat); + CompletableFuture> r5 = flow.to(sink).runWith(source, mat); + Pair>, CompletionStage> r6 = flow.runWith(source, sink, mat); // Using more complext combinations - RunnableGraph>, Cancellable>> r7 = + RunnableGraph>, Cancellable>> r7 = source.viaMat(flow, Keep.both()).to(sink); - RunnableGraph>, Future>> r8 = + RunnableGraph>, CompletionStage>> r8 = source.via(flow).toMat(sink, Keep.both()); - RunnableGraph>, Cancellable>, Future>> r9 = + RunnableGraph>, Cancellable>, CompletionStage>> r9 = source.viaMat(flow, Keep.both()).toMat(sink, Keep.both()); - RunnableGraph>> r10 = + RunnableGraph>> r10 = source.viaMat(flow, Keep.right()).toMat(sink, Keep.both()); // It is also possible to map over the materialized values. 
In r9 we had a @@ -264,9 +263,9 @@ public class FlowDocTest { RunnableGraph r11 = r9.mapMaterializedValue( (nestedTuple) -> { - Promise> p = nestedTuple.first().first(); + CompletableFuture> p = nestedTuple.first().first(); Cancellable c = nestedTuple.first().second(); - Future f = nestedTuple.second(); + CompletionStage f = nestedTuple.second(); // Picking the Cancellable, but we could also construct a domain class here return c; diff --git a/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java index d575b69ab0..105580bff3 100644 --- a/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java @@ -6,6 +6,7 @@ package docs.stream; import static org.junit.Assert.assertEquals; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -49,14 +50,14 @@ public class FlowErrorDocTest { final Materializer mat = ActorMaterializer.create(system); final Source source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5)) .map(elem -> 100 / elem); - final Sink> fold = - Sink.fold(0, (acc, elem) -> acc + elem); - final Future result = source.runWith(fold, mat); + final Sink> fold = + Sink. fold(0, (acc, elem) -> acc + elem); + final CompletionStage result = source.runWith(fold, mat); // division by zero will fail the stream and the // result here will be a Future completed with Failure(ArithmeticException) //#stop - Await.result(result, Duration.create(3, TimeUnit.SECONDS)); + result.toCompletableFuture().get(3, TimeUnit.SECONDS); } @Test @@ -73,14 +74,14 @@ public class FlowErrorDocTest { system); final Source source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5)) .map(elem -> 100 / elem); - final Sink> fold = + final Sink> fold = Sink.fold(0, (acc, elem) -> acc + elem); - final Future result = source.runWith(fold, mat); + final CompletionStage result = source.runWith(fold, mat); // the element causing division by zero will be dropped // result here will be a Future completed with Success(228) //#resume - assertEquals(Integer.valueOf(228), Await.result(result, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(Integer.valueOf(228), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test @@ -98,14 +99,14 @@ public class FlowErrorDocTest { .withAttributes(ActorAttributes.withSupervisionStrategy(decider)); final Source source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5)) .via(flow); - final Sink> fold = - Sink.fold(0, (acc, elem) -> acc + elem); - final Future result = source.runWith(fold, mat); + final Sink> fold = + Sink. fold(0, (acc, elem) -> acc + elem); + final CompletionStage result = source.runWith(fold, mat); // the elements causing division by zero will be dropped // result here will be a Future completed with Success(150) //#resume-section - assertEquals(Integer.valueOf(150), Await.result(result, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(Integer.valueOf(150), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test @@ -126,7 +127,7 @@ public class FlowErrorDocTest { .withAttributes(ActorAttributes.withSupervisionStrategy(decider)); final Source source = Source.from(Arrays.asList(1, 3, -1, 5, 7)) .via(flow); - final Future> result = source.grouped(1000) + final CompletionStage> result = source.grouped(1000) .runWith(Sink.>head(), mat); // the negative element cause the scan stage to be restarted, // i.e. 
start from 0 again @@ -135,7 +136,7 @@ public class FlowErrorDocTest { assertEquals( Arrays.asList(0, 1, 4, 0, 5, 12), - Await.result(result, Duration.create(3, TimeUnit.SECONDS))); + result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } } diff --git a/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java index 0cef3e1bf5..99531726fc 100644 --- a/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java @@ -7,6 +7,7 @@ import static org.junit.Assert.*; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -46,15 +47,15 @@ public class FlowGraphDocTest { public void demonstrateBuildSimpleGraph() throws Exception { //#simple-flow-graph final Source in = Source.from(Arrays.asList(1, 2, 3, 4, 5)); - final Sink, Future>> sink = Sink.head(); - final Sink, Future>> sink2 = Sink.head(); + final Sink, CompletionStage>> sink = Sink.head(); + final Sink, CompletionStage>> sink2 = Sink.head(); final Flow f1 = Flow.of(Integer.class).map(elem -> elem + 10); final Flow f2 = Flow.of(Integer.class).map(elem -> elem + 20); final Flow f3 = Flow.of(Integer.class).map(elem -> elem.toString()); final Flow f4 = Flow.of(Integer.class).map(elem -> elem + 30); - final RunnableGraph>> result = - RunnableGraph.>>fromGraph( + final RunnableGraph>> result = + RunnableGraph.>>fromGraph( GraphDSL .create( sink, @@ -70,7 +71,7 @@ public class FlowGraphDocTest { return ClosedShape.getInstance(); })); //#simple-flow-graph - final List list = Await.result(result.run(mat), Duration.create(3, TimeUnit.SECONDS)); + final List list = result.run(mat).toCompletableFuture().get(3, TimeUnit.SECONDS); final String[] res = list.toArray(new String[] {}); Arrays.sort(res, null); assertArrayEquals(new String[] { "31", "32", "33", "34", "35", "41", "42", "43", "44", "45" }, res); @@ -105,12 +106,12 @@ public class FlowGraphDocTest { @Test public void demonstrateReusingFlowInGraph() throws Exception { //#flow-graph-reusing-a-flow - final Sink> topHeadSink = Sink.head(); - final Sink> bottomHeadSink = Sink.head(); + final Sink> topHeadSink = Sink.head(); + final Sink> bottomHeadSink = Sink.head(); final Flow sharedDoubler = Flow.of(Integer.class).map(elem -> elem * 2); - final RunnableGraph, Future>> g = - RunnableGraph., Future>>fromGraph( + final RunnableGraph, CompletionStage>> g = + RunnableGraph., CompletionStage>>fromGraph( GraphDSL.create( topHeadSink, // import this sink into the graph bottomHeadSink, // and this as well @@ -127,24 +128,22 @@ public class FlowGraphDocTest { ) ); //#flow-graph-reusing-a-flow - final Pair, Future> pair = g.run(mat); - assertEquals(Integer.valueOf(2), Await.result(pair.first(), Duration.create(3, TimeUnit.SECONDS))); - assertEquals(Integer.valueOf(2), Await.result(pair.second(), Duration.create(3, TimeUnit.SECONDS))); + final Pair, CompletionStage> pair = g.run(mat); + assertEquals(Integer.valueOf(2), pair.first().toCompletableFuture().get(3, TimeUnit.SECONDS)); + assertEquals(Integer.valueOf(2), pair.second().toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test public void demonstrateMatValue() throws Exception { //#flow-graph-matvalue - final Sink> foldSink = Sink. fold(0, (a, b) -> { + final Sink> foldSink = Sink. 
fold(0, (a, b) -> { return a + b; }); - final Flow, Integer, NotUsed> flatten = Flow.>create() - .mapAsync(4, x -> { - return x; - }); + final Flow, Integer, NotUsed> flatten = + Flow.>create().mapAsync(4, x -> x); - final Flow> foldingFlow = Flow.fromGraph( + final Flow> foldingFlow = Flow.fromGraph( GraphDSL.create(foldSink, (b, fold) -> { return FlowShape.of( @@ -155,7 +154,7 @@ public class FlowGraphDocTest { //#flow-graph-matvalue-cycle // This cannot produce any value: - final Source> cyclicSource = Source.fromGraph( + final Source> cyclicSource = Source.fromGraph( GraphDSL.create(foldSink, (b, fold) -> { // - Fold cannot complete until its upstream mapAsync completes diff --git a/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java index 41fac66ccb..91c8433f59 100644 --- a/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.Predicate; @@ -154,11 +155,11 @@ public class FlowStagesDocTest { @Test public void demonstrateVariousPushPullStages() throws Exception { - final Sink>> sink = + final Sink>> sink = Flow.of(Integer.class).grouped(10).toMat(Sink.head(), Keep.right()); //#stage-chain - final RunnableGraph>> runnable = + final RunnableGraph>> runnable = Source .from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) .transform(() -> new Filter(elem -> elem % 2 == 0)) @@ -168,7 +169,7 @@ public class FlowStagesDocTest { //#stage-chain assertEquals(Arrays.asList(1, 1, 2, 2, 3, 3, 4, 4, 5, 5), - Await.result(runnable.run(mat), FiniteDuration.create(3, TimeUnit.SECONDS))); + runnable.run(mat).toCompletableFuture().get(3, TimeUnit.SECONDS)); } //#detached diff --git a/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java b/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java index 6707451cde..9d3425c5e6 100644 --- a/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java @@ -31,6 +31,9 @@ import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; @@ -101,14 +104,14 @@ public class GraphStageDocTest { Source mySource = Source.fromGraph(sourceGraph); // Returns 55 - Future result1 = mySource.take(10).runFold(0, (sum, next) -> sum + next, mat); + CompletionStage result1 = mySource.take(10).runFold(0, (sum, next) -> sum + next, mat); // The source is reusable. 
This returns 5050 - Future result2 = mySource.take(100).runFold(0, (sum, next) -> sum + next, mat); + CompletionStage result2 = mySource.take(100).runFold(0, (sum, next) -> sum + next, mat); //#simple-source-usage - assertEquals(Await.result(result1, Duration.create(3, "seconds")), (Integer) 55); - assertEquals(Await.result(result2, Duration.create(3, "seconds")), (Integer) 5050); + assertEquals(result1.toCompletableFuture().get(3, TimeUnit.SECONDS), (Integer) 55); + assertEquals(result2.toCompletableFuture().get(3, TimeUnit.SECONDS), (Integer) 5050); } @@ -169,12 +172,12 @@ public class GraphStageDocTest { } })); - Future result = + CompletionStage result = Source.from(Arrays.asList("one", "two", "three")) .via(stringLength) .runFold(0, (sum, n) -> sum + n, mat); - assertEquals(new Integer(11), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(11), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } //#many-to-one @@ -231,12 +234,12 @@ public class GraphStageDocTest { Graph, NotUsed> evenFilter = Flow.fromGraph(new Filter(n -> n % 2 == 0)); - Future result = + CompletionStage result = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6)) .via(evenFilter) .runFold(0, (elem, sum) -> sum + elem, mat); - assertEquals(new Integer(12), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } //#one-to-many @@ -300,12 +303,12 @@ public class GraphStageDocTest { Graph, NotUsed> duplicator = Flow.fromGraph(new Duplicator()); - Future result = + CompletionStage result = Source.from(Arrays.asList(1, 2, 3)) .via(duplicator) .runFold(0, (n, sum) -> n + sum, mat); - assertEquals(new Integer(12), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @@ -357,20 +360,20 @@ public class GraphStageDocTest { Graph, NotUsed> duplicator = Flow.fromGraph(new Duplicator2()); - Future result = + CompletionStage result = Source.from(Arrays.asList(1, 2, 3)) .via(duplicator) .runFold(0, (n, sum) -> n + sum, mat); - assertEquals(new Integer(12), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test public void demonstrateChainingOfGraphStages() throws Exception { - Graph, Future> sink = Sink.fold("", (acc, n) -> acc + n.toString()); + Graph, CompletionStage> sink = Sink.fold("", (acc, n) -> acc + n.toString()); //#graph-stage-chain - Future resultFuture = Source.from(Arrays.asList(1,2,3,4,5)) + CompletionStage resultFuture = Source.from(Arrays.asList(1,2,3,4,5)) .via(new Filter((n) -> n % 2 == 0)) .via(new Duplicator()) .via(new Map((n) -> n / 2)) @@ -378,7 +381,7 @@ public class GraphStageDocTest { //#graph-stage-chain - assertEquals("1122", Await.result(resultFuture, Duration.create(3, "seconds"))); + assertEquals("1122", resultFuture.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @@ -386,9 +389,9 @@ public class GraphStageDocTest { // will close upstream when the future completes public class KillSwitch extends GraphStage> { - private final Future switchF; + private final CompletionStage switchF; - public KillSwitch(Future switchF) { + public KillSwitch(CompletionStage switchF) { this.switchF = switchF; } @@ -430,12 +433,7 @@ public class GraphStageDocTest { }); ExecutionContext ec = system.dispatcher(); - switchF.onSuccess(new OnSuccess() { - @Override - public void onSuccess(Done result) throws Throwable { - 
callback.invoke(Done.getInstance()); - } - }, ec); + switchF.thenAccept(callback::invoke); } }; } @@ -446,29 +444,23 @@ public class GraphStageDocTest { public void demonstrateAnAsynchronousSideChannel() throws Exception{ // tests: - Promise switchF = Futures.promise(); + CompletableFuture switchF = new CompletableFuture<>(); Graph, NotUsed> killSwitch = - Flow.fromGraph(new KillSwitch<>(switchF.future())); + Flow.fromGraph(new KillSwitch<>(switchF)); ExecutionContext ec = system.dispatcher(); - // TODO this is probably racey, is there a way to make sure it happens after? - Future valueAfterKill = switchF.future().flatMap(new Mapper>() { - @Override - public Future apply(Done parameter) { - return Futures.successful(4); - } - }, ec); + CompletionStage valueAfterKill = switchF.thenApply(in -> 4); - Future result = - Source.from(Arrays.asList(1, 2, 3)).concat(Source.fromFuture(valueAfterKill)) + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3)).concat(Source.fromCompletionStage(valueAfterKill)) .via(killSwitch) .runFold(0, (n, sum) -> n + sum, mat); - switchF.success(Done.getInstance()); + switchF.complete(Done.getInstance()); - assertEquals(new Integer(6), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(6), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @@ -531,18 +523,18 @@ public class GraphStageDocTest { public void demonstrateAGraphStageWithATimer() throws Exception { // tests: - Future result = + CompletionStage result = Source.from(Arrays.asList(1, 2, 3)) .via(new TimedGate<>(Duration.create(2, "seconds"))) .takeWithin(Duration.create(250, "millis")) .runFold(0, (n, sum) -> n + sum, mat); - assertEquals(new Integer(1), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(1), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } //#materialized - public class FirstValue extends GraphStageWithMaterializedValue, Future> { + public class FirstValue extends GraphStageWithMaterializedValue, CompletionStage> { public final Inlet in = Inlet.create("FirstValue.in"); public final Outlet out = Outlet.create("FirstValue.out"); @@ -554,7 +546,7 @@ public class GraphStageDocTest { } @Override - public Tuple2> createLogicAndMaterializedValue(Attributes inheritedAttributes) { + public Tuple2> createLogicAndMaterializedValue(Attributes inheritedAttributes) { Promise promise = Futures.promise(); GraphStageLogic logic = new GraphStageLogic(shape) { @@ -592,13 +584,13 @@ public class GraphStageDocTest { public void demonstrateACustomMaterializedValue() throws Exception { // tests: - RunnableGraph> flow = Source.from(Arrays.asList(1, 2, 3)) + RunnableGraph> flow = Source.from(Arrays.asList(1, 2, 3)) .viaMat(new FirstValue(), Keep.right()) .to(Sink.ignore()); - Future result = flow.run(mat); + CompletionStage result = flow.run(mat); - assertEquals(new Integer(1), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(new Integer(1), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @@ -685,11 +677,11 @@ public class GraphStageDocTest { public void demonstrateADetachedGraphStage() throws Exception { // tests: - Future result1 = Source.from(Arrays.asList(1, 2, 3)) + CompletionStage result1 = Source.from(Arrays.asList(1, 2, 3)) .via(new TwoBuffer<>()) .runFold(0, (acc, n) -> acc + n, mat); - assertEquals(new Integer(6), Await.result(result1, Duration.create(3, "seconds"))); + assertEquals(new Integer(6), result1.toCompletableFuture().get(3, TimeUnit.SECONDS)); TestSubscriber.ManualProbe subscriber = 
TestSubscriber.manualProbe(system); TestPublisher.Probe publisher = TestPublisher.probe(0, system); diff --git a/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java b/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java index 96ba9e1fe2..1a13750712 100644 --- a/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java @@ -6,8 +6,6 @@ package docs.stream; import akka.NotUsed; import akka.actor.*; -import akka.dispatch.Futures; -import akka.dispatch.MessageDispatcher; import akka.japi.pf.ReceiveBuilder; import akka.stream.*; import akka.stream.javadsl.*; @@ -21,14 +19,15 @@ import docs.stream.TwitterStreamQuickstartDocTest.Model.Tweet; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import scala.concurrent.ExecutionContext; -import scala.concurrent.Future; import java.util.Arrays; import java.util.HashSet; import java.util.Optional; import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; -import static akka.pattern.Patterns.ask; +import static akka.pattern.PatternsCS.ask; import static docs.stream.TwitterStreamQuickstartDocTest.Model.AKKA; import static docs.stream.TwitterStreamQuickstartDocTest.Model.tweets; import static junit.framework.TestCase.assertTrue; @@ -64,26 +63,26 @@ public class IntegrationDocTest { class AddressSystem { //#email-address-lookup - public Future> lookupEmail(String handle) + public CompletionStage> lookupEmail(String handle) //#email-address-lookup { - return Futures.successful(Optional.of(handle + "@somewhere.com")); + return CompletableFuture.completedFuture(Optional.of(handle + "@somewhere.com")); } //#phone-lookup - public Future> lookupPhoneNumber(String handle) + public CompletionStage> lookupPhoneNumber(String handle) //#phone-lookup { - return Futures.successful(Optional.of("" + handle.hashCode())); + return CompletableFuture.completedFuture(Optional.of("" + handle.hashCode())); } } class AddressSystem2 { //#email-address-lookup2 - public Future lookupEmail(String handle) + public CompletionStage lookupEmail(String handle) //#email-address-lookup2 { - return Futures.successful(handle + "@somewhere.com"); + return CompletableFuture.completedFuture(handle + "@somewhere.com"); } } @@ -177,11 +176,11 @@ public class IntegrationDocTest { } //#email-server-send - public Future send(Email email) { + public CompletionStage send(Email email) { // ... 
//#email-server-send probe.tell(email.to, ActorRef.noSender()); - return Futures.successful(email); + return CompletableFuture.completedFuture(email); //#email-server-send } //#email-server-send @@ -258,21 +257,21 @@ public class IntegrationDocTest { //#sometimes-slow-service static class SometimesSlowService { - private final ExecutionContext ec; - - public SometimesSlowService(ExecutionContext ec) { + private final Executor ec; + + public SometimesSlowService(Executor ec) { this.ec = ec; } - + private final AtomicInteger runningCount = new AtomicInteger(); - public Future convert(String s) { + public CompletionStage convert(String s) { System.out.println("running: " + s + "(" + runningCount.incrementAndGet() + ")"); - return Futures.future(() -> { + return CompletableFuture.supplyAsync(() -> { if (!s.isEmpty() && Character.isLowerCase(s.charAt(0))) - Thread.sleep(500); + try { Thread.sleep(500); } catch (InterruptedException e) {} else - Thread.sleep(20); + try { Thread.sleep(20); } catch (InterruptedException e) {} System.out.println("completed: " + s + "(" + runningCount.decrementAndGet() + ")"); return s.toUpperCase(); }, ec); @@ -399,15 +398,12 @@ public class IntegrationDocTest { .map(o -> o.get()); //#blocking-mapAsync - final MessageDispatcher blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); final RunnableGraph sendTextMessages = phoneNumbers - .mapAsync(4, phoneNo -> - Futures.future(() -> - smsServer.send(new TextMessage(phoneNo, "I like your tweet")), - blockingEc) - ) + .mapAsync(4, phoneNo -> CompletableFuture.supplyAsync(() -> + smsServer.send(new TextMessage(phoneNo, "I like your tweet")), blockingEc)) .to(Sink.ignore()); sendTextMessages.run(mat); @@ -518,7 +514,7 @@ public class IntegrationDocTest { { //#sometimes-slow-mapAsync - final MessageDispatcher blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); final SometimesSlowService service = new SometimesSlowService(blockingEc); final ActorMaterializer mat = ActorMaterializer.create( @@ -563,7 +559,7 @@ public class IntegrationDocTest { { //#sometimes-slow-mapAsyncUnordered - final MessageDispatcher blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); final SometimesSlowService service = new SometimesSlowService(blockingEc); final ActorMaterializer mat = ActorMaterializer.create( diff --git a/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java b/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java index 0805cfe6e5..3f93b18b10 100644 --- a/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java @@ -7,6 +7,7 @@ package docs.stream; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.DoubleStream; @@ -72,13 +73,12 @@ public class RateTransformationDocTest { }); //#conflate-summarize - final Future>> fut = Source.repeat(0).map(i -> r.nextGaussian()) + final CompletionStage>> fut = Source.repeat(0).map(i -> r.nextGaussian()) .via(statsFlow) .grouped(10) .runWith(Sink.head(), mat); - final Duration timeout = Duration.create(100, TimeUnit.MILLISECONDS); - 
Await.result(fut, timeout); + fut.toCompletableFuture().get(1, TimeUnit.SECONDS); } @Test @@ -97,12 +97,11 @@ public class RateTransformationDocTest { .mapConcat(d -> d); //#conflate-sample - final Future fut = Source.from(new ArrayList(Collections.nCopies(1000, 1.0))) + final CompletionStage fut = Source.from(new ArrayList(Collections.nCopies(1000, 1.0))) .via(sampleFlow) .runWith(Sink.fold(0.0, (agg, next) -> agg + next), mat); - final Duration timeout = Duration.create(1, TimeUnit.SECONDS); - final Double count = Await.result(fut, timeout); + final Double count = fut.toCompletableFuture().get(1, TimeUnit.SECONDS); } @Test @@ -112,17 +111,16 @@ public class RateTransformationDocTest { .expand(in -> Stream.iterate(in, i -> i).iterator()); //#expand-last - final Pair, Future>> probeFut = TestSource. probe(system) + final Pair, CompletionStage>> probeFut = TestSource. probe(system) .via(lastFlow) .grouped(10) .toMat(Sink.head(), Keep.both()) .run(mat); final TestPublisher.Probe probe = probeFut.first(); - final Future> fut = probeFut.second(); + final CompletionStage> fut = probeFut.second(); probe.sendNext(1.0); - final Duration timeout = Duration.create(1, TimeUnit.SECONDS); - final List expanded = Await.result(fut, timeout); + final List expanded = fut.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(expanded.size(), 10); assertEquals(expanded.stream().mapToDouble(d -> d).sum(), 10, 0.1); } diff --git a/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java b/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java index cde7c7f3a0..2ecdf631fc 100644 --- a/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java @@ -6,6 +6,7 @@ package docs.stream; import static org.junit.Assert.assertEquals; import java.util.*; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.Done; @@ -58,10 +59,10 @@ public class StreamPartialFlowGraphDocTest { new Inlet[] {zip1.in0(), zip1.in1(), zip2.in1()}); }); - final Sink> resultSink = Sink.head(); + final Sink> resultSink = Sink.head(); - final RunnableGraph> g = - RunnableGraph.>fromGraph( + final RunnableGraph> g = + RunnableGraph.>fromGraph( GraphDSL.create(resultSink, (builder, sink) -> { // import the partial flow graph explicitly final UniformFanInShape pm = builder.add(pickMaxOfThree); @@ -73,9 +74,9 @@ public class StreamPartialFlowGraphDocTest { return ClosedShape.getInstance(); })); - final Future max = g.run(mat); + final CompletionStage max = g.run(mat); //#simple-partial-flow-graph - assertEquals(Integer.valueOf(3), Await.result(max, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(Integer.valueOf(3), max.toCompletableFuture().get(3, TimeUnit.SECONDS)); } //#source-from-partial-flow-graph @@ -110,10 +111,10 @@ public class StreamPartialFlowGraphDocTest { return SourceShape.of(zip.out()); })); - final Future> firstPair = + final CompletionStage> firstPair = pairs.runWith(Sink.>head(), mat); //#source-from-partial-flow-graph - assertEquals(new Pair<>(0, 1), Await.result(firstPair, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(new Pair<>(0, 1), firstPair.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test @@ -132,12 +133,12 @@ public class StreamPartialFlowGraphDocTest { })); //#flow-from-partial-flow-graph - final Future> matSink = + final CompletionStage> matSink = //#flow-from-partial-flow-graph Source.single(1).via(pairs).runWith(Sink.>head(), mat); 
//#flow-from-partial-flow-graph - assertEquals(new Pair<>(1, "1"), Await.result(matSink, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(new Pair<>(1, "1"), matSink.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @@ -150,12 +151,12 @@ public class StreamPartialFlowGraphDocTest { final Source sources = Source.combine(source1, source2, new ArrayList<>(), i -> Merge.create(i)); //#source-combine - final Future result= + final CompletionStage result= //#source-combine sources.runWith(Sink.fold(0, (a,b) -> a + b), mat); //#source-combine - assertEquals(Integer.valueOf(3), Await.result(result, Duration.create(3, TimeUnit.SECONDS))); + assertEquals(Integer.valueOf(3), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } @Test @@ -165,7 +166,7 @@ public class StreamPartialFlowGraphDocTest { //#sink-combine Sink sendRemotely = Sink.actorRef(actorRef, "Done"); - Sink> localProcessing = Sink.foreach(a -> { /*do something useful*/ } ); + Sink> localProcessing = Sink.foreach(a -> { /*do something useful*/ } ); Sink sinks = Sink.combine(sendRemotely,localProcessing, new ArrayList<>(), a -> Broadcast.create(a)); Source.from(Arrays.asList(new Integer[]{0, 1, 2})).runWith(sinks, mat); diff --git a/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java b/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java index 4d94af4b43..675f3fd342 100644 --- a/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java @@ -6,6 +6,9 @@ package docs.stream; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -13,7 +16,6 @@ import org.junit.*; import static org.junit.Assert.assertEquals; import akka.actor.*; -import akka.dispatch.Futures; import akka.testkit.*; import akka.japi.Pair; import akka.stream.*; @@ -23,7 +25,6 @@ import akka.stream.testkit.javadsl.*; import akka.testkit.TestProbe; import scala.util.*; import scala.concurrent.Await; -import scala.concurrent.Future; import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; @@ -48,13 +49,13 @@ public class StreamTestKitDocTest { @Test public void strictCollection() throws Exception { //#strict-collection - final Sink> sinkUnderTest = Flow.of(Integer.class) + final Sink> sinkUnderTest = Flow.of(Integer.class) .map(i -> i * 2) .toMat(Sink.fold(0, (agg, next) -> agg + next), Keep.right()); - final Future future = Source.from(Arrays.asList(1, 2, 3, 4)) + final CompletionStage future = Source.from(Arrays.asList(1, 2, 3, 4)) .runWith(sinkUnderTest, mat); - final Integer result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final Integer result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assert(result == 20); //#strict-collection } @@ -65,11 +66,10 @@ public class StreamTestKitDocTest { final Source sourceUnderTest = Source.repeat(1) .map(i -> i * 2); - final Future> future = sourceUnderTest + final CompletionStage> future = sourceUnderTest .grouped(10) .runWith(Sink.head(), mat); - final List result = - Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(result, Collections.nCopies(10, 2)); //#grouped-infinite } @@ -80,9 +80,9 @@ public class StreamTestKitDocTest { final Flow 
flowUnderTest = Flow.of(Integer.class) .takeWhile(i -> i < 5); - final Future future = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6)) + final CompletionStage future = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6)) .via(flowUnderTest).runWith(Sink.fold(0, (agg, next) -> agg + next), mat); - final Integer result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final Integer result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assert(result == 10); //#folded-stream } @@ -95,10 +95,10 @@ public class StreamTestKitDocTest { .grouped(2); final TestProbe probe = new TestProbe(system); - final Future>> future = sourceUnderTest + final CompletionStage>> future = sourceUnderTest .grouped(2) .runWith(Sink.head(), mat); - akka.pattern.Patterns.pipe(future, system.dispatcher()).to(probe.ref()); + akka.pattern.PatternsCS.pipe(future, system.dispatcher()).to(probe.ref()); probe.expectMsg(Duration.create(1, TimeUnit.SECONDS), Arrays.asList(Arrays.asList(1, 2), Arrays.asList(3, 4)) ); @@ -129,23 +129,23 @@ public class StreamTestKitDocTest { @Test public void sourceActorRef() throws Exception { //#source-actorref - final Sink> sinkUnderTest = Flow.of(Integer.class) + final Sink> sinkUnderTest = Flow.of(Integer.class) .map(i -> i.toString()) .toMat(Sink.fold("", (agg, next) -> agg + next), Keep.right()); - final Pair> refAndFuture = + final Pair> refAndCompletionStage = Source.actorRef(8, OverflowStrategy.fail()) .toMat(sinkUnderTest, Keep.both()) .run(mat); - final ActorRef ref = refAndFuture.first(); - final Future future = refAndFuture.second(); + final ActorRef ref = refAndCompletionStage.first(); + final CompletionStage future = refAndCompletionStage.second(); ref.tell(1, ActorRef.noSender()); ref.tell(2, ActorRef.noSender()); ref.tell(3, ActorRef.noSender()); ref.tell(new akka.actor.Status.Success("done"), ActorRef.noSender()); - final String result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final String result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(result, "123"); //#source-actorref } @@ -180,19 +180,23 @@ public class StreamTestKitDocTest { @Test public void injectingFailure() throws Exception { //#injecting-failure - final Sink> sinkUnderTest = Sink.head(); + final Sink> sinkUnderTest = Sink.head(); - final Pair, Future> probeAndFuture = + final Pair, CompletionStage> probeAndCompletionStage = TestSource.probe(system) .toMat(sinkUnderTest, Keep.both()) .run(mat); - final TestPublisher.Probe probe = probeAndFuture.first(); - final Future future = probeAndFuture.second(); + final TestPublisher.Probe probe = probeAndCompletionStage.first(); + final CompletionStage future = probeAndCompletionStage.second(); probe.sendError(new Exception("boom")); - Await.ready(future, Duration.create(1, TimeUnit.SECONDS)); - final Throwable exception = ((Failure)future.value().get()).exception(); - assertEquals(exception.getMessage(), "boom"); + try { + future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assert false; + } catch (ExecutionException ee) { + final Throwable exception = ee.getCause(); + assertEquals(exception.getMessage(), "boom"); + } //#injecting-failure } @@ -200,11 +204,11 @@ public class StreamTestKitDocTest { public void testSourceAndTestSink() throws Exception { //#test-source-and-sink final Flow flowUnderTest = Flow.of(Integer.class) - .mapAsyncUnordered(2, sleep -> akka.pattern.Patterns.after( + .mapAsyncUnordered(2, sleep -> akka.pattern.PatternsCS.after( Duration.create(10, TimeUnit.MILLISECONDS), system.scheduler(), 
system.dispatcher(), - Futures.successful(sleep) + CompletableFuture.completedFuture(sleep) )); final Pair, TestSubscriber.Probe> pubAndSub = diff --git a/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java b/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java index af718e2276..78dfe78fa7 100644 --- a/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java @@ -26,6 +26,8 @@ import scala.concurrent.duration.FiniteDuration; import java.util.ArrayList; import java.util.Arrays; import java.util.Set; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; @@ -199,14 +201,14 @@ public class TwitterStreamQuickstartDocTest { } static class Example2 { - public void run(final Materializer mat) throws TimeoutException, InterruptedException { + public void run(final Materializer mat) throws TimeoutException, InterruptedException, ExecutionException { //#backpressure-by-readline - final Future completion = + final CompletionStage completion = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) .map(i -> { System.out.println("map => " + i); return i; }) .runForeach(i -> System.console().readLine("Element = %s continue reading? [press enter]\n", i), mat); - Await.ready(completion, FiniteDuration.create(1, TimeUnit.MINUTES)); + completion.toCompletableFuture().get(1, TimeUnit.SECONDS); //#backpressure-by-readline } } @@ -276,8 +278,8 @@ public class TwitterStreamQuickstartDocTest { @Test public void demonstrateBroadcast() { - final Sink> writeAuthors = Sink.ignore(); - final Sink> writeHashtags = Sink.ignore(); + final Sink> writeAuthors = Sink.ignore(); + final Sink> writeHashtags = Sink.ignore(); //#flow-graph-broadcast RunnableGraph.fromGraph(GraphDSL.create(b -> { @@ -317,24 +319,21 @@ public class TwitterStreamQuickstartDocTest { @Test public void demonstrateCountOnFiniteStream() { //#tweets-fold-count - final Sink> sumSink = + final Sink> sumSink = Sink.fold(0, (acc, elem) -> acc + elem); - final RunnableGraph> counter = + final RunnableGraph> counter = tweets.map(t -> 1).toMat(sumSink, Keep.right()); - final Future sum = counter.run(mat); + final CompletionStage sum = counter.run(mat); - sum.foreach(new Foreach() { - public void each(Integer c) { - System.out.println("Total tweets processed: " + c); - } - }, system.dispatcher()); + sum.thenAcceptAsync(c -> System.out.println("Total tweets processed: " + c), + system.dispatcher()); //#tweets-fold-count new Object() { //#tweets-fold-count-oneline - final Future sum = tweets.map(t -> 1).runWith(sumSink, mat); + final CompletionStage sum = tweets.map(t -> 1).runWith(sumSink, mat); //#tweets-fold-count-oneline }; } @@ -344,18 +343,18 @@ public class TwitterStreamQuickstartDocTest { final Source tweetsInMinuteFromNow = tweets; // not really in second, just acting as if //#tweets-runnable-flow-materialized-twice - final Sink> sumSink = + final Sink> sumSink = Sink.fold(0, (acc, elem) -> acc + elem); - final RunnableGraph> counterRunnableGraph = + final RunnableGraph> counterRunnableGraph = tweetsInMinuteFromNow .filter(t -> t.hashtags().contains(AKKA)) .map(t -> 1) .toMat(sumSink, Keep.right()); // materialize the stream once in the morning - final Future morningTweetsCount = counterRunnableGraph.run(mat); + final CompletionStage morningTweetsCount = 
counterRunnableGraph.run(mat); // and once in the evening, reusing the blueprint - final Future eveningTweetsCount = counterRunnableGraph.run(mat); + final CompletionStage eveningTweetsCount = counterRunnableGraph.run(mat); //#tweets-runnable-flow-materialized-twice } diff --git a/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java b/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java index 67d885db8e..997e790aef 100644 --- a/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java @@ -3,10 +3,9 @@ */ package docs.stream.io; -import static org.junit.Assert.assertEquals; - import java.io.File; import java.io.IOException; +import java.util.concurrent.CompletionStage; import akka.Done; import akka.actor.ActorSystem; @@ -15,7 +14,6 @@ import akka.stream.io.IOResult; import akka.stream.javadsl.Sink; import akka.stream.javadsl.FileIO; import docs.stream.SilenceSystemOut; -import docs.stream.cookbook.RecipeParseLines; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -56,10 +54,10 @@ public class StreamFileDocTest { try { //#file-source - Sink> printlnSink = - Sink.foreach(chunk -> System.out.println(chunk.utf8String())); + Sink> printlnSink = + Sink. foreach(chunk -> System.out.println(chunk.utf8String())); - Future ioResult = + CompletionStage ioResult = FileIO.fromFile(file) .to(printlnSink) .run(mat); @@ -74,7 +72,7 @@ public class StreamFileDocTest { final File file = File.createTempFile(getClass().getName(), ".tmp"); try { - Sink> fileSink = + Sink> fileSink = //#custom-dispatcher-code FileIO.toFile(file) .withAttributes(ActorAttributes.dispatcher("custom-blocking-io-dispatcher")); diff --git a/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java b/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java index c2d16de3d7..aaca1ef7cc 100644 --- a/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java @@ -3,6 +3,7 @@ */ package docs.stream.io; +import java.util.concurrent.CompletionStage; import java.util.concurrent.ConcurrentLinkedQueue; import akka.NotUsed; @@ -60,14 +61,14 @@ public class StreamTcpDocTest { { //#echo-server-simple-bind // IncomingConnection and ServerBinding imported from Tcp - final Source> connections = + final Source> connections = Tcp.get(system).bind("127.0.0.1", 8889); //#echo-server-simple-bind } { final InetSocketAddress localhost = SocketUtils.temporaryServerAddress(); - final Source> connections = + final Source> connections = Tcp.get(system).bind(localhost.getHostName(), localhost.getPort()); // TODO getHostString in Java7 //#echo-server-simple-handle @@ -93,7 +94,7 @@ public class StreamTcpDocTest { final TestProbe serverProbe = new TestProbe(system); - final Source> connections = + final Source> connections = Tcp.get(system).bind(localhost.getHostName(), localhost.getPort()); // TODO getHostString in Java7 //#welcome-banner-chat-server connections.runForeach(connection -> { @@ -146,14 +147,14 @@ public class StreamTcpDocTest { { //#repl-client - final Flow> connection = + final Flow> connection = Tcp.get(system).outgoingConnection("127.0.0.1", 8889); //#repl-client } { - final Flow> connection = - Tcp.get(system).outgoingConnection(localhost.getHostName(), localhost.getPort()); // TODO getHostString in Java7 + final Flow> connection = + Tcp.get(system).outgoingConnection(localhost.getHostString(), localhost.getPort()); //#repl-client final PushStage 
replParser = new PushStage() { diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java index 0e9c53938d..5fbca4bb19 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java @@ -20,12 +20,10 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import scala.Tuple2; -import scala.concurrent.Await; -import scala.concurrent.Future; -import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; @@ -97,9 +95,9 @@ public class RecipeByteStrings extends RecipeTest { rawBytes.transform(() -> new Chunker(CHUNK_LIMIT)); //#bytestring-chunker2 - Future> chunksFuture = chunksStream.grouped(10).runWith(Sink.head(), mat); + CompletionStage> chunksFuture = chunksStream.grouped(10).runWith(Sink.head(), mat); - List chunks = Await.result(chunksFuture, FiniteDuration.create(3, TimeUnit.SECONDS)); + List chunks = chunksFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); for (ByteString chunk : chunks) { assertTrue(chunk.size() <= 2); @@ -159,9 +157,7 @@ public class RecipeByteStrings extends RecipeTest { ByteString.fromArray(new byte[] { 4, 5, 6 }), ByteString.fromArray(new byte[] { 7, 8, 9, 10 }))); - FiniteDuration threeSeconds = FiniteDuration.create(3, TimeUnit.SECONDS); - - List got = Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head(), mat), threeSeconds); + List got = bytes1.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); ByteString acc = ByteString.empty(); for (ByteString b : got) { acc = acc.concat(b); @@ -170,7 +166,7 @@ public class RecipeByteStrings extends RecipeTest { boolean thrown = false; try { - Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head(), mat), threeSeconds); + bytes2.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); } catch (IllegalStateException ex) { thrown = true; } @@ -194,8 +190,7 @@ public class RecipeByteStrings extends RecipeTest { Source compacted = rawBytes.map(bs -> bs.compact()); //#compacting-bytestrings - FiniteDuration timeout = FiniteDuration.create(3, TimeUnit.SECONDS); - List got = Await.result(compacted.grouped(10).runWith(Sink.head(), mat), timeout); + List got = compacted.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); for (ByteString byteString : got) { assertTrue(byteString.isCompact()); diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java index 249e18b239..2ed44cc1a2 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java @@ -18,8 +18,6 @@ import akka.util.ByteString; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import scala.concurrent.Await; -import scala.concurrent.duration.Duration; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -89,7 +87,7 @@ public class RecipeDigest extends RecipeTest { .transform(() -> digestCalculator("SHA-256")); //#calculating-digest2 - ByteString got = 
Await.result(digest.runWith(Sink.head(), mat), Duration.create(3, TimeUnit.SECONDS)); + ByteString got = digest.runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(ByteString.fromInts( 0x24, 0x8d, 0x6a, 0x61, 0xd2, 0x06, 0x38, 0xb8, diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java index 58d0a81346..cc7be4ef3b 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java @@ -16,6 +16,7 @@ import scala.concurrent.Future; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CompletionStage; public class RecipeDroppyBroadcast extends RecipeTest { static ActorSystem system; @@ -38,7 +39,7 @@ public class RecipeDroppyBroadcast extends RecipeTest { new JavaTestKit(system) { //#droppy-bcast // Makes a sink drop elements if too slow - public Sink> droppySink(Sink> sink, int size) { + public Sink> droppySink(Sink> sink, int size) { return Flow. create() .buffer(size, OverflowStrategy.dropHead()) .toMat(sink, Keep.right()); @@ -51,9 +52,9 @@ public class RecipeDroppyBroadcast extends RecipeTest { nums.add(i + 1); } - final Sink> mySink1 = Sink.ignore(); - final Sink> mySink2 = Sink.ignore(); - final Sink> mySink3 = Sink.ignore(); + final Sink> mySink1 = Sink.ignore(); + final Sink> mySink2 = Sink.ignore(); + final Sink> mySink3 = Sink.ignore(); final Source myData = Source.from(nums); diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java index db4305a023..9ceb588be0 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java @@ -13,8 +13,6 @@ import akka.testkit.JavaTestKit; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import scala.concurrent.Await; -import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.List; @@ -50,8 +48,7 @@ public class RecipeFlattenList extends RecipeTest { Source flattened = myData.mapConcat(i -> i); //#flattening-lists - List got = Await.result(flattened.grouped(10).runWith(Sink.head(), mat), - new FiniteDuration(1, TimeUnit.SECONDS)); + List got = flattened.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(got.get(0), new Message("1")); assertEquals(got.get(1), new Message("2")); assertEquals(got.get(2), new Message("3")); diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java index 3742ed5c8e..ef0673bdd5 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java @@ -7,7 +7,7 @@ import akka.NotUsed; import akka.actor.*; import akka.dispatch.Mapper; import akka.japi.pf.ReceiveBuilder; -import akka.pattern.Patterns; +import akka.pattern.PatternsCS; import akka.stream.*; import akka.stream.javadsl.*; import akka.stream.testkit.TestSubscriber; @@ -25,6 +25,7 @@ import scala.runtime.BoxedUnit; import java.util.*; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import 
static junit.framework.TestCase.assertTrue; @@ -150,14 +151,9 @@ public class RecipeGlobalRateLimit extends RecipeTest { return f.mapAsync(parallelism, element -> { final Timeout triggerTimeout = new Timeout(maxAllowedWait); - final Future limiterTriggerFuture = - Patterns.ask(limiter, Limiter.WANT_TO_PASS, triggerTimeout); - return limiterTriggerFuture.map(new Mapper() { - @Override - public T apply(Object parameter) { - return element; - } - }, system.dispatcher()); + final CompletionStage limiterTriggerFuture = + PatternsCS.ask(limiter, Limiter.WANT_TO_PASS, triggerTimeout); + return limiterTriggerFuture.thenApplyAsync(response -> element, system.dispatcher()); }); } //#global-limiter-flow diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java index a4eb450dc6..c87ce3887d 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java @@ -23,6 +23,7 @@ import scala.concurrent.duration.FiniteDuration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static java.util.stream.Collectors.toList; @@ -125,7 +126,7 @@ public class RecipeMultiGroupByTest extends RecipeTest { }); //#multi-groupby - Future> result = multiGroups + CompletionStage> result = multiGroups .grouped(10) .mergeSubstreams() .map(pair -> { @@ -135,7 +136,7 @@ public class RecipeMultiGroupByTest extends RecipeTest { .grouped(10) .runWith(Sink.head(), mat); - List got = Await.result(result, FiniteDuration.create(3, TimeUnit.SECONDS)); + List got = result.toCompletableFuture().get(3, TimeUnit.SECONDS); assertTrue(got.contains("1[1: a, 1: b, all: c, all: d, 1: e]")); assertTrue(got.contains("2[all: c, all: d]")); } diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java index 77c09a3e4d..28d39dea23 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java @@ -15,8 +15,6 @@ import akka.util.ByteString; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import scala.concurrent.Await; -import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.concurrent.TimeUnit; @@ -53,7 +51,7 @@ public class RecipeParseLines extends RecipeTest { .map(b -> b.utf8String()); //#parse-lines - Await.result(lines.grouped(10).runWith(Sink.head(), mat), new FiniteDuration(1, TimeUnit.SECONDS)); + lines.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS); } } diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java index 3770d4804b..a02b13562d 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java @@ -25,6 +25,8 @@ import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class 
RecipeReduceByKeyTest extends RecipeTest { @@ -63,8 +65,8 @@ public class RecipeReduceByKeyTest extends RecipeTest { .mergeSubstreams(); //#word-count - final Future>> f = counts.grouped(10).runWith(Sink.head(), mat); - final Set> result = Await.result(f, getRemainingTime()).stream().collect(Collectors.toSet()); + final CompletionStage>> f = counts.grouped(10).runWith(Sink.head(), mat); + final Set> result = f.toCompletableFuture().get(3, TimeUnit.SECONDS).stream().collect(Collectors.toSet()); final Set> expected = new HashSet<>(); expected.add(new Pair<>("hello", 2)); expected.add(new Pair<>("world", 1)); @@ -106,8 +108,8 @@ public class RecipeReduceByKeyTest extends RecipeTest { (left, right) -> left + right)); //#reduce-by-key-general2 - final Future>> f = counts.grouped(10).runWith(Sink.head(), mat); - final Set> result = Await.result(f, getRemainingTime()).stream().collect(Collectors.toSet()); + final CompletionStage>> f = counts.grouped(10).runWith(Sink.head(), mat); + final Set> result = f.toCompletableFuture().get(3, TimeUnit.SECONDS).stream().collect(Collectors.toSet()); final Set> expected = new HashSet<>(); expected.add(new Pair<>("hello", 2)); expected.add(new Pair<>("world", 1)); diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java index 81e31a7d9c..bb5ea60ca9 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java @@ -19,6 +19,7 @@ import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public class RecipeToStrict extends RecipeTest { @@ -45,11 +46,11 @@ public class RecipeToStrict extends RecipeTest { final int MAX_ALLOWED_SIZE = 100; //#draining-to-list - final Future> strings = myData + final CompletionStage> strings = myData .grouped(MAX_ALLOWED_SIZE).runWith(Sink.head(), mat); //#draining-to-list - Await.result(strings, new FiniteDuration(1, TimeUnit.SECONDS)); + strings.toCompletableFuture().get(3, TimeUnit.SECONDS); } }; } diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java index 70b7421115..518c0a6d88 100644 --- a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java @@ -17,6 +17,7 @@ import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertTrue; @@ -72,9 +73,8 @@ public class RecipeWorkerPool extends RecipeTest { Source processedJobs = data.via(balancer); //#worker-pool2 - FiniteDuration timeout = FiniteDuration.create(200, TimeUnit.MILLISECONDS); - Future> future = processedJobs.map(m -> m.msg).grouped(10).runWith(Sink.head(), mat); - List got = Await.result(future, timeout); + CompletionStage> future = processedJobs.map(m -> m.msg).grouped(10).runWith(Sink.head(), mat); + List got = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertTrue(got.contains("1 done")); assertTrue(got.contains("2 done")); assertTrue(got.contains("3 done")); diff --git a/akka-docs/rst/java/http/client-side/host-level.rst b/akka-docs/rst/java/http/client-side/host-level.rst 
index 51fe6d3e12..889e52d553 100644 --- a/akka-docs/rst/java/http/client-side/host-level.rst +++ b/akka-docs/rst/java/http/client-side/host-level.rst @@ -137,11 +137,11 @@ re-materialized the respective pool is automatically and transparently restarted In addition to the automatic shutdown via the configured idle timeouts it's also possible to trigger the immediate shutdown of a specific pool by calling ``shutdown()`` on the :class:`HostConnectionPool` instance that the pool client -flow materializes into. This ``shutdown()`` call produces a ``Future[Unit]`` which is fulfilled when the pool +flow materializes into. This ``shutdown()`` call produces a ``CompletionStage`` which is fulfilled when the pool termination has been completed. It's also possible to trigger the immediate termination of *all* connection pools in the ``ActorSystem`` at the same -time by calling ``Http.get(system).shutdownAllConnectionPools()``. This call too produces a ``Future[Unit]`` which is fulfilled when +time by calling ``Http.get(system).shutdownAllConnectionPools()``. This call too produces a ``CompletionStage`` which is fulfilled when all pools have terminated. diff --git a/akka-docs/rst/java/http/client-side/request-level.rst b/akka-docs/rst/java/http/client-side/request-level.rst index c731e13ed4..5af297b02d 100644 --- a/akka-docs/rst/java/http/client-side/request-level.rst +++ b/akka-docs/rst/java/http/client-side/request-level.rst @@ -38,7 +38,7 @@ Sometimes your HTTP client needs are very basic. You simply need the HTTP respon want to bother with setting up a full-blown streaming infrastructure. For these cases Akka HTTP offers the ``Http().singleRequest(...)`` method, which simply turns an ``HttpRequest`` instance -into ``Future``. Internally the request is dispatched across the (cached) host connection pool for the +into ``CompletionStage``. Internally the request is dispatched across the (cached) host connection pool for the request's effective URI. Just like in the case of the super-pool flow described above the request must have either an absolute URI or a valid @@ -48,7 +48,7 @@ Just like in the case of the super-pool flow described above the request must ha Using the Future-Based API in Actors ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -When using the ``Future`` based API from inside an ``Actor``, all the usual caveats apply to how one should deal +When using the ``CompletionStage`` based API from inside an ``Actor``, all the usual caveats apply to how one should deal with the futures completion. For example you should not access the Actors state from within the Future's callbacks (such as ``map``, ``onComplete``, ...) and instead you should use the ``pipe`` pattern to pipe the result back to the Actor as a message: diff --git a/akka-docs/rst/java/http/http-model.rst b/akka-docs/rst/java/http/http-model.rst index 7d469146df..595539a7bb 100644 --- a/akka-docs/rst/java/http/http-model.rst +++ b/akka-docs/rst/java/http/http-model.rst @@ -129,7 +129,7 @@ Entity types ``HttpEntityStrict``, ``HttpEntityDefault``, and ``HttpEntityChunke which allows to use them for requests and responses. In contrast, ``HttpEntityCloseDelimited`` can only be used for responses. Streaming entity types (i.e. all but ``HttpEntityStrict``) cannot be shared or serialized. 
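As a side note to the ``CompletionStage``-based request-level API described above, a minimal sketch of the recommended ``pipe`` pattern (the actor class, message handling and URL type are made up for illustration; ``Http.singleRequest`` and ``PatternsCS.pipe`` are the pieces the text refers to, and exact signatures may differ slightly between versions)::

    import static akka.pattern.PatternsCS.pipe;

    import akka.actor.AbstractActor;
    import akka.http.javadsl.Http;
    import akka.http.javadsl.model.HttpRequest;
    import akka.http.javadsl.model.HttpResponse;
    import akka.japi.pf.ReceiveBuilder;
    import akka.stream.ActorMaterializer;
    import akka.stream.Materializer;
    import java.util.concurrent.CompletionStage;

    // Hypothetical actor: fires a request and receives the HttpResponse as a plain message,
    // instead of touching actor state from a CompletionStage callback.
    public class UrlFetcher extends AbstractActor {
      final Http http = Http.get(context().system());
      final Materializer mat = ActorMaterializer.create(context());

      public UrlFetcher() {
        receive(ReceiveBuilder
          .match(String.class, url -> {
            final CompletionStage<HttpResponse> response =
              http.singleRequest(HttpRequest.create(url), mat);
            // pipe completes by sending the HttpResponse back to self as a message,
            // so it is processed inside the actor again
            pipe(response, context().dispatcher()).to(self());
          })
          .match(HttpResponse.class, response ->
            System.out.println("Got response with status " + response.status()))
          .build());
      }
    }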
To create a strict, sharable copy of an -entity or message use ``HttpEntity.toStrict`` or ``HttpMessage.toStrict`` which returns a ``Future`` of the object with +entity or message use ``HttpEntity.toStrict`` or ``HttpMessage.toStrict`` which returns a ``CompletionStage`` of the object with the body data collected into a ``ByteString``. The class ``HttpEntities`` contains static methods to create entities from common types easily. diff --git a/akka-docs/rst/java/http/routing-dsl/handlers.rst b/akka-docs/rst/java/http/routing-dsl/handlers.rst index 189b289023..6201f5951e 100644 --- a/akka-docs/rst/java/http/routing-dsl/handlers.rst +++ b/akka-docs/rst/java/http/routing-dsl/handlers.rst @@ -104,8 +104,8 @@ Deferring Result Creation Sometimes a handler cannot directly complete the request but needs to do some processing asynchronously. In this case the completion of a request needs to be deferred until the result has been generated. This is supported by the routing DSL in two ways: either you can use one of the ``handleWithAsyncN`` methods passing an ``AsyncHandlerN`` which -returns a ``Future``, i.e. an eventual ``RouteResult``, or you can also use a regular handler as shown -above and use ``RequestContext.completeWith`` for completion which takes an ``Future`` as an argument. +returns a ``CompletionStage``, i.e. an eventual ``RouteResult``, or you can also use a regular handler as shown +above and use ``RequestContext.completeWith`` for completion which takes a ``CompletionStage`` as an argument. This is demonstrated in the following example. Consider a asynchronous service defined like this (making use of Java 8 lambdas): @@ -117,16 +117,17 @@ Here the calculator runs the actual calculation in the background and only event service should provide a front-end to that service without having to block while waiting for the results. As explained above this can be done in two ways. -First, you can use ``handleWithAsyncN`` to be able to return a ``Future``: +First, you can use ``handleWithAsyncN`` to be able to return a ``CompletionStage``: .. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-handler-1 -The handler invokes the service and then maps the calculation result to a ``RouteResult`` using ``Future.map`` and -returns the resulting ``Future``. +The handler invokes the service and then maps the calculation result to a ``RouteResult`` using ``CompletionStage.thenApplyAsync`` and +returns the resulting ``CompletionStage``. Note that you should always explicitly provide an executor that designates +where the future transformation task is executed; using the JDK’s global ForkJoinPool is not recommended. Otherwise, you can also still use ``handleWithN`` and use ``RequestContext.completeWith`` to "convert" a -``Future`` into a ``RouteResult`` as shown here: +``CompletionStage`` into a ``RouteResult`` as shown here: .. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-handler-2 diff --git a/akka-docs/rst/java/http/routing-dsl/overview.rst b/akka-docs/rst/java/http/routing-dsl/overview.rst index 663f97d6ad..f02af76681 100644 --- a/akka-docs/rst/java/http/routing-dsl/overview.rst +++ b/akka-docs/rst/java/http/routing-dsl/overview.rst @@ -88,7 +88,7 @@ Bind failures ^^^^^^^^^^^^^ For example the server might be unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. 
only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the Future's completion: +In this case the "binding future" will fail immediately, and we can react to it by listening on the CompletionStage's completion: .. includecode:: ../../code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java :include: binding-failure-high-level-example diff --git a/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst index 7ec157fb9e..bd817be74b 100644 --- a/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst +++ b/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst @@ -12,7 +12,7 @@ Http basic auth allows for protection of one or more routes with a username and To use it you subclass ``HttpBasicAuthenticator`` and provide your authentication logic. There are two factory methods to create the authentication results to return from the authentication logic: ``authenticateAs(T)`` and ``refuseAccess()``. If the authentication is not very quick in memory, for example -calls a database, make sure you do not block the web server thread by executing that in a separate ``Future`` +calls a database, make sure you do not block the web server thread by executing that in a separate ``CompletionStage`` and then ``flatMap`` the result into the authentication result. When you use the authenticator in your routes you must reference the concrete authenticator twice, diff --git a/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst index 63adca42d3..6d7f071ee8 100644 --- a/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst +++ b/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst @@ -18,7 +18,7 @@ the request can either be refused by returning the return value of ``refuseAcces with an object that is application specific by returning the return value of ``authenticateAs(T)``. If the authentication is not very quick in memory, for example calls a separate authentication server -to verify the token, make sure you do not block the web server thread by executing that in a separate ``Future`` +to verify the token, make sure you do not block the web server thread by executing that in a separate ``CompletionStage`` and then ``flatMap`` the result into the authentication result. .. note:: OAuth2 Bearer Token sends the token as clear text and should ONLY EVER be used over diff --git a/akka-docs/rst/java/http/routing-dsl/routes.rst b/akka-docs/rst/java/http/routing-dsl/routes.rst index 84dcbc86f6..46b536e1c3 100644 --- a/akka-docs/rst/java/http/routing-dsl/routes.rst +++ b/akka-docs/rst/java/http/routing-dsl/routes.rst @@ -26,7 +26,7 @@ RouteResult The ``RouteResult`` is an opaque structure that represents possible results of evaluating a route. A ``RouteResult`` can only be created by using one of the methods of the ``RequestContext``. A result can either be a response, if -it was generated by one of the ``completeX`` methods, it can be an eventual result, i.e. a ``Future`` for ``handleWith``, - a function ``Function`` for ``handleWithSyncHandler``, -- a function ``Function>`` for ``handleWithAsyncHandler``. +- a function ``Function>`` for ``handleWithAsyncHandler``.
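The advice above for both authenticators boils down to the same pattern: run the slow check on a dedicated executor and compose the resulting ``CompletionStage`` instead of blocking the web server thread. A minimal, framework-free sketch (the class name, pool size and database check are made up; only JDK APIs are assumed)::

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.CompletionStage;
    import java.util.concurrent.Executor;
    import java.util.concurrent.Executors;

    class SlowAuthenticationSketch {
      // dedicated pool for blocking work, so web server threads stay free
      private static final Executor blockingPool = Executors.newFixedThreadPool(8);

      // stand-in for a slow credential check against a database
      private static boolean checkInDatabase(String user, String password) {
        return "secret".equals(password);
      }

      static CompletionStage<Boolean> authenticate(String user, String password) {
        return CompletableFuture
          .supplyAsync(() -> checkInDatabase(user, password), blockingPool)
          // compose further here, e.g. map the boolean into the authentication result
          .thenApply(ok -> ok);
      }
    }

Here ``thenApply``/``thenCompose`` take the role that ``flatMap`` plays in the wording above.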
Here is a complete example: @@ -192,7 +192,7 @@ Bind failures The first type of failure is when the server is unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the Future's completion: +In this case the "binding future" will fail immediately, and we can react to it by listening on the CompletionStage’s completion: .. includecode:: ../../code/docs/http/javadsl/server/HttpServerExampleDocTest.java :include: binding-failure-handling diff --git a/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst b/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst index bbd2218004..b723028d99 100644 --- a/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst +++ b/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst @@ -7,6 +7,25 @@ Migration Guide 2.0.x to 2.4.x General notes ============= +Java DSL now uses Java 8 types: CompletionStage and Optional +------------------------------------------------------------ + +In order to provide a top-notch Java API we switched from Scala’s Future and Akka’s +``akka.japi.Option`` interim solutions to the JDK’s own types for deferred computation +and optional results. This has been done throughout Streams & HTTP, most notably changing most +materialized types, but also the signature of the ``mapAsync`` combinator and the +asynchronous route result combinators in the HTTP DSL. + +The ``akka.pattern`` package has been updated with a new set of implementations within +the ``PatternsCS`` class that provide the ability to interact between Actors and Futures +(or streams) in terms of ``CompletionStage``. + +Should you need to use Scala Futures with these new Java APIs, please use +the ``scala-java8-compat`` library that comes as a dependency of Akka. For more +information see `the documentation`_. + +.. _`the documentation`: https://github.com/scala/scala-java8-compat + akka.Done and akka.NotUsed replacing Unit and BoxedUnit ------------------------------------------------------- diff --git a/akka-docs/rst/java/stream/stream-composition.rst b/akka-docs/rst/java/stream/stream-composition.rst index 593946a24b..113dfc4858 100644 --- a/akka-docs/rst/java/stream/stream-composition.rst +++ b/akka-docs/rst/java/stream/stream-composition.rst @@ -253,7 +253,7 @@ type is of the nested module (indicated by the color *red* on the diagram): .. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-1 Next, we create a composite :class:`Flow` from two smaller components. Here, the second enclosed :class:`Flow` has a -materialized type of :class:`Future`, and we propagate this to the parent by using ``Keep.right()`` +materialized type of :class:`CompletionStage`, and we propagate this to the parent by using ``Keep.right()`` as the combiner function (indicated by the color *yellow* on the diagram): .. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-2 @@ -267,7 +267,7 @@ we use ``Keep.both()`` to get a :class:`Pair` of them as the materialized type o As the last example, we wire together ``nestedSource`` and ``nestedSink`` and we use a custom combiner function to create a yet another materialized type of the resulting :class:`RunnableGraph`. This combiner function just ignores -
This combiner function just ignores -the :class:`Future` part, and wraps the other two values in a custom case class :class:`MyClass` +the :class:`CompletionStage` part, and wraps the other two values in a custom case class :class:`MyClass` (indicated by color *purple* on the diagram): .. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-4a diff --git a/akka-docs/rst/java/stream/stream-cookbook.rst b/akka-docs/rst/java/stream/stream-cookbook.rst index 18095f5575..4b32a314e3 100644 --- a/akka-docs/rst/java/stream/stream-cookbook.rst +++ b/akka-docs/rst/java/stream/stream-cookbook.rst @@ -60,7 +60,7 @@ In this recipe we will use the ``grouped`` stream operation that groups incoming size collections (it can be seen as the almost opposite version of the "Flattening a stream of sequences" recipe we showed before). By using a ``grouped(MAX_ALLOWED_SIZE)`` we create a stream of groups with maximum size of ``MaxAllowedSeqSize`` and then we take the first element of this stream by attaching a ``Sink.head()``. What we get is a -:class:`Future` containing a sequence with all the elements of the original up to ``MAX_ALLOWED_SIZE`` size (further +:class:`CompletionStage` containing a sequence with all the elements of the original up to ``MAX_ALLOWED_SIZE`` size (further elements are dropped). .. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeToStrict.java#draining-to-list diff --git a/akka-docs/rst/java/stream/stream-error.rst b/akka-docs/rst/java/stream/stream-error.rst index 1a195ec22d..c7dbde6d2d 100644 --- a/akka-docs/rst/java/stream/stream-error.rst +++ b/akka-docs/rst/java/stream/stream-error.rst @@ -67,7 +67,7 @@ Assume that we can lookup their email address using: .. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-address-lookup2 -The ``Future`` is completed with ``Failure`` if the email is not found. +The ``CompletionStage`` is completed normally if the email is not found. Transforming the stream of authors to a stream of email addresses by using the ``lookupEmail`` service can be done with ``mapAsync`` and we use ``Supervision.getResumingDecider`` to drop @@ -76,4 +76,4 @@ unknown email addresses: .. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-addresses-mapAsync-supervision If we would not use ``Resume`` the default stopping strategy would complete the stream -with failure on the first ``Future`` that was completed with ``Failure``. +with failure on the first ``CompletionStage`` that was completed exceptionally. diff --git a/akka-docs/rst/java/stream/stream-flows-and-basics.rst b/akka-docs/rst/java/stream/stream-flows-and-basics.rst index 4df9b64972..1243e517e8 100644 --- a/akka-docs/rst/java/stream/stream-flows-and-basics.rst +++ b/akka-docs/rst/java/stream/stream-flows-and-basics.rst @@ -80,7 +80,7 @@ one actor prepare the work, and then have it be materialized at some completely After running (materializing) the ``RunnableGraph`` we get a special container object, the ``MaterializedMap``. Both sources and sinks are able to put specific objects into this map. Whether they put something in or not is implementation -dependent. For example a ``FoldSink`` will make a ``Future`` available in this map which will represent the result +dependent. For example a ``FoldSink`` will make a ``CompletionStage`` available in this map which will represent the result of the folding process over the stream. 
In general, a stream can expose multiple materialized values, but it is quite common to be interested in only the value of the Source or the Sink in the stream. For this reason there is a convenience method called ``runWith()`` available for ``Sink``, ``Source`` or ``Flow`` requiring, respectively, @@ -105,7 +105,7 @@ of the given sink or source. Since a stream can be materialized multiple times, the ``MaterializedMap`` returned is different for each materialization. In the example below we create two running materialized instance of the stream that we described in the ``runnable`` -variable, and both materializations give us a different ``Future`` from the map even though we used the same ``sink`` +variable, and both materializations give us a different ``CompletionStage`` from the map even though we used the same ``sink`` to refer to the future: .. includecode:: ../code/docs/stream/FlowDocTest.java#stream-reuse diff --git a/akka-docs/rst/java/stream/stream-graphs.rst b/akka-docs/rst/java/stream/stream-graphs.rst index 97a3429021..aca91f52f2 100644 --- a/akka-docs/rst/java/stream/stream-graphs.rst +++ b/akka-docs/rst/java/stream/stream-graphs.rst @@ -222,7 +222,7 @@ times to acquire the necessary number of outlets. .. includecode:: ../code/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue Be careful not to introduce a cycle where the materialized value actually contributes to the materialized value. -The following example demonstrates a case where the materialized ``Future`` of a fold is fed back to the fold itself. +The following example demonstrates a case where the materialized ``CompletionStage`` of a fold is fed back to the fold itself. .. includecode:: ../code/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue-cycle diff --git a/akka-docs/rst/java/stream/stream-integrations.rst b/akka-docs/rst/java/stream/stream-integrations.rst index 035d6fd291..2615b9c406 100644 --- a/akka-docs/rst/java/stream/stream-integrations.rst +++ b/akka-docs/rst/java/stream/stream-integrations.rst @@ -169,7 +169,7 @@ Finally, sending the emails: .. includecode:: ../code/docs/stream/IntegrationDocTest.java#send-emails ``mapAsync`` is applying the given function that is calling out to the external service to -each of the elements as they pass through this processing step. The function returns a :class:`Future` +each of the elements as they pass through this processing step. The function returns a :class:`CompletionStage` and the value of that future will be emitted downstreams. The number of Futures that shall run in parallel is given as the first argument to ``mapAsync``. These Futures may complete in any order, but the elements that are emitted @@ -190,8 +190,8 @@ is not important and then we can use the more efficient ``mapAsyncUnordered``: .. includecode:: ../code/docs/stream/IntegrationDocTest.java#external-service-mapAsyncUnordered -In the above example the services conveniently returned a :class:`Future` of the result. -If that is not the case you need to wrap the call in a :class:`Future`. If the service call +In the above example the services conveniently returned a :class:`CompletionStage` of the result. +If that is not the case you need to wrap the call in a :class:`CompletionStage`. If the service call involves blocking you must also make sure that you run it on a dedicated execution context, to avoid starvation and disturbance of other tasks in the system. @@ -215,7 +215,7 @@ external service, you can use ``ask``: .. 
includecode:: ../code/docs/stream/IntegrationDocTest.java#save-tweets Note that if the ``ask`` is not completed within the given timeout the stream is completed with failure. -If that is not desired outcome you can use ``recover`` on the ``ask`` :class:`Future`. +If that is not the desired outcome you can use ``recover`` on the ``ask`` :class:`CompletionStage`. Illustrating ordering and parallelism ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/akka-docs/rst/java/stream/stream-io.rst b/akka-docs/rst/java/stream/stream-io.rst index b0563c66a2..17f2bdc12a 100644 --- a/akka-docs/rst/java/stream/stream-io.rst +++ b/akka-docs/rst/java/stream/stream-io.rst @@ -16,7 +16,7 @@ Streaming TCP Accepting connections: Echo Server ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -In order to implement a simple EchoServer we ``bind`` to a given address, which returns a ``Source<IncomingConnection, Future<ServerBinding>>``, +In order to implement a simple EchoServer we ``bind`` to a given address, which returns a ``Source<IncomingConnection, CompletionStage<ServerBinding>>``, which will emit an :class:`IncomingConnection` element for each new connection that the Server should handle: .. includecode:: ../code/docs/stream/io/StreamTcpDocTest.java#echo-server-simple-bind diff --git a/akka-docs/rst/java/stream/stream-quickstart.rst b/akka-docs/rst/java/stream/stream-quickstart.rst index 2a9ad5a4ba..cacdc00a96 100644 --- a/akka-docs/rst/java/stream/stream-quickstart.rst +++ b/akka-docs/rst/java/stream/stream-quickstart.rst @@ -163,21 +163,21 @@ First, let's write such an element counter using ``Flow.of(Class)`` and ``Sink.f First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in order to combine those with a ``Sink.fold`` that will sum all ``Integer`` elements of the stream and make its result available as -a ``Future``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously +a ``CompletionStage``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously prepared Sink using ``toMat``. Remember those mysterious ``Mat`` type parameters on ``Source``, ``Flow`` and ``Sink``? They represent the type of values these processing parts return when materialized. When you chain these together, you can explicitly combine their materialized values: in our example we used the ``Keep.right`` predefined function, which tells the implementation to only care about the materialized type of the stage currently appended to the right. -The materialized type of ``sumSink`` is ``Future`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` -has also a type parameter of ``Future``. +The materialized type of ``sumSink`` is ``CompletionStage`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` -has also a type parameter of ``CompletionStage``. This step does *not* yet materialize the processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can -be ``run()``, as indicated by its type: ``RunnableGraph<Future<Integer>>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` +be ``run()``, as indicated by its type: ``RunnableGraph<CompletionStage<Integer>>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph`` is of type ``T``. -In our case this type is ``Future`` which, when completed, will contain the total length of our tweets stream.
+In our case this type is ``CompletionStage`` which, when completed, will contain the total length of our tweets stream. In case of the stream failing, this future would complete with a Failure. A :class:`RunnableGraph` may be reused diff --git a/akka-docs/rst/java/stream/stream-testkit.rst b/akka-docs/rst/java/stream/stream-testkit.rst index 68c62ea747..6f863f0810 100644 --- a/akka-docs/rst/java/stream/stream-testkit.rst +++ b/akka-docs/rst/java/stream/stream-testkit.rst @@ -48,7 +48,7 @@ used for writing stream tests that use familiar :class:`TestProbe` from the :mod:`akka-testkit` API. One of the more straightforward tests would be to materialize stream to a -:class:`Future` and then use ``pipe`` pattern to pipe the result of that future +:class:`CompletionStage` and then use ``PatternsCS.pipe`` pattern to pipe the result of that future to the probe. .. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#pipeto-testprobe diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala index b56c8042bd..fed66dc34c 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala @@ -105,9 +105,9 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { def sample() = path("sample") { - extractExecutionContext { implicit ec => + extractExecutionContext { implicit executor => complete { - Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext + Future(s"Run on ${executor.##}!") // uses the `executor` ExecutionContext } } } @@ -132,9 +132,9 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { //#extractExecutionContext-0 def sample() = path("sample") { - extractExecutionContext { implicit ec => + extractExecutionContext { implicit executor => complete { - Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext + Future(s"Run on ${executor.##}!") // uses the `executor` ExecutionContext } } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java index a17d7c1c72..0f465cc243 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java @@ -9,9 +9,9 @@ import akka.http.scaladsl.model.HttpEntity$; import akka.stream.Materializer; import akka.stream.javadsl.Source; import akka.util.ByteString; -import scala.concurrent.Future; import java.util.OptionalLong; +import java.util.concurrent.CompletionStage; /** * Represents the entity of an Http message. An entity consists of the content-type of the data @@ -133,7 +133,7 @@ public interface HttpEntity { * Use getDataBytes and stream processing instead if the expected data is big or * is likely to take a long time. */ - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); /** * The entity type which consists of a predefined fixed ByteString of data. 
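The pattern behind the javadsl signature changes in this patch is uniform: the scaladsl implementation keeps returning scala.concurrent.Future, and the Java-facing wrapper converts to CompletionStage at the boundary. As a rough sketch only (assuming nothing beyond the scala-java8-compat dependency the migration guide above points to), the bridge used throughout this diff via .toJava / .toScala looks like the following; the helper names asJava and asScala are illustrative, not part of the patch:

    // Sketch, not part of the patch: the Future <-> CompletionStage bridge from
    // scala-java8-compat that the javadsl wrappers in this diff rely on.
    import java.util.concurrent.CompletionStage
    import scala.concurrent.Future
    import scala.compat.java8.FutureConverters._

    def asJava[T](f: Future[T]): CompletionStage[T] = f.toJava     // expose a Scala result through the Java API
    def asScala[T](cs: CompletionStage[T]): Future[T] = cs.toScala // feed a Java handler result back into Scala code

This keeps the scaladsl modules unchanged while the javadsl exposes only JDK types, which is the shape of the Http.scala and Multipart.scala hunks that follow.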
diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java b/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java index 794e15b4c3..93f23aa10e 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java @@ -6,8 +6,8 @@ package akka.http.javadsl.model; import java.util.Map; import java.util.Optional; +import java.util.concurrent.CompletionStage; -import scala.concurrent.Future; import akka.http.javadsl.model.headers.ContentDisposition; import akka.http.javadsl.model.headers.ContentDispositionType; import akka.http.javadsl.model.headers.RangeUnit; @@ -31,9 +31,9 @@ public interface Multipart { /** * Converts this content into its strict counterpart. * The given `timeout` denotes the max time that an individual part must be read in. - * The Future is failed with an TimeoutException if one part isn't read completely after the given timeout. + * The CompletionStage is failed with a TimeoutException if one part isn't read completely after the given timeout. */ - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); /** * Creates an entity from this multipart object. @@ -59,7 +59,7 @@ public interface Multipart { Optional getDispositionType(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.BodyPart { HttpEntity.Strict getEntity(); @@ -72,7 +72,7 @@ public interface Multipart { interface General extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.General, Multipart.Strict { Source getParts(); @@ -81,7 +81,7 @@ public interface Multipart { } interface BodyPart extends Multipart.BodyPart { - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.General.BodyPart, Multipart.BodyPart.Strict { } @@ -95,7 +95,7 @@ public interface Multipart { interface FormData extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.FormData, Multipart.Strict { Source getParts(); @@ -109,7 +109,7 @@ public interface Multipart { Iterable getAdditionalHeaders(); Optional getFilename(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.FormData.BodyPart, Multipart.BodyPart.Strict { } @@ -123,7 +123,7 @@ public interface Multipart { interface ByteRanges extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.ByteRanges, Multipart.Strict { Source getParts(); @@ -137,7 +137,7 @@ public interface Multipart { Iterable getAdditionalHeaders(); akka.http.javadsl.model.headers.ContentRange getContentRangeHeader(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends
Multipart.ByteRanges.BodyPart, Multipart.BodyPart.Strict { } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala index 90b2dc2a2c..f35906f30f 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala @@ -274,7 +274,7 @@ private[http] object HttpServerBluePrint { } } private def schedule(delay: FiniteDuration, handler: HttpRequest ⇒ HttpResponse): Cancellable = - materializer.scheduleOnce(delay, new Runnable { def run() = trigger.invoke(self, handler(request)) }) + materializer.scheduleOnce(delay, new Runnable { def run() = trigger.invoke((self, handler(request))) }) import akka.http.impl.util.JavaMapping.Implicits._ /** JAVA API **/ diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala b/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala index 183777255c..ae19a8fc9e 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala @@ -11,7 +11,6 @@ import akka.http.impl.util.JavaMapping.HttpsConnectionContext import akka.http.javadsl.model.ws._ import akka.{ stream, NotUsed } import akka.stream.io.{ SslTlsInbound, SslTlsOutbound } - import scala.language.implicitConversions import scala.concurrent.Future import scala.util.Try @@ -21,13 +20,13 @@ import akka.actor.{ ExtendedActorSystem, ActorSystem, ExtensionIdProvider, Exten import akka.event.LoggingAdapter import akka.stream.Materializer import akka.stream.javadsl.{ BidiFlow, Flow, Source } - import akka.http.impl.util.JavaMapping.Implicits._ import akka.http.scaladsl.{ model ⇒ sm } import akka.http.javadsl.model._ import akka.http._ - import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage object Http extends ExtensionId[Http] with ExtensionIdProvider { override def get(system: ActorSystem): Http = super.get(system) @@ -38,6 +37,10 @@ object Http extends ExtensionId[Http] with ExtensionIdProvider { class Http(system: ExtendedActorSystem) extends akka.actor.Extension { import akka.dispatch.ExecutionContexts.{ sameThreadExecutionContext ⇒ ec } + import language.implicitConversions + private implicit def completionStageCovariant[T, U >: T](in: CompletionStage[T]): CompletionStage[U] = in.asInstanceOf[CompletionStage[U]] + private implicit def javaModelIsScalaModel[J <: AnyRef, S <: J](in: Future[J])(implicit ev: JavaMapping.Inherited[J, S]): Future[S] = in.asInstanceOf[Future[S]] + private lazy val delegate = akka.http.scaladsl.Http(system) /** @@ -86,10 +89,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized * [[ServerBinding]]. 
*/ - def bind(interface: String, port: Int, materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = + def bind(interface: String, port: Int, materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = new Source(delegate.bind(interface, port)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding @@ -106,10 +109,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def bind(interface: String, port: Int, connectionContext: ConnectionContext, settings: ServerSettings, - materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = new Source(delegate.bind(interface, port, settings = settings, connectionContext = ConnectionContext.noEncryption().asScala)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding @@ -125,10 +128,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def bind(interface: String, port: Int, connectionContext: ConnectionContext, - materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = new Source(delegate.bind(interface, port, connectionContext = connectionContext.asScala)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding @@ -146,10 +149,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { connectionContext: ConnectionContext, settings: ServerSettings, log: LoggingAdapter, - materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = new Source(delegate.bind(interface, port, ConnectionContext.noEncryption().asScala, settings, log)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -160,10 +163,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def bindAndHandle(handler: Flow[HttpRequest, HttpResponse, _], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, interface, port)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -175,10 +178,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def bindAndHandle(handler: 
Flow[HttpRequest, HttpResponse, _], interface: String, port: Int, connectionContext: ConnectionContext, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, interface, port, connectionContext.asScala)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -192,10 +195,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { settings: ServerSettings, connectionContext: ConnectionContext, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, interface, port, connectionContext.asScala, settings, log)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -206,9 +209,9 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def bindAndHandleSync(handler: Function[HttpRequest, HttpResponse], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandleSync(handler.apply(_).asScala, interface, port)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -220,9 +223,9 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def bindAndHandleSync(handler: Function[HttpRequest, HttpResponse], interface: String, port: Int, connectionContext: ConnectionContext, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandleSync(handler.apply(_).asScala, interface, port, connectionContext.asScala)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -236,10 +239,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { settings: ServerSettings, connectionContext: ConnectionContext, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandleSync(handler.apply(_).asScala, interface, port, connectionContext.asScala, settings, log)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -248,11 +251,11 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The number of concurrently accepted connections can be configured by overriding * the `akka.http.server.max-connections` setting. 
*/ - def bindAndHandleAsync(handler: Function[HttpRequest, Future[HttpResponse]], + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = - delegate.bindAndHandleAsync(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], interface, port)(materializer) - .map(new ServerBinding(_))(ec) + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, interface, port)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -261,12 +264,12 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The number of concurrently accepted connections can be configured by overriding * the `akka.http.server.max-connections` setting. */ - def bindAndHandleAsync(handler: Function[HttpRequest, Future[HttpResponse]], + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], interface: String, port: Int, connectionContext: ConnectionContext, - materializer: Materializer): Future[ServerBinding] = - delegate.bindAndHandleAsync(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], interface, port, connectionContext.asScala)(materializer) - .map(new ServerBinding(_))(ec) + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, interface, port, connectionContext.asScala)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -275,14 +278,14 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The number of concurrently accepted connections can be configured by overriding * the `akka.http.server.max-connections` setting. */ - def bindAndHandleAsync(handler: Function[HttpRequest, Future[HttpResponse]], + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], interface: String, port: Int, settings: ServerSettings, connectionContext: ConnectionContext, parallelism: Int, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = - delegate.bindAndHandleAsync(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, interface, port, connectionContext.asScala, settings, parallelism, log)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava /** * Constructs a client layer stage using the configured default [[ClientConnectionSettings]]. @@ -311,7 +314,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * * If the hostname is given with an `https://` prefix, the default [[HttpsConnectionContext]] will be used. */ - def outgoingConnection(host: String): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = + def outgoingConnection(host: String): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = outgoingConnection(ConnectHttp.toHost(host)) /** @@ -320,7 +323,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * * Use the [[ConnectHttp]] DSL to configure target host and whether HTTPS should be used. 
*/ - def outgoingConnection(to: ConnectHttp): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = + def outgoingConnection(to: ConnectHttp): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = adaptOutgoingFlow { if (to.isHttps) delegate.outgoingConnectionHttps(to.host, to.port, to.effectiveConnectionContext(defaultClientHttpsContext).asScala) else delegate.outgoingConnection(to.host, to.port) @@ -334,7 +337,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { connectionContext: ConnectionContext, localAddress: Optional[InetSocketAddress], settings: ClientConnectionSettings, - log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = + log: LoggingAdapter): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = adaptOutgoingFlow { connectionContext match { case https: HttpsConnectionContext ⇒ delegate.outgoingConnectionHttps(host, port, https.asScala, localAddress.asScala, settings, log) @@ -504,8 +507,8 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * Note that the request must have either an absolute URI or a valid `Host` header, otherwise * the future will be completed with an error. */ - def singleRequest(request: HttpRequest, materializer: Materializer): Future[HttpResponse] = - delegate.singleRequest(request.asScala)(materializer) + def singleRequest(request: HttpRequest, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala)(materializer).toJava /** * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's @@ -516,8 +519,8 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * Note that the request must have either an absolute URI or a valid `Host` header, otherwise * the future will be completed with an error. */ - def singleRequest(request: HttpRequest, connectionContext: HttpsConnectionContext, materializer: Materializer): Future[HttpResponse] = - delegate.singleRequest(request.asScala, connectionContext.asScala)(materializer) + def singleRequest(request: HttpRequest, connectionContext: HttpsConnectionContext, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala, connectionContext.asScala)(materializer).toJava /** * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's @@ -531,15 +534,15 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def singleRequest(request: HttpRequest, connectionContext: HttpsConnectionContext, settings: ConnectionPoolSettings, - log: LoggingAdapter, materializer: Materializer): Future[HttpResponse] = - delegate.singleRequest(request.asScala, connectionContext.asScala, settings, log)(materializer) + log: LoggingAdapter, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala, connectionContext.asScala, settings, log)(materializer).toJava /** * Constructs a WebSocket [[BidiFlow]]. * * The layer is not reusable and must only be materialized once. 
*/ - def webSocketClientLayer(request: WebSocketRequest): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebSocketUpgradeResponse]] = + def webSocketClientLayer(request: WebSocketRequest): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala)) /** @@ -549,7 +552,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The layer is not reusable and must only be materialized once. */ def webSocketClientLayer(request: WebSocketRequest, - settings: ClientConnectionSettings): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebSocketUpgradeResponse]] = + settings: ClientConnectionSettings): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala, settings)) /** @@ -560,7 +563,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def webSocketClientLayer(request: WebSocketRequest, settings: ClientConnectionSettings, - log: LoggingAdapter): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebSocketUpgradeResponse]] = + log: LoggingAdapter): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala, settings, log)) /** @@ -568,7 +571,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * * The layer is not reusable and must only be materialized once. */ - def webSocketClientFlow(request: WebSocketRequest): Flow[Message, Message, Future[WebSocketUpgradeResponse]] = + def webSocketClientFlow(request: WebSocketRequest): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsFlow { delegate.webSocketClientFlow(request.asScala) } @@ -582,7 +585,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { connectionContext: ConnectionContext, localAddress: Optional[InetSocketAddress], settings: ClientConnectionSettings, - log: LoggingAdapter): Flow[Message, Message, Future[WebSocketUpgradeResponse]] = + log: LoggingAdapter): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsFlow { delegate.webSocketClientFlow(request.asScala, connectionContext.asScala, localAddress.asScala, settings, log) } @@ -595,7 +598,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def singleWebSocketRequest[T](request: WebSocketRequest, clientFlow: Flow[Message, Message, T], - materializer: Materializer): Pair[Future[WebSocketUpgradeResponse], T] = + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = adaptWsResultTuple { delegate.singleWebSocketRequest( request.asScala, @@ -611,7 +614,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def singleWebSocketRequest[T](request: WebSocketRequest, clientFlow: Flow[Message, Message, T], connectionContext: ConnectionContext, - materializer: Materializer): Pair[Future[WebSocketUpgradeResponse], T] = + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = adaptWsResultTuple { delegate.singleWebSocketRequest( request.asScala, @@ -629,7 +632,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { localAddress: Optional[InetSocketAddress], settings: ClientConnectionSettings, log: LoggingAdapter, - materializer: Materializer): Pair[Future[WebSocketUpgradeResponse], 
T] = + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = adaptWsResultTuple { delegate.singleWebSocketRequest( request.asScala, @@ -648,7 +651,7 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * If existing pool client flows are re-used or new ones materialized concurrently with or after this * method call the respective connection pools will be restarted and not contribute to the returned future. */ - def shutdownAllConnectionPools(): Future[Unit] = delegate.shutdownAllConnectionPools() + def shutdownAllConnectionPools(): CompletionStage[Unit] = delegate.shutdownAllConnectionPools().toJava /** * Gets the default @@ -673,11 +676,11 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { JavaMapping.toJava(scalaFlow)(JavaMapping.flowMapping[Pair[HttpRequest, T], (scaladsl.model.HttpRequest, T), Pair[Try[HttpResponse], T], (Try[scaladsl.model.HttpResponse], T), Mat]) } - private def adaptOutgoingFlow[T, Mat](scalaFlow: stream.scaladsl.Flow[scaladsl.model.HttpRequest, scaladsl.model.HttpResponse, Future[scaladsl.Http.OutgoingConnection]]): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = + private def adaptOutgoingFlow[T, Mat](scalaFlow: stream.scaladsl.Flow[scaladsl.model.HttpRequest, scaladsl.model.HttpResponse, Future[scaladsl.Http.OutgoingConnection]]): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = Flow.fromGraph { akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) .viaMat(scalaFlow)(Keep.right) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec)) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava) } private def adaptServerLayer(serverLayer: scaladsl.Http.ServerLayer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = @@ -690,12 +693,12 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { JavaMapping.adapterBidiFlow[HttpRequest, sm.HttpRequest, sm.HttpResponse, HttpResponse] .atop(clientLayer)) - private def adaptWsBidiFlow(wsLayer: scaladsl.Http.WebSocketClientLayer): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebSocketUpgradeResponse]] = + private def adaptWsBidiFlow(wsLayer: scaladsl.Http.WebSocketClientLayer): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = new BidiFlow( JavaMapping.adapterBidiFlow[Message, sm.ws.Message, sm.ws.Message, Message] .atopMat(wsLayer)((_, s) ⇒ adaptWsUpgradeResponse(s))) - private def adaptWsFlow(wsLayer: stream.scaladsl.Flow[sm.ws.Message, sm.ws.Message, Future[scaladsl.model.ws.WebSocketUpgradeResponse]]): Flow[Message, Message, Future[WebSocketUpgradeResponse]] = + private def adaptWsFlow(wsLayer: stream.scaladsl.Flow[sm.ws.Message, sm.ws.Message, Future[scaladsl.model.ws.WebSocketUpgradeResponse]]): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = Flow.fromGraph(JavaMapping.adapterBidiFlow[Message, sm.ws.Message, sm.ws.Message, Message].joinMat(wsLayer)(Keep.right).mapMaterializedValue(adaptWsUpgradeResponse _)) private def adaptWsFlow[Mat](javaFlow: Flow[Message, Message, Mat]): stream.scaladsl.Flow[scaladsl.model.ws.Message, scaladsl.model.ws.Message, Mat] = @@ -704,10 +707,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { .viaMat(javaFlow.asScala)(Keep.right) .map(_.asScala) - private def adaptWsResultTuple[T](result: (Future[scaladsl.model.ws.WebSocketUpgradeResponse], T)): Pair[Future[WebSocketUpgradeResponse], T] = + private def 
adaptWsResultTuple[T](result: (Future[scaladsl.model.ws.WebSocketUpgradeResponse], T)): Pair[CompletionStage[WebSocketUpgradeResponse], T] = result match { case (fut, tMat) ⇒ Pair(adaptWsUpgradeResponse(fut), tMat) } - private def adaptWsUpgradeResponse(responseFuture: Future[scaladsl.model.ws.WebSocketUpgradeResponse]): Future[WebSocketUpgradeResponse] = - responseFuture.map(WebSocketUpgradeResponse.adapt)(system.dispatcher) + private def adaptWsUpgradeResponse(responseFuture: Future[scaladsl.model.ws.WebSocketUpgradeResponse]): CompletionStage[WebSocketUpgradeResponse] = + responseFuture.map(WebSocketUpgradeResponse.adapt)(system.dispatcher).toJava } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala b/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala index 6c8ec601ac..e8ac70807c 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala @@ -6,12 +6,14 @@ package akka.http.javadsl import java.net.InetSocketAddress import akka.NotUsed -import akka.japi.function.Function; -import scala.concurrent.Future +import akka.japi.function.Function import akka.stream.Materializer import akka.stream.javadsl.Flow import akka.http.javadsl.model._ import akka.http.scaladsl.{ model ⇒ sm } +import java.util.concurrent.CompletionStage +import scala.concurrent.Future +import scala.compat.java8.FutureConverters._ /** * Represents one accepted incoming HTTP connection. @@ -50,12 +52,12 @@ class IncomingConnection private[http] (delegate: akka.http.scaladsl.Http.Incomi /** * Handles the connection with the given handler function. */ - def handleWithAsyncHandler(handler: Function[HttpRequest, Future[HttpResponse]], materializer: Materializer): Unit = - delegate.handleWithAsyncHandler(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]])(materializer) + def handleWithAsyncHandler(handler: Function[HttpRequest, CompletionStage[HttpResponse]], materializer: Materializer): Unit = + delegate.handleWithAsyncHandler(handler.apply(_).toScala.asInstanceOf[Future[sm.HttpResponse]])(materializer) /** * Handles the connection with the given handler function. */ - def handleWithAsyncHandler(handler: Function[HttpRequest, Future[HttpResponse]], parallelism: Int, materializer: Materializer): Unit = - delegate.handleWithAsyncHandler(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], parallelism)(materializer) + def handleWithAsyncHandler(handler: Function[HttpRequest, CompletionStage[HttpResponse]], parallelism: Int, materializer: Materializer): Unit = + delegate.handleWithAsyncHandler(handler.apply(_).toScala.asInstanceOf[Future[sm.HttpResponse]], parallelism)(materializer) } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala b/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala index 35f8496f6e..3bf13c748c 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala @@ -5,7 +5,8 @@ package akka.http.javadsl import java.net.InetSocketAddress -import scala.concurrent.Future +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * Represents a prospective HTTP server binding. 
@@ -20,7 +21,7 @@ class ServerBinding private[http] (delegate: akka.http.scaladsl.Http.ServerBindi * Asynchronously triggers the unbinding of the port that was bound by the materialization of the `connections` * [[Source]] * - * The produced [[Future]] is fulfilled when the unbinding has been completed. + * The produced [[java.util.concurrent.CompletionStage]] is fulfilled when the unbinding has been completed. */ - def unbind(): Future[Unit] = delegate.unbind() + def unbind(): CompletionStage[Unit] = delegate.unbind().toJava } diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala index f8f466920a..4ab9d757b3 100755 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala @@ -25,11 +25,16 @@ import akka.http.impl.util.StreamUtils import akka.http.impl.util.JavaMapping.Implicits._ import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage /** * Models the entity (aka "body" or "content) of an HTTP message. */ sealed trait HttpEntity extends jm.HttpEntity { + import language.implicitConversions + private implicit def completionStageCovariant[T, U >: T](in: CompletionStage[T]): CompletionStage[U] = in.asInstanceOf[CompletionStage[U]] + /** * Determines whether this entity is known to be empty. */ @@ -96,8 +101,8 @@ sealed trait HttpEntity extends jm.HttpEntity { override def isChunked: Boolean = false /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.HttpEntity.Strict] = - toStrict(timeoutMillis.millis)(materializer) + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.HttpEntity.Strict] = + toStrict(timeoutMillis.millis)(materializer).toJava } /* An entity that can be used for body parts */ diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala index 0f7102c49d..8dfc3d6bb8 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala @@ -7,7 +7,6 @@ package akka.http.scaladsl.model import java.io.File import java.util.Optional import akka.http.impl.util.Util - import scala.collection.immutable.VectorBuilder import scala.concurrent.duration.FiniteDuration import scala.concurrent.Future @@ -24,8 +23,9 @@ import akka.http.scaladsl.model.headers._ import akka.http.impl.engine.rendering.BodyPartRenderer import akka.http.javadsl.{ model ⇒ jm } import FastFuture._ - import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage /** * The model of multipart content for media-types `multipart/\*` (general multipart content), @@ -74,8 +74,8 @@ sealed trait Multipart extends jm.Multipart { JSource.fromGraph(parts.asInstanceOf[Source[Multipart.BodyPart, AnyRef]]) /** Java API */ - def toStrict(timeoutMillis: Long, materializer: Materializer): Future[_ <: jm.Multipart.Strict] = - toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer) + def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[_ <: jm.Multipart.Strict] = + toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer).toJava /** 
Java API */ def toEntity(charset: jm.HttpCharset, boundary: String): jm.RequestEntity = @@ -170,8 +170,8 @@ object Multipart { def getDispositionType: Optional[jm.headers.ContentDispositionType] = Util.convertOption(dispositionType) /** Java API */ - def toStrict(timeoutMillis: Long, materializer: Materializer): Future[_ <: jm.Multipart.BodyPart.Strict] = - toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer) + def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[_ <: jm.Multipart.BodyPart.Strict] = + toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer).toJava } object BodyPart { @@ -211,8 +211,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <: jm.Multipart.General.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.General.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.General.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.General.Strict]] } object General { def apply(mediaType: MediaType.Multipart, parts: BodyPart.Strict*): Strict = Strict(mediaType, parts.toVector) @@ -257,8 +257,8 @@ object Multipart { def toByteRangesBodyPart: Try[Multipart.ByteRanges.BodyPart] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.General.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.General.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.General.BodyPart.Strict]] private[BodyPart] def tryCreateFormDataBodyPart[T](f: (String, Map[String, String], immutable.Seq[HttpHeader]) ⇒ T): Try[T] = { val params = dispositionParams @@ -322,8 +322,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <: jm.Multipart.FormData.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.FormData.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.FormData.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.FormData.Strict]] } object FormData { def apply(parts: Multipart.FormData.BodyPart.Strict*): Multipart.FormData.Strict = Strict(parts.toVector) @@ -415,8 +415,8 @@ object Multipart { def getFilename: Optional[String] = filename.asJava /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.FormData.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.FormData.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.FormData.BodyPart.Strict]] } object BodyPart { def apply(_name: String, _entity: BodyPartEntity, @@ -470,8 +470,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <:
jm.Multipart.ByteRanges.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.ByteRanges.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.ByteRanges.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.ByteRanges.Strict]] } object ByteRanges { def apply(parts: Multipart.ByteRanges.BodyPart.Strict*): Strict = Strict(parts.toVector) @@ -545,8 +545,8 @@ object Multipart { def getContentRangeHeader: jm.headers.ContentRange = contentRangeHeader /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.ByteRanges.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.ByteRanges.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[CompletionStage[jm.Multipart.ByteRanges.BodyPart.Strict]] } object BodyPart { def apply(_contentRange: ContentRange, _entity: BodyPartEntity, _rangeUnit: RangeUnit = RangeUnits.Bytes, diff --git a/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java b/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java index 57e3b8cb07..dac06d96a2 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java @@ -17,12 +17,13 @@ import akka.stream.javadsl.Flow; import akka.stream.javadsl.Keep; import akka.stream.javadsl.Sink; import akka.stream.javadsl.Source; -import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; public class WSEchoTestClientApp { private static final Function messageStringifier = new Function() { @@ -57,23 +58,23 @@ public class WSEchoTestClientApp { TextMessage.create("ghi") )).concat(Source.fromFuture(delayedCompletion).drop(1)); - Sink>> echoSink = Flow.of(Message.class) .map(messageStringifier) .grouped(1000) - .toMat(Sink.>head(), Keep.>>right()); + Sink>> echoSink = Flow.of(Message.class) .map(messageStringifier) .grouped(1000) + .toMat(Sink.>head(), Keep.right()); - Flow>> echoClient = - Flow.fromSinkAndSourceMat(echoSink, echoSource, Keep.>, NotUsed>left()); + Flow>> echoClient = + Flow.fromSinkAndSourceMat(echoSink, echoSource, Keep.left()); - Future> result = + CompletionStage> result = Http.get(system).singleWebSocketRequest( WebSocketRequest.create("ws://echo.websocket.org"), echoClient, materializer ).second(); - List messages = Await.result(result, FiniteDuration.apply(10, "second")); + List messages = result.toCompletableFuture().get(10, TimeUnit.SECONDS); System.out.println("Collected " + messages.size() + " messages:"); for (String msg: messages) System.out.println(msg); diff --git a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java index 330f9a7e55..a451057c6c 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java @@ -23,6 +23,7 @@ import
scala.concurrent.duration.FiniteDuration; import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public class JavaTestServer { @@ -32,7 +33,7 @@ public class JavaTestServer { try { final Materializer materializer = ActorMaterializer.create(system); - Future serverBindingFuture = + CompletionStage serverBindingFuture = Http.get(system).bindAndHandleSync( new Function() { public HttpResponse apply(HttpRequest request) throws Exception { @@ -47,7 +48,7 @@ public class JavaTestServer { } }, "localhost", 8080, materializer); - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); // will throw if binding fails + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); // will throw if binding fails System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala index 94c99ad2e0..bdc507e975 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala @@ -5,7 +5,7 @@ package akka.http.javadsl.testkit import scala.annotation.varargs -import scala.concurrent.ExecutionContext +import scala.concurrent.ExecutionContextExecutor import scala.concurrent.duration._ import akka.stream.Materializer import akka.http.scaladsl.server @@ -31,7 +31,7 @@ import akka.http.impl.util._ abstract class RouteTest extends AllDirectives { implicit def system: ActorSystem implicit def materializer: Materializer - implicit def executionContext: ExecutionContext = system.dispatcher + implicit def executionContext: ExecutionContextExecutor = system.dispatcher protected def awaitDuration: FiniteDuration = 500.millis diff --git a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java index f7cf210867..13048c29f1 100644 --- a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java @@ -5,16 +5,16 @@ package akka.http.javadsl.server.examples.simple; import akka.actor.ActorSystem; -import akka.dispatch.Futures; import akka.http.javadsl.server.*; import akka.http.javadsl.server.values.Parameter; import akka.http.javadsl.server.values.Parameters; import akka.http.javadsl.server.values.PathMatcher; import akka.http.javadsl.server.values.PathMatchers; -import scala.concurrent.Future; import java.io.IOException; import java.util.concurrent.Callable; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; public class SimpleServerApp extends HttpApp { static Parameter x = Parameters.intValue("x"); @@ -29,12 +29,8 @@ public class SimpleServerApp extends HttpApp { int result = x * y; return ctx.complete(String.format("%d * %d = %d", x, y, result)); } - public static Future multiplyAsync(final RequestContext ctx, final int x, final int y) { - return Futures.future(new Callable() { - public RouteResult call() throws Exception { - return multiply(ctx, x, y); - } - }, ctx.executionContext()); + public static CompletionStage multiplyAsync(final RequestContext ctx, final int x, final int y) { + 
return CompletableFuture.supplyAsync(() -> multiply(ctx, x, y), ctx.executionContext()); } @Override diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java index 46bb448ce8..42c47a5f00 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java @@ -19,6 +19,8 @@ import javax.net.ssl.SSLContext; import static akka.http.javadsl.ConnectHttp.*; import static akka.http.javadsl.ConnectHttp.toHostHttps; +import java.util.concurrent.CompletionStage; + @SuppressWarnings("ConstantConditions") public class HttpAPIsTest extends JUnitRouteTest { @@ -43,7 +45,7 @@ public class HttpAPIsTest extends JUnitRouteTest { http.bindAndHandle(handler, "127.0.0.1", 8080, materializer()); http.bindAndHandle(handler, "127.0.0.1", 8080, httpsContext, materializer()); - final Function> handler1 = null; + final Function> handler1 = null; http.bindAndHandleAsync(handler1, "127.0.0.1", 8080, materializer()); http.bindAndHandleAsync(handler1, "127.0.0.1", 8080, httpsContext, materializer()); diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java index e5a72c777f..0a4da12fc7 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java @@ -6,8 +6,8 @@ package akka.http.javadsl.server; import org.junit.Test; -import java.util.concurrent.Callable; -import akka.dispatch.Futures; +import java.util.concurrent.CompletableFuture; + import akka.http.javadsl.testkit.*; import akka.http.javadsl.marshallers.jackson.Jackson; @@ -52,13 +52,10 @@ public class CompleteTest extends JUnitRouteTest { Handler2 slowCalc = new Handler2() { @Override public RouteResult apply(final RequestContext ctx, final Integer x, final Integer y) { - return ctx.completeWith(Futures.future(new Callable() { - @Override - public RouteResult call() throws Exception { - int result = x + y; - return ctx.complete(String.format("%d + %d = %d",x, y, result)); - } - }, executionContext())); + return ctx.completeWith(CompletableFuture.supplyAsync(() -> { + int result = x + y; + return ctx.complete(String.format("%d + %d = %d",x, y, result)); + }, ctx.executionContext())); } }; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java index 3ff90a4ab3..0bf57f2407 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java @@ -67,7 +67,7 @@ public class CodingDirectivesTest extends JUnitRouteTest { .assertHeaderExists(ContentEncoding.create(HttpEncodings.DEFLATE)); ByteString decompressed = - Await.result(Coder.Deflate.decode(response.entityBytes(), mat), Duration.apply(3, TimeUnit.SECONDS)); + Coder.Deflate.decode(response.entityBytes(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); Assert.assertEquals("tester", decompressed.utf8String()); } @Test diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java index af570fae28..bc1808f3f1 
100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java @@ -52,32 +52,19 @@ public class RouteDirectivesTest extends JUnitRouteTest { .withoutSizeLimit() .getDataBytes() .runWith(Sink.head(), ctx.materializer()) - .map(new Mapper() { - @Override - public RouteResult apply(ByteString s) { - return ctx.complete(s.utf8String()); - } - }, ctx.executionContext())); + .thenApplyAsync(s -> ctx.complete(s.utf8String()), ctx.executionContext())); } })), path("limit-5") .route( - handleWith(new Function() { - @Override - public RouteResult apply(final RequestContext ctx) throws Exception { + handleWith(ctx -> { final RequestEntity entity = ctx.request().entity(); return ctx.completeWith( entity .withSizeLimit(5) .getDataBytes() .runWith(Sink.head(), ctx.materializer()) - .map(new Mapper() { - @Override - public RouteResult apply(ByteString s) { - return ctx.complete(s.utf8String()); - } - }, ctx.executionContext())); - } + .thenApplyAsync(s -> ctx.complete(s.utf8String()), ctx.executionContext())); })) ); diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java index 0369d94d97..f94bad551f 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java @@ -4,9 +4,10 @@ package akka.http.javadsl.server.values; +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; -import scala.Option; -import scala.concurrent.Future; import akka.http.javadsl.server.*; import akka.http.javadsl.model.HttpRequest; @@ -17,7 +18,7 @@ public class HttpBasicAuthenticationTest extends JUnitRouteTest { HttpBasicAuthenticator authenticatedUser = new HttpBasicAuthenticator("test-realm") { @Override - public Future> authenticate(BasicCredentials credentials) { + public CompletionStage> authenticate(BasicCredentials credentials) { if (credentials.available() && // no anonymous access credentials.identifier().equals("sina") && credentials.verify("1234")) @@ -29,7 +30,7 @@ public class HttpBasicAuthenticationTest extends JUnitRouteTest { OAuth2Authenticator authenticatedToken = new OAuth2Authenticator("test-realm") { @Override - public Future> authenticate(OAuth2Credentials credentials) { + public CompletionStage> authenticate(OAuth2Credentials credentials) { if (credentials.available() && // no anonymous access credentials.identifier().equals("myToken") && credentials.verify("myToken")) diff --git a/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java b/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java index 9847b85bb7..d01ecfe3fe 100644 --- a/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +++ b/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java @@ -13,9 +13,11 @@ import akka.http.javadsl.server.values.Parameters; import akka.http.javadsl.server.values.PathMatchers; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.javadsl.testkit.TestRoute; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + import org.junit.Test; -import scala.concurrent.ExecutionContext; -import 
scala.concurrent.Future; public class HandlerExampleDocTest extends JUnitRouteTest { @Test @@ -236,12 +238,12 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-example-full //#async-service-definition class CalculatorService { - public Future multiply(final int x, final int y, ExecutionContext ec) { - return akka.dispatch.Futures.future(() -> x * y, ec); + public CompletionStage multiply(final int x, final int y) { + return CompletableFuture.supplyAsync(() -> x * y); } - public Future add(final int x, final int y, ExecutionContext ec) { - return akka.dispatch.Futures.future(() -> x + y, ec); + public CompletionStage add(final int x, final int y) { + return CompletableFuture.supplyAsync(() -> x + y); } } //#async-service-definition @@ -253,15 +255,10 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-handler-1 // would probably be injected or passed at construction time in real code CalculatorService calculatorService = new CalculatorService(); - public Future multiplyAsync(final RequestContext ctx, int x, int y) { - Future result = calculatorService.multiply(x, y, ctx.executionContext()); - Mapper func = new Mapper() { - @Override - public RouteResult apply(Integer product) { - return ctx.complete("x * y = " + product); - } - }; // cannot be written as lambda, unfortunately - return result.map(func, ctx.executionContext()); + public CompletionStage multiplyAsync(final RequestContext ctx, int x, int y) { + CompletionStage result = calculatorService.multiply(x, y); + return result.thenApplyAsync(product -> ctx.complete("x * y = " + product), + ctx.executionContext()); } Route multiplyAsyncRoute = path("multiply").route( @@ -271,14 +268,9 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-handler-2 public RouteResult addAsync(final RequestContext ctx, int x, int y) { - Future result = calculatorService.add(x, y, ctx.executionContext()); - Mapper func = new Mapper() { - @Override - public RouteResult apply(Integer sum) { - return ctx.complete("x + y = " + sum); - } - }; // cannot be written as lambda, unfortunately - return ctx.completeWith(result.map(func, ctx.executionContext())); + CompletionStage result = calculatorService.add(x, y); + return ctx.completeWith(result.thenApplyAsync(sum -> ctx.complete("x + y = " + sum), + ctx.executionContext())); } Route addAsyncRoute = path("add").route( diff --git a/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template b/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template index 1bcf458e23..7cf3cea93f 100644 --- a/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template +++ b/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template @@ -3,7 +3,7 @@ */ package akka.http.javadsl.server -import scala.concurrent.Future +import java.util.concurrent.CompletionStage [..21#/** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) @@ -21,7 +21,7 @@ trait Handler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1 } /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) - * and returns a [[scala.concurrent.Future]] of [[RouteResult]] with the response (or the rejection). + * and returns a [[java.util.concurrent.CompletionStage]] of [[RouteResult]] with the response (or the rejection). 
* * A route `Handler1` is a convenience class that extends Function of arity `N+1`, * since it needs to pass along the [[RequestContext]] as well, yet for readability @@ -30,8 +30,8 @@ trait Handler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1 * Use the methods in [[RequestContext]] to create a [[RouteResult]]. * A handler MUST NOT return `null` as the result. */ -trait AsyncHandler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1#], Future[RouteResult]] { - override def apply(ctx: RequestContext, [#t1: T1#]): Future[RouteResult] +trait AsyncHandler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1#], CompletionStage[RouteResult]] { + override def apply(ctx: RequestContext, [#t1: T1#]): CompletionStage[RouteResult] }# ] diff --git a/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template b/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template index 105aceaa47..5404d0af7d 100644 --- a/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template +++ b/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template @@ -8,7 +8,7 @@ import akka.http.javadsl.server.RouteResult import akka.http.javadsl.server.RequestVal import akka.http.javadsl.server.RequestContext import scala.annotation.varargs -import scala.concurrent.Future +import java.util.concurrent.CompletionStage abstract class BasicDirectivesBase { /** INTERNAL API */ @@ -24,12 +24,12 @@ abstract class BasicDirectivesBase { handle(extractions: _*)(ctx => handler(ctx)) /** - * Handles the route using the given function, completing the route once the returned [[scala.concurrent.Future]] completes. + * Handles the route using the given function, completing the route once the returned [[java.util.concurrent.CompletionStage]] completes. * The function MUST NOT return `null`. * * If the `handler` is accessing request values these must be passed to this method in order for extraction to be performed. */ - @varargs def handleWithAsync(handler: akka.japi.function.Function[RequestContext, Future[RouteResult]], extractions: RequestVal[_]*): Route = + @varargs def handleWithAsync(handler: akka.japi.function.Function[RequestContext, CompletionStage[RouteResult]], extractions: RequestVal[_]*): Route = handle(extractions: _*)(ctx => ctx.completeWith(handler(ctx))) @@ -45,7 +45,7 @@ abstract class BasicDirectivesBase { handle([#v1#])(ctx => handler(ctx, [#v1.get(ctx)#])) /** - * Handles the route using the given function, completing the route once the returned [[scala.concurrent.Future]] completes. + * Handles the route using the given function, completing the route once the returned [[java.util.concurrent.CompletionStage]] completes. * The function MUST NOT return `null`. * * For convenience, using Java 8 lambda expressions as the `handler` function is recommended. @@ -53,7 +53,7 @@ abstract class BasicDirectivesBase { * [[akka.japi.function.Function2]] should prove to be useful, as it matches naming-wise with the number of * handled request values. 
*/ - def handleWithAsync1[[#T1#]]([#v1: RequestVal[T1]#], handler: akka.japi.function.Function2[RequestContext, [#T1#], Future[RouteResult]]): Route = + def handleWithAsync1[[#T1#]]([#v1: RequestVal[T1]#], handler: akka.japi.function.Function2[RequestContext, [#T1#], CompletionStage[RouteResult]]): Route = handle([#v1#])(ctx => ctx.completeWith(handler(ctx, [#v1.get(ctx)#])))# ] diff --git a/akka-http/src/main/java/akka/http/javadsl/server/Coder.java b/akka-http/src/main/java/akka/http/javadsl/server/Coder.java index 1c929961dd..0564989427 100644 --- a/akka-http/src/main/java/akka/http/javadsl/server/Coder.java +++ b/akka-http/src/main/java/akka/http/javadsl/server/Coder.java @@ -4,12 +4,14 @@ package akka.http.javadsl.server; +import java.util.concurrent.CompletionStage; + import akka.http.scaladsl.coding.Deflate$; import akka.http.scaladsl.coding.Gzip$; import akka.http.scaladsl.coding.NoCoding$; import akka.stream.Materializer; import akka.util.ByteString; -import scala.concurrent.Future; +import scala.compat.java8.FutureConverters; /** * A coder is an implementation of the predefined encoders/decoders defined for HTTP. @@ -26,8 +28,8 @@ public enum Coder { public ByteString encode(ByteString input) { return underlying.encode(input); } - public Future decode(ByteString input, Materializer mat) { - return underlying.decode(input, mat); + public CompletionStage decode(ByteString input, Materializer mat) { + return FutureConverters.toJava(underlying.decode(input, mat)); } public akka.http.scaladsl.coding.Coder _underlyingScalaCoder() { return underlying; diff --git a/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala index 2885cfa374..0f0c49166d 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala @@ -7,12 +7,13 @@ package akka.http.impl.server import akka.http.javadsl.model.ContentType import akka.http.scaladsl.model.HttpEntity import akka.stream.Materializer - -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ ExecutionContextExecutor, Future } import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ import akka.http.scaladsl.server.{ RequestContext ⇒ ScalaRequestContext } import akka.http.javadsl.server._ +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * INTERNAL API @@ -28,6 +29,7 @@ private[http] final case class RequestContextImpl(underlying: ScalaRequestContex futureResult.flatMap { case r: RouteResultImpl ⇒ r.underlying }(executionContext()) + def completeWith(futureResult: CompletionStage[RouteResult]): RouteResult = completeWith(futureResult.toScala) def complete(text: String): RouteResult = underlying.complete(text) def complete(contentType: ContentType.NonBinary, text: String): RouteResult = underlying.complete(HttpEntity(contentType.asScala, text)) @@ -48,6 +50,6 @@ private[http] final case class RequestContextImpl(underlying: ScalaRequestContex def reject(customRejection: CustomRejection): RouteResult = underlying.reject(CustomRejectionWrapper(customRejection)) - def executionContext(): ExecutionContext = underlying.executionContext + def executionContext(): ExecutionContextExecutor = underlying.executionContext def materializer(): Materializer = underlying.materializer } diff --git a/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala 
b/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala index 8d1ab2dbc9..1055ef9ccd 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala @@ -23,6 +23,10 @@ import akka.http.scaladsl.server import akka.http.javadsl.server._ import RouteStructure._ +import scala.compat.java8.FutureConverters._ +import scala.compat.java8.OptionConverters._ +import akka.dispatch.ExecutionContexts.sameThreadExecutionContext + /** * INTERNAL API */ @@ -94,7 +98,7 @@ private[http] object RouteImplementation extends Directives with server.RouteCon } } - authenticator.authenticate(javaCreds) + authenticator.authenticate(javaCreds).toScala.map(_.asScala)(sameThreadExecutionContext) }).flatMap { user ⇒ addExtraction(authenticator.asInstanceOf[RequestVal[Any]], user) } @@ -117,7 +121,7 @@ private[http] object RouteImplementation extends Directives with server.RouteCon } } - authenticator.authenticate(javaCreds) + authenticator.authenticate(javaCreds).toScala.map(_.asScala)(sameThreadExecutionContext) }).flatMap { user ⇒ addExtraction(authenticator.asInstanceOf[RequestVal[Any]], user) } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala index b95e1c3383..c1f32a2a51 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala @@ -3,7 +3,7 @@ */ package akka.http.javadsl.server -import scala.concurrent.Future +import java.util.concurrent.CompletionStage /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) @@ -22,12 +22,11 @@ trait Handler extends akka.japi.function.Function[RequestContext, RouteResult] { /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) - * and returns a [[scala.concurrent.Future]] of [[RouteResult]] with the response (or the rejection). + * and returns a [[java.util.concurrent.CompletionStage]] of [[RouteResult]] with the response (or the rejection). * * Use the methods in [[RequestContext]] to create a [[RouteResult]]. * A handler MUST NOT return `null` as the result. */ -trait AsyncHandler extends akka.japi.function.Function[RequestContext, Future[RouteResult]] { - override def apply(ctx: RequestContext): Future[RouteResult] +trait AsyncHandler extends akka.japi.function.Function[RequestContext, CompletionStage[RouteResult]] { + override def apply(ctx: RequestContext): CompletionStage[RouteResult] } - diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala b/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala index 45fe666e31..262756d0fd 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala @@ -4,9 +4,9 @@ package akka.http.javadsl.server -import scala.concurrent.Future import akka.actor.ActorSystem import akka.http.scaladsl.Http.ServerBinding +import java.util.concurrent.CompletionStage /** * A convenience class to derive from to get everything from HttpService and Directives into scope. @@ -22,6 +22,6 @@ abstract class HttpApp * Starts an HTTP server on the given interface and port. Creates the route by calling the * user-implemented [[createRoute]] method and uses the route to handle requests of the server. 
*/ - def bindRoute(interface: String, port: Int, system: ActorSystem): Future[ServerBinding] = + def bindRoute(interface: String, port: Int, system: ActorSystem): CompletionStage[ServerBinding] = bindRoute(interface, port, createRoute(), system) } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala b/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala index 5e8e9a80e5..c63d2bafb1 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala @@ -4,19 +4,20 @@ package akka.http.javadsl.server -import scala.concurrent.Future import akka.actor.ActorSystem import akka.http.scaladsl.{ server, Http } import akka.http.scaladsl.Http.ServerBinding import akka.http.impl.server.RouteImplementation import akka.stream.{ ActorMaterializer, Materializer } import akka.stream.scaladsl.{ Keep, Sink } +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ trait HttpServiceBase { /** * Starts a server on the given interface and port and uses the route to handle incoming requests. */ - def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem): Future[ServerBinding] = { + def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem): CompletionStage[ServerBinding] = { implicit val sys = system implicit val materializer = ActorMaterializer() handleConnectionsWithRoute(interface, port, route, system, materializer) @@ -25,19 +26,19 @@ trait HttpServiceBase { /** * Starts a server on the given interface and port and uses the route to handle incoming requests. */ - def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): Future[ServerBinding] = + def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): CompletionStage[ServerBinding] = handleConnectionsWithRoute(interface, port, route, system, materializer) /** * Uses the route to handle incoming connections and requests for the ServerBinding. 
*/ - def handleConnectionsWithRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): Future[ServerBinding] = { + def handleConnectionsWithRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): CompletionStage[ServerBinding] = { implicit val s = system implicit val m = materializer import system.dispatcher val r: server.Route = RouteImplementation(route) - Http(system).bind(interface, port).toMat(Sink.foreach(_.handleWith(r)))(Keep.left).run()(materializer) + Http(system).bind(interface, port).toMat(Sink.foreach(_.handleWith(r)))(Keep.left).run()(materializer).toJava } } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala index e2b41703e8..4d768184ee 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala @@ -4,9 +4,10 @@ package akka.http.javadsl.server -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.ExecutionContextExecutor import akka.http.javadsl.model._ import akka.stream.Materializer +import java.util.concurrent.CompletionStage /** * The RequestContext represents the state of the request while it is routed through @@ -24,7 +25,7 @@ trait RequestContext { def unmatchedPath: String /** Returns the ExecutionContext of this RequestContext */ - def executionContext(): ExecutionContext + def executionContext(): ExecutionContextExecutor /** Returns the Materializer of this RequestContext */ def materializer(): Materializer @@ -63,7 +64,7 @@ trait RequestContext { /** * Defers completion of the request */ - def completeWith(futureResult: Future[RouteResult]): RouteResult + def completeWith(futureResult: CompletionStage[RouteResult]): RouteResult /** * Explicitly rejects the request as not found. Other route alternatives @@ -75,4 +76,4 @@ trait RequestContext { * Reject this request with an application-defined CustomRejection. 
*/ def reject(customRejection: CustomRejection): RouteResult -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala index 0db54eed84..dc6a0588dd 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala @@ -13,6 +13,8 @@ import akka.http.javadsl.server._ import scala.annotation.varargs import scala.concurrent.Future +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ abstract class BasicDirectives extends BasicDirectivesBase { /** @@ -152,16 +154,22 @@ abstract class BasicDirectives extends BasicDirectivesBase { res } def returnTypeMatches(method: Method): Boolean = - method.getReturnType == classOf[RouteResult] || returnsFuture(method) + method.getReturnType == classOf[RouteResult] || returnsFuture(method) || returnsCompletionStage(method) def returnsFuture(method: Method): Boolean = method.getReturnType == classOf[Future[_]] && method.getGenericReturnType.isInstanceOf[ParameterizedType] && method.getGenericReturnType.asInstanceOf[ParameterizedType].getActualTypeArguments()(0) == classOf[RouteResult] + def returnsCompletionStage(method: Method): Boolean = + method.getReturnType == classOf[CompletionStage[_]] && + method.getGenericReturnType.isInstanceOf[ParameterizedType] && + method.getGenericReturnType.asInstanceOf[ParameterizedType].getActualTypeArguments()(0) == classOf[RouteResult] + /** Makes sure both RouteResult and Future[RouteResult] are acceptable result types. */ def adaptResult(method: Method): (RequestContext, AnyRef) ⇒ RouteResult = - if (returnsFuture(method)) (ctx, v) ⇒ ctx.completeWith(v.asInstanceOf[Future[RouteResult]]) + if (returnsFuture(method)) (ctx, v) ⇒ ctx.completeWith(v.asInstanceOf[Future[RouteResult]].toJava) + else if (returnsCompletionStage(method)) (ctx, v) => ctx.completeWith(v.asInstanceOf[CompletionStage[RouteResult]]) else (_, v) ⇒ v.asInstanceOf[RouteResult] val IdentityAdaptor: (RequestContext, Seq[Any]) ⇒ Seq[Any] = (_, ps) ⇒ ps @@ -192,4 +200,4 @@ abstract class BasicDirectives extends BasicDirectivesBase { handle(extractions: _*)(ctx ⇒ method(ctx, extractions.map(_.get(ctx)))) } -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala index 9ce8b01d2f..51d2d2b072 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala @@ -7,9 +7,10 @@ package akka.http.javadsl.server.values import akka.http.impl.server.{ ExtractionImplBase, RouteStructure } import akka.http.javadsl.server.{ AbstractDirective, RequestVal, Route } import akka.http.scaladsl.util.FastFuture - -import scala.concurrent.Future import scala.reflect.ClassTag +import java.util.concurrent.CompletionStage +import java.util.Optional +import java.util.concurrent.CompletableFuture /** * Represents existing or missing Http Basic authentication credentials. 
@@ -38,18 +39,18 @@ trait BasicCredentials { */ abstract class HttpBasicAuthenticator[T](val realm: String) extends AbstractDirective with ExtractionImplBase[T] with RequestVal[T] { protected[http] implicit def classTag: ClassTag[T] = reflect.classTag[AnyRef].asInstanceOf[ClassTag[T]] - def authenticate(credentials: BasicCredentials): Future[Option[T]] + def authenticate(credentials: BasicCredentials): CompletionStage[Optional[T]] /** * Creates a return value for use in [[authenticate]] that successfully authenticates the requests and provides * the given user. */ - def authenticateAs(user: T): Future[Option[T]] = FastFuture.successful(Some(user)) + def authenticateAs(user: T): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.of(user)) /** * Refuses access for this user. */ - def refuseAccess(): Future[Option[T]] = FastFuture.successful(None) + def refuseAccess(): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.empty()) /** * INTERNAL API diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala index 6845115048..f72de5fe55 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala @@ -6,10 +6,10 @@ package akka.http.javadsl.server.values import akka.http.impl.server.{ ExtractionImplBase, RouteStructure } import akka.http.javadsl.server.{ AbstractDirective, RequestVal, Route } -import akka.http.scaladsl.util.FastFuture - -import scala.concurrent.Future import scala.reflect.ClassTag +import java.util.concurrent.CompletionStage +import java.util.Optional +import java.util.concurrent.CompletableFuture /** * Represents existing or missing OAuth 2 authentication credentials. @@ -38,18 +38,18 @@ trait OAuth2Credentials { */ abstract class OAuth2Authenticator[T](val realm: String) extends AbstractDirective with ExtractionImplBase[T] with RequestVal[T] { protected[http] implicit def classTag: ClassTag[T] = reflect.classTag[AnyRef].asInstanceOf[ClassTag[T]] - def authenticate(credentials: OAuth2Credentials): Future[Option[T]] + def authenticate(credentials: OAuth2Credentials): CompletionStage[Optional[T]] /** * Creates a return value for use in [[authenticate]] that successfully authenticates the requests and provides * the given user. */ - def authenticateAs(user: T): Future[Option[T]] = FastFuture.successful(Some(user)) + def authenticateAs(user: T): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.of(user)) /** * Refuses access for this user. 
   */
-  def refuseAccess(): Future[Option[T]] = FastFuture.successful(None)
+  def refuseAccess(): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.empty())
 
   /**
    * INTERNAL API
diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala
index a88fea5a70..399583c128 100644
--- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala
+++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala
@@ -4,7 +4,7 @@
 
 package akka.http.scaladsl.server
 
-import scala.concurrent.{ Future, ExecutionContext }
+import scala.concurrent.{ Future, ExecutionContextExecutor }
 import akka.stream.Materializer
 import akka.event.LoggingAdapter
 import akka.http.scaladsl.marshalling.ToResponseMarshallable
@@ -25,7 +25,7 @@ trait RequestContext {
   /**
    * The default ExecutionContext to be used for scheduling asynchronous logic related to this request.
    */
-  implicit def executionContext: ExecutionContext
+  implicit def executionContext: ExecutionContextExecutor
 
   /**
    * The default Materializer.
@@ -46,7 +46,7 @@ trait RequestContext {
    * Returns a copy of this context with the given fields updated.
    */
   def reconfigure(
-    executionContext: ExecutionContext = executionContext,
+    executionContext: ExecutionContextExecutor = executionContext,
     materializer: Materializer = materializer,
     log: LoggingAdapter = log,
     settings: RoutingSettings = settings): RequestContext
@@ -76,7 +76,7 @@ trait RequestContext {
   /**
    * Returns a copy of this context with the new HttpRequest.
    */
-  def withExecutionContext(ec: ExecutionContext): RequestContext
+  def withExecutionContext(ec: ExecutionContextExecutor): RequestContext
 
   /**
    * Returns a copy of this context with the new HttpRequest.
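For Java users the most visible part of the authenticator change above is the new contract: `authenticate` now returns a `java.util.concurrent.CompletionStage<Optional<T>>` instead of a `scala.concurrent.Future<Option<T>>`, and the `authenticateAs`/`refuseAccess` helpers hand back already-completed stages. A minimal sketch of an implementation against the new signature follows; the realm name and the hard-coded credentials are invented for illustration.

```java
import java.util.Optional;
import java.util.concurrent.CompletionStage;

import akka.http.javadsl.server.values.BasicCredentials;
import akka.http.javadsl.server.values.HttpBasicAuthenticator;

// Sketch only: "example-realm" and the "admin"/"secret" credentials are made up.
class SampleAuthenticator extends HttpBasicAuthenticator<String> {
  SampleAuthenticator() {
    super("example-realm");
  }

  @Override
  public CompletionStage<Optional<String>> authenticate(BasicCredentials credentials) {
    if (credentials.available()                      // no anonymous access
        && credentials.identifier().equals("admin")
        && credentials.verify("secret"))
      return authenticateAs(credentials.identifier()); // completed stage holding Optional.of(user)
    else
      return refuseAccess();                           // completed stage holding Optional.empty()
  }
}
```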
diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala index 76aa562b35..0df8b94a34 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala @@ -4,7 +4,7 @@ package akka.http.scaladsl.server -import scala.concurrent.{ Future, ExecutionContext } +import scala.concurrent.{ Future, ExecutionContextExecutor } import akka.stream.Materializer import akka.event.LoggingAdapter import akka.http.scaladsl.marshalling.{ Marshal, ToResponseMarshallable } @@ -18,15 +18,15 @@ import akka.http.scaladsl.util.FastFuture._ private[http] class RequestContextImpl( val request: HttpRequest, val unmatchedPath: Uri.Path, - val executionContext: ExecutionContext, + val executionContext: ExecutionContextExecutor, val materializer: Materializer, val log: LoggingAdapter, val settings: RoutingSettings) extends RequestContext { - def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings)(implicit ec: ExecutionContext, materializer: Materializer) = + def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings)(implicit ec: ExecutionContextExecutor, materializer: Materializer) = this(request, request.uri.path, ec, materializer, log, settings) - def reconfigure(executionContext: ExecutionContext, materializer: Materializer, log: LoggingAdapter, settings: RoutingSettings): RequestContext = + def reconfigure(executionContext: ExecutionContextExecutor, materializer: Materializer, log: LoggingAdapter, settings: RoutingSettings): RequestContext = copy(executionContext = executionContext, materializer = materializer, log = log, settings = settings) override def complete(trm: ToResponseMarshallable): Future[RouteResult] = @@ -47,7 +47,7 @@ private[http] class RequestContextImpl( override def withRequest(request: HttpRequest): RequestContext = if (request != this.request) copy(request = request) else this - override def withExecutionContext(executionContext: ExecutionContext): RequestContext = + override def withExecutionContext(executionContext: ExecutionContextExecutor): RequestContext = if (executionContext != this.executionContext) copy(executionContext = executionContext) else this override def withMaterializer(materializer: Materializer): RequestContext = @@ -83,7 +83,7 @@ private[http] class RequestContextImpl( private def copy(request: HttpRequest = request, unmatchedPath: Uri.Path = unmatchedPath, - executionContext: ExecutionContext = executionContext, + executionContext: ExecutionContextExecutor = executionContext, materializer: Materializer = materializer, log: LoggingAdapter = log, settings: RoutingSettings = settings) = diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala index e33f152a83..15654e45eb 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala @@ -7,7 +7,7 @@ package akka.http.scaladsl.server import akka.NotUsed import akka.stream.Materializer -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ ExecutionContextExecutor, Future } import akka.stream.scaladsl.Flow import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.http.scaladsl.util.FastFuture._ @@ -41,7 +41,7 @@ object Route { def handlerFlow(route: Route)(implicit 
routingSettings: RoutingSettings, materializer: Materializer, routingLog: RoutingLog, - executionContext: ExecutionContext = null, + executionContext: ExecutionContextExecutor = null, rejectionHandler: RejectionHandler = RejectionHandler.default, exceptionHandler: ExceptionHandler = null): Flow[HttpRequest, HttpResponse, NotUsed] = Flow[HttpRequest].mapAsync(1)(asyncHandler(route)) @@ -52,7 +52,7 @@ object Route { def asyncHandler(route: Route)(implicit routingSettings: RoutingSettings, materializer: Materializer, routingLog: RoutingLog, - executionContext: ExecutionContext = null, + executionContext: ExecutionContextExecutor = null, rejectionHandler: RejectionHandler = RejectionHandler.default, exceptionHandler: ExceptionHandler = null): HttpRequest ⇒ Future[HttpResponse] = { val effectiveEC = if (executionContext ne null) executionContext else materializer.executionContext diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala index 5a91f6d688..002bda4d10 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala @@ -5,7 +5,7 @@ package akka.http.scaladsl.server package directives -import scala.concurrent.{ Future, ExecutionContext } +import scala.concurrent.{ Future, ExecutionContextExecutor } import scala.collection.immutable import akka.event.LoggingAdapter import akka.stream.Materializer @@ -132,13 +132,13 @@ trait BasicDirectives { /** * Runs its inner route with the given alternative [[ExecutionContext]]. */ - def withExecutionContext(ec: ExecutionContext): Directive0 = + def withExecutionContext(ec: ExecutionContextExecutor): Directive0 = mapRequestContext(_ withExecutionContext ec) /** * Extracts the [[ExecutionContext]] from the [[RequestContext]]. */ - def extractExecutionContext: Directive1[ExecutionContext] = BasicDirectives._extractExecutionContext + def extractExecutionContext: Directive1[ExecutionContextExecutor] = BasicDirectives._extractExecutionContext /** * Runs its inner route with the given alternative [[Materializer]]. 
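The widening from `ExecutionContext` to `ExecutionContextExecutor` in `RequestContext`, `Route` and the directives above is what makes the dispatcher directly usable from the Java side: an `ExecutionContextExecutor` is also a `java.util.concurrent.Executor`, so it can be passed straight into `CompletionStage`/`CompletableFuture` combinators without any adapter. A small sketch under that assumption; the actor system name is arbitrary.

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

import akka.actor.ActorSystem;
import scala.concurrent.ExecutionContextExecutor;

// Sketch: the system name "example" is illustrative.
public class ExecutorExample {
  public static void main(String[] args) throws Exception {
    final ActorSystem system = ActorSystem.create("example");

    // dispatcher() is an ExecutionContextExecutor, i.e. both a Scala
    // ExecutionContext and a java.util.concurrent.Executor ...
    final ExecutionContextExecutor dispatcher = system.dispatcher();

    // ... so it can be handed straight to CompletionStage combinators:
    final CompletionStage<Integer> result =
        CompletableFuture.supplyAsync(() -> 6 * 7, dispatcher)
                         .thenApplyAsync(i -> i + 1, dispatcher);

    System.out.println(result.toCompletableFuture().get()); // prints 43
    system.terminate();
  }
}
```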
@@ -191,7 +191,7 @@ object BasicDirectives extends BasicDirectives { private val _extractUnmatchedPath: Directive1[Uri.Path] = extract(_.unmatchedPath) private val _extractRequest: Directive1[HttpRequest] = extract(_.request) private val _extractUri: Directive1[Uri] = extract(_.request.uri) - private val _extractExecutionContext: Directive1[ExecutionContext] = extract(_.executionContext) + private val _extractExecutionContext: Directive1[ExecutionContextExecutor] = extract(_.executionContext) private val _extractMaterializer: Directive1[Materializer] = extract(_.materializer) private val _extractLog: Directive1[LoggingAdapter] = extract(_.log) private val _extractSettings: Directive1[RoutingSettings] = extract(_.settings) diff --git a/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java index c9040b5b35..0e4d1d0676 100644 --- a/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java @@ -16,6 +16,7 @@ import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; import java.io.OutputStream; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertTrue; @@ -32,8 +33,6 @@ public class OutputStreamSinkTest extends StreamTest { @Test public void mustSignalFailureViaIoResult() throws Exception { - final FiniteDuration timeout = FiniteDuration.create(300, TimeUnit.MILLISECONDS); - final OutputStream os = new OutputStream() { volatile int left = 3; public void write(int data) { @@ -43,8 +42,8 @@ public class OutputStreamSinkTest extends StreamTest { left -= 1; } }; - final Future resultFuture = Source.single(ByteString.fromString("123456")).runWith(StreamConverters.fromOutputStream(() -> os), materializer); - final IOResult result = Await.result(resultFuture, timeout); + final CompletionStage resultFuture = Source.single(ByteString.fromString("123456")).runWith(StreamConverters.fromOutputStream(() -> os), materializer); + final IOResult result = resultFuture.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertFalse(result.wasSuccessful()); assertTrue(result.getError().getMessage().equals("Can't accept more data.")); diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java index 07a05d5062..975c139ed5 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java @@ -6,6 +6,7 @@ package akka.stream.javadsl; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import akka.NotUsed; @@ -85,33 +86,20 @@ public class BidiFlowTest extends StreamTest { } })); - private final BidiFlow> bidiMat = + private final BidiFlow> bidiMat = BidiFlow.fromGraph( GraphDSL.create( Sink.head(), - new Function2>, SinkShape, BidiShape>() { - @Override - public BidiShape apply(Builder> b, SinkShape sink) - throws Exception { + (b, sink) -> { b.from(b.add(Source.single(42))).to(sink); final FlowShape top = b.add(Flow - .of(Integer.class).map(new Function() { - @Override - public Long apply(Integer arg) { - return (long) ((int) arg) + 2; - } - })); + .of(Integer.class).map(i -> (long)(i + 2))); final FlowShape bottom = b.add(Flow - 
.of(ByteString.class).map(new Function() { - @Override - public String apply(ByteString arg) { - return arg.decodeString("UTF-8"); - } - })); + .of(ByteString.class).map(bytes -> bytes.decodeString("UTF-8"))); return new BidiShape(top .in(), top.out(), bottom.in(), bottom.out()); } - })); + )); private final String str = "Hello World"; private final ByteString bytes = ByteString.fromString(str); @@ -125,14 +113,11 @@ public class BidiFlowTest extends StreamTest { @Test public void mustWorkInIsolation() throws Exception { - final Pair, Future> p = + final Pair, CompletionStage> p = RunnableGraph.fromGraph(GraphDSL .create(Sink. head(), Sink. head(), - Keep., Future> both(), - new Function3, Future>>, SinkShape, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder, Future>> b, SinkShape st, - SinkShape sb) throws Exception { + Keep.both(), + (b, st, sb) -> { final BidiShape s = b.add(bidi); b.from(b.add(Source.single(1))).toInlet(s.in1()); @@ -140,11 +125,10 @@ public class BidiFlowTest extends StreamTest { b.from(b.add(Source.single(bytes))).toInlet(s.in2()); b.from(s.out2()).to(sb); return ClosedShape.getInstance(); - } })).run(materializer); - final Long rt = Await.result(p.first(), oneSec); - final String rb = Await.result(p.second(), oneSec); + final Long rt = p.first().toCompletableFuture().get(1, TimeUnit.SECONDS); + final String rb = p.second().toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals((Long) 3L, rt); assertEquals(str, rb); @@ -158,8 +142,8 @@ public class BidiFlowTest extends StreamTest { return ByteString.fromString("Hello " + arg); } })); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("Hello 3", "Hello 4", "Hello 5"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList("Hello 3", "Hello 4", "Hello 5"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test @@ -171,8 +155,8 @@ public class BidiFlowTest extends StreamTest { } }).join(bidi); final List inputs = Arrays.asList(ByteString.fromString("1"), ByteString.fromString("2")); - final Future> result = Source.from(inputs).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList(3L, 4L), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(inputs).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList(3L, 4L), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test @@ -183,8 +167,8 @@ public class BidiFlowTest extends StreamTest { return arg.toString(); } })); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("5", "6", "7"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList("5", "6", "7"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test @@ -195,80 +179,49 @@ public class BidiFlowTest extends StreamTest { return arg.toString(); } }).join(inverse.reversed()).join(bidi.reversed()); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("5", "6", "7"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + 
assertEquals(Arrays.asList("5", "6", "7"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustMaterializeToItsValue() throws Exception { - final Future f = RunnableGraph.fromGraph( - GraphDSL.create(bidiMat, - new Function2 >, BidiShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, - BidiShape shape) throws Exception { - final FlowShape left = b.add(Flow.of(String.class).map( - new Function() { - @Override - public Integer apply(String arg) { - return Integer.valueOf(arg); - } - })); - final FlowShape right = b.add(Flow.of(Long.class).map( - new Function() { - @Override - public ByteString apply(Long arg) { - return ByteString.fromString("Hello " + arg); - } - })); + final CompletionStage f = RunnableGraph.fromGraph( + GraphDSL.create(bidiMat, (b, shape) -> { + final FlowShape left = b.add(Flow.of(String.class).map(Integer::valueOf)); + final FlowShape right = b.add(Flow.of(Long.class).map(s -> ByteString.fromString("Hello " + s))); b.from(shape.out2()).via(left).toInlet(shape.in1()) .from(shape.out1()).via(right).toInlet(shape.in2()); return ClosedShape.getInstance(); - } })).run(materializer); - assertEquals((Integer) 42, Await.result(f, oneSec)); + assertEquals((Integer) 42, f.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustCombineMaterializationValues() throws Exception { - final Flow> left = Flow.fromGraph(GraphDSL.create( - Sink.head(), new Function2>, SinkShape, FlowShape>() { - @Override - public FlowShape apply(Builder> b, - SinkShape sink) throws Exception { + final Flow> left = Flow.fromGraph(GraphDSL.create( + Sink.head(), (b, sink) -> { final UniformFanOutShape bcast = b.add(Broadcast.create(2)); final UniformFanInShape merge = b.add(Merge.create(2)); - final FlowShape flow = b.add(Flow.of(String.class).map( - new Function() { - @Override - public Integer apply(String arg) { - return Integer.valueOf(arg); - } - })); + final FlowShape flow = b.add(Flow.of(String.class).map(Integer::valueOf)); b.from(bcast).to(sink) .from(b.add(Source.single(1))).viaFanOut(bcast).toFanIn(merge) .from(flow).toFanIn(merge); return new FlowShape(flow.in(), merge.out()); - } })); - final Flow>> right = Flow.fromGraph(GraphDSL.create( - Sink.>head(), new Function2>>, SinkShape>, FlowShape>() { - @Override - public FlowShape apply(Builder>> b, - SinkShape> sink) throws Exception { + final Flow>> right = Flow.fromGraph(GraphDSL.create( + Sink.>head(), (b, sink) -> { final FlowShape> flow = b.add(Flow.of(Long.class).grouped(10)); b.from(flow).to(sink); return new FlowShape(flow.in(), b.add(Source.single(ByteString.fromString("10"))).out()); - } })); - final Pair, Future>, Future>> result = - left.joinMat(bidiMat, Keep., Future> both()).joinMat(right, Keep., Future>, Future>> both()).run(materializer); - final Future l = result.first().first(); - final Future m = result.first().second(); - final Future> r = result.second(); - assertEquals((Integer) 1, Await.result(l, oneSec)); - assertEquals((Integer) 42, Await.result(m, oneSec)); - final Long[] rr = Await.result(r, oneSec).toArray(new Long[2]); + final Pair, CompletionStage>, CompletionStage>> result = + left.joinMat(bidiMat, Keep.both()).joinMat(right, Keep.both()).run(materializer); + final CompletionStage l = result.first().first(); + final CompletionStage m = result.first().second(); + final CompletionStage> r = result.second(); + assertEquals((Integer) 1, l.toCompletableFuture().get(1, TimeUnit.SECONDS)); + assertEquals((Integer) 42, m.toCompletableFuture().get(1, 
TimeUnit.SECONDS)); + final Long[] rr = r.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(new Long[2]); Arrays.sort(rr); assertArrayEquals(new Long[] { 3L, 12L }, rr); } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java index 9313832c3d..59978638d9 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java @@ -5,7 +5,7 @@ package akka.stream.javadsl; import akka.NotUsed; import akka.japi.Pair; -import akka.pattern.Patterns; +import akka.pattern.PatternsCS; import akka.japi.tuple.Tuple4; import akka.stream.*; import akka.stream.javadsl.GraphDSL.Builder; @@ -23,6 +23,7 @@ import scala.concurrent.Future; import scala.concurrent.duration.Duration; import java.util.*; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; @@ -83,9 +84,9 @@ public class FlowGraphTest extends StreamTest { // collecting final Publisher pub = source.runWith(publisher, materializer); - final Future> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); + final CompletionStage> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); - final List result = Await.result(all, Duration.apply(200, TimeUnit.MILLISECONDS)); + final List result = all.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); assertEquals(new HashSet(Arrays.asList("a", "b", "c", "d", "e", "f")), new HashSet(result)); } @@ -259,19 +260,16 @@ public class FlowGraphTest extends StreamTest { } }); - final Future future = RunnableGraph.fromGraph(GraphDSL.create(Sink.head(), - new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph(GraphDSL.create(Sink.head(), + (b, out) -> { final FanInShape2 zip = b.add(sumZip); b.from(b.add(in1)).toInlet(zip.in0()); b.from(b.add(in2)).toInlet(zip.in1()); b.from(zip.out()).to(out); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(11, (int) result); } @@ -289,11 +287,8 @@ public class FlowGraphTest extends StreamTest { } }); - final Future future = RunnableGraph.fromGraph( - GraphDSL.create(Sink.head(), - new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph( + GraphDSL.create(Sink.head(), (b, out) -> { final FanInShape4 zip = b.add(sumZip); b.from(b.add(in1)).toInlet(zip.in0()); b.from(b.add(in2)).toInlet(zip.in1()); @@ -301,10 +296,9 @@ public class FlowGraphTest extends StreamTest { b.from(b.add(in4)).toInlet(zip.in3()); b.from(zip.out()).to(out); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(1111, (int) result); } @@ -314,21 +308,14 @@ public class FlowGraphTest extends StreamTest { final Source in1 = Source.single(1); final TestProbe probe = TestProbe.apply(system); - final Future future = RunnableGraph.fromGraph( - 
GraphDSL.create(Sink. head(), new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph( + GraphDSL.create(Sink. head(), (b, out) -> { b.from(b.add(Source.single(1))).to(out); - b.from(b.materializedValue()).to(b.add(Sink.foreach(new Procedure>(){ - public void apply(Future mat) throws Exception { - Patterns.pipe(mat, system.dispatcher()).to(probe.ref()); - } - }))); + b.from(b.materializedValue()).to(b.add(Sink.foreach(mat -> PatternsCS.pipe(mat, system.dispatcher()).to(probe.ref())))); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(1, (int) result); probe.expectMsg(1); diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java index 324caee47d..3955fa0b1d 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java @@ -28,6 +28,9 @@ import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Stream; @@ -50,8 +53,8 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final String[] lookup = { "a", "b", "c", "d", "e", "f" }; final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); - final Source ints = Source.from(input); - final Flow flow1 = Flow.of(Integer.class).drop(2).take(3 + final Source ints = Source.from(input); + final Flow flow1 = Flow.of(Integer.class).drop(2).take(3 ).takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS )).map(new Function() { public String apply(Integer elem) { @@ -62,7 +65,7 @@ public class FlowTest extends StreamTest { return !elem.equals("c"); } }); - final Flow flow2 = Flow.of(String.class).grouped(2 + final Flow flow2 = Flow.of(String.class).grouped(2 ).mapConcat(new Function, java.lang.Iterable>() { public java.util.List apply(java.util.List elem) { return elem; @@ -74,16 +77,9 @@ public class FlowTest extends StreamTest { } }); - ints.via(flow1.via(flow2)).runFold("", new Function2() { - public String apply(String acc, String elem) { - return acc + elem; - } - }, materializer - ).foreach(new Foreach() { // Scala Future - public void each(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, system.dispatcher()); + ints.via(flow1.via(flow2)) + .runFold("", (acc, elem) -> acc + elem, materializer) + .thenAccept(elem -> probe.getRef().tell(elem, ActorRef.noSender())); probe.expectMsgEquals("de"); } @@ -91,36 +87,26 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToUseDropWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); - final Flow flow = Flow.of(Integer.class).dropWhile - (new Predicate() { - public boolean test(Integer elem) { - return elem < 2; - } - }); + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); + final Flow flow = + 
Flow.of(Integer.class).dropWhile(elem -> elem < 2); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(2); probe.expectMsgEquals(3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseIntersperse() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); - final Flow flow = Flow.of(String.class).intersperse("[", ",", "]"); + final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); + final Flow flow = Flow.of(String.class).intersperse("[", ",", "]"); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals("["); probe.expectMsgEquals("0"); @@ -131,20 +117,17 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals(","); probe.expectMsgEquals("3"); probe.expectMsgEquals("]"); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseIntersperseAndConcat() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); - final Flow flow = Flow.of(String.class).intersperse(","); + final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); + final Flow flow = Flow.of(String.class).intersperse(","); - final Future future = Source.single(">> ").concat(source.via(flow)).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + Source.single(">> ").concat(source.via(flow)).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(">> "); probe.expectMsgEquals("0"); @@ -154,25 +137,22 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals("2"); probe.expectMsgEquals(","); probe.expectMsgEquals("3"); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseTakeWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); - final Flow flow = Flow.of(Integer.class).takeWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); + final Flow flow = Flow.of(Integer.class).takeWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> 
probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(0); probe.expectMsgEquals(1); @@ -180,7 +160,7 @@ public class FlowTest extends StreamTest { FiniteDuration duration = Duration.apply(200, TimeUnit.MILLISECONDS); probe.expectNoMsg(duration); - Await.ready(future, duration); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @@ -189,7 +169,7 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7); // duplicate each element, stop after 4 elements, and emit sum to the end - final Flow flow = Flow.of(Integer.class).transform(new Creator>() { + final Flow flow = Flow.of(Integer.class).transform(new Creator>() { @Override public PushPullStage create() throws Exception { return new StatefulStage() { @@ -252,9 +232,9 @@ public class FlowTest extends StreamTest { .grouped(10) .mergeSubstreams(); - final Future>> future = + final CompletionStage>> future = Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final Object[] result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)).toArray(); + final Object[] result = future.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(); Arrays.sort(result, (Comparator)(Object) new Comparator>() { @Override public int compare(List o1, List o2) { @@ -278,9 +258,9 @@ public class FlowTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList(".", "D"), Arrays.asList(".", "E", "F")), result); } @@ -298,9 +278,9 @@ public class FlowTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C", "."), Arrays.asList("D", "."), Arrays.asList("E", "F")), result); } @@ -352,9 +332,9 @@ public class FlowTest extends StreamTest { // collecting final Publisher pub = source.runWith(publisher, materializer); - final Future> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); + final CompletionStage> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); - final List result = Await.result(all, Duration.apply(200, TimeUnit.MILLISECONDS)); + final List result = all.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); assertEquals(new HashSet(Arrays.asList("a", "b", "c", "d", "e", "f")), new HashSet(result)); } @@ -396,9 +376,9 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); - final Flow flow = Flow.of(String.class); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); + final Flow flow = Flow.of(String.class); in1.via(flow.concat(in2)).runForeach(new Procedure() { public void apply(String elem) { 
probe.getRef().tell(elem, ActorRef.noSender()); @@ -415,9 +395,9 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); - final Flow flow = Flow.of(String.class); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); + final Flow flow = Flow.of(String.class); in2.via(flow.prepend(in1)).runForeach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -432,15 +412,14 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUsePrefixAndTail() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(1, 2, 3, 4, 5, 6); - final Flow, Source>, ?> flow = Flow.of(Integer.class).prefixAndTail(3); - Future, Source>> future = + final Flow, Source>, NotUsed> flow = Flow.of(Integer.class).prefixAndTail(3); + CompletionStage, Source>> future = Source.from(input).via(flow).runWith(Sink., Source>>head(), materializer); - Pair, Source> result = Await.result(future, - probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + Pair, Source> result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3), result.first()); - Future> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); - List tailResult = Await.result(tailFuture, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); + List tailResult = tailFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(4, 5, 6), tailResult); } @@ -454,12 +433,12 @@ public class FlowTest extends StreamTest { mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); - final Flow, List, ?> flow = Flow.>create(). + final Flow, List, NotUsed> flow = Flow.>create(). flatMapConcat(ConstantFun.>javaIdentityFunction()).grouped(6); - Future> future = Source.from(mainInputs).via(flow) + CompletionStage> future = Source.from(mainInputs).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3, 4, 5), result); } @@ -478,12 +457,12 @@ public class FlowTest extends StreamTest { mainInputs.add(Source.from(input3)); mainInputs.add(Source.from(input4)); - final Flow, List, ?> flow = Flow.>create(). + final Flow, List, NotUsed> flow = Flow.>create(). 
flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60); - Future> future = Source.from(mainInputs).via(flow) + CompletionStage> future = Source.from(mainInputs).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); final Set set = new HashSet(); for (Integer i: result) { set.add(i); @@ -501,10 +480,10 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); final Flow, NotUsed> flow = Flow.of(String.class).buffer(2, OverflowStrategy.backpressure()).grouped(4); - Future> future = Source.from(input).via(flow) + final CompletionStage> future = Source.from(input).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(input, result); } @@ -523,13 +502,8 @@ public class FlowTest extends StreamTest { return aggr + in; } }); - Future future = Source.from(input).via(flow).runFold("", new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -548,13 +522,8 @@ public class FlowTest extends StreamTest { return aggr + in; } }); - Future future = Source.from(input).via(flow).runFold("", new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -578,13 +547,8 @@ public class FlowTest extends StreamTest { return aggr + in; } }); - Future future = Source.from(input).via(flow).runFold("", new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -593,9 +557,9 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); final Flow flow = Flow.of(String.class).expand(in -> Stream.iterate(in, i -> i).iterator()); - final Sink> sink = Sink.head(); - Future future = Source.from(input).via(flow).runWith(sink, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + final Sink> sink = Sink.head(); + CompletionStage future = Source.from(input).via(flow).runWith(sink, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ 
-603,11 +567,7 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUseMapAsync() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("a", "b", "c"); - final Flow flow = Flow.of(String.class).mapAsync(4, new Function>() { - public Future apply(String elem) { - return Futures.successful(elem.toUpperCase()); - } - }); + final Flow flow = Flow.of(String.class).mapAsync(4, elem -> CompletableFuture.completedFuture(elem.toUpperCase())); Source.from(input).via(flow).runForeach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -623,8 +583,8 @@ public class FlowTest extends StreamTest { final TestPublisher.ManualProbe publisherProbe = TestPublisher.manualProbe(true,system); final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.fromPublisher(publisherProbe); - final Flow flow = Flow.of(Integer.class).map( + final Source source = Source.fromPublisher(publisherProbe); + final Flow flow = Flow.of(Integer.class).map( new Function() { public Integer apply(Integer elem) { if (elem == 2) throw new RuntimeException("ex"); @@ -638,11 +598,8 @@ public class FlowTest extends StreamTest { } }); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); final PublisherProbeSubscription s = publisherProbe.expectSubscription(); @@ -652,7 +609,7 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals(1); s.sendNext(2); probe.expectMsgEquals(0); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -660,9 +617,9 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Flow otherFlow = Flow.of(String.class); + Flow otherFlow = Flow.of(String.class); - Flow myFlow = Flow.of(String.class).via(otherFlow); + Flow myFlow = Flow.of(String.class).via(otherFlow); Source.from(input).via(myFlow).runWith(Sink.foreach(new Procedure() { // Scala Future public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -676,7 +633,7 @@ public class FlowTest extends StreamTest { public void mustBeAbleToMaterializeIdentityToJavaSink() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Flow otherFlow = Flow.of(String.class); + Flow otherFlow = Flow.of(String.class); Sink sink = Flow.of(String.class).to(otherFlow.to(Sink.foreach(new Procedure() { // Scala Future public void apply(String elem) { @@ -767,54 +724,54 @@ public class FlowTest extends StreamTest { } @Test - public void mustBeAbleToUseInitialTimeout() throws Exception { + public void mustBeAbleToUseInitialTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).initialTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).initialTimeout(Duration.create(1, "second"))) + .runWith(Sink. 
head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseCompletionTimeout() throws Exception { + public void mustBeAbleToUseCompletionTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).completionTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).completionTimeout(Duration.create(1, "second"))) + .runWith(Sink. head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseIdleTimeout() throws Exception { + public void mustBeAbleToUseIdleTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).idleTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).idleTimeout(Duration.create(1, "second"))) + .runWith(Sink. head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test public void mustBeAbleToUseKeepAlive() throws Exception { - Integer result = Await.result( + Integer result = Source.maybe() .via(Flow.of(Integer.class) .keepAlive(Duration.create(1, "second"), new Creator() { @@ -824,9 +781,8 @@ public class FlowTest extends StreamTest { }) ) .takeWithin(Duration.create(1500, "milliseconds")) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); + .runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals((Object) 0, result); } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java index c10a9b0201..a9e941a96f 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java @@ -8,6 +8,8 @@ import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; import akka.NotUsed; import akka.japi.function.Function; @@ -40,21 +42,17 @@ public class SinkTest extends StreamTest { @Test public void mustBeAbleToUseFuture() throws Exception { - final Sink> futSink = Sink.head(); + final Sink> futSink = Sink.head(); final List list = Collections.singletonList(1); - final Future future = Source.from(list).runWith(futSink, materializer); - assert Await.result(future, Duration.create("1 second")).equals(1); + final CompletionStage future = Source.from(list).runWith(futSink, materializer); + assert future.toCompletableFuture().get(1, TimeUnit.SECONDS).equals(1); } @Test public void mustBeAbleToUseFold() throws Exception { - Sink> foldSink = Sink.fold(0, new Function2() { - @Override public Integer apply(Integer arg1, Integer arg2) throws 
Exception { - return arg1 + arg2; - } - }); + Sink> foldSink = Sink.fold(0, (arg1, arg2) -> arg1 + arg2); @SuppressWarnings("unused") - Future integerFuture = Source.from(new ArrayList()).runWith(foldSink, materializer); + CompletionStage integerFuture = Source.from(new ArrayList()).runWith(foldSink, materializer); } @Test @@ -97,7 +95,7 @@ public class SinkTest extends StreamTest { public void mustSuitablyOverrideAttributeHandlingMethods() { @SuppressWarnings("unused") - final Sink> s = + final Sink> s = Sink. head().withAttributes(Attributes.name("")).addAttributes(Attributes.asyncBoundary()).named(""); } } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java index c14f854a8f..0fa0952f22 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java @@ -13,6 +13,7 @@ import akka.dispatch.OnSuccess; import akka.japi.JavaPartialFunction; import akka.japi.Pair; import akka.japi.function.*; +import akka.japi.pf.PFBuilder; import akka.stream.*; import akka.stream.impl.ConstantFun; import akka.stream.stage.*; @@ -29,6 +30,9 @@ import scala.concurrent.duration.FiniteDuration; import scala.util.Try; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Stream; @@ -54,32 +58,18 @@ public class SourceTest extends StreamTest { final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); final Source ints = Source.from(input); - ints.drop(2).take(3).takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS)).map(new Function() { - public String apply(Integer elem) { - return lookup[elem]; - } - }).filter(new Predicate() { - public boolean test(String elem) { - return !elem.equals("c"); - } - }).grouped(2).mapConcat(new Function, java.util.List>() { - public java.util.List apply(java.util.List elem) { - return elem; - } - }).groupedWithin(100, FiniteDuration.create(50, TimeUnit.MILLISECONDS)) - .mapConcat(new Function, java.util.List>() { - public java.util.List apply(java.util.List elem) { - return elem; - } - }).runFold("", new Function2() { - public String apply(String acc, String elem) { - return acc + elem; - } - }, materializer).foreach(new Foreach() { // Scala Future - public void each(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, system.dispatcher()); + ints + .drop(2) + .take(3) + .takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS)) + .map(elem -> lookup[elem]) + .filter(elem -> !elem.equals("c")) + .grouped(2) + .mapConcat(elem -> elem) + .groupedWithin(100, FiniteDuration.create(50, TimeUnit.MILLISECONDS)) + .mapConcat(elem -> elem) + .runFold("", (acc, elem) -> acc + elem, materializer) + .thenAccept(elem -> probe.getRef().tell(elem, ActorRef.noSender())); probe.expectMsgEquals("de"); } @@ -88,20 +78,11 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseVoidTypeInForeach() { final JavaTestKit probe = new JavaTestKit(system); final java.lang.Iterable input = Arrays.asList("a", "b", "c"); - Source ints = Source.from(input); + Source ints = Source.from(input); - Future completion = ints.runForeach(new Procedure() { - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, materializer); + final 
CompletionStage completion = ints.runForeach(elem -> probe.getRef().tell(elem, ActorRef.noSender()), materializer); - completion.onSuccess(new OnSuccess() { - @Override - public void onSuccess(Done elem) throws Throwable { - probe.getRef().tell(String.valueOf(elem), ActorRef.noSender()); - } - }, system.dispatcher()); + completion.thenAccept(elem -> probe.getRef().tell(String.valueOf(elem), ActorRef.noSender())); probe.expectMsgEquals("a"); probe.expectMsgEquals("b"); @@ -176,9 +157,9 @@ public class SourceTest extends StreamTest { .grouped(10) .mergeSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final Object[] result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)).toArray(); + final Object[] result = future.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(); Arrays.sort(result, (Comparator)(Object) new Comparator>() { @Override public int compare(List o1, List o2) { @@ -202,9 +183,9 @@ public class SourceTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList(".", "D"), Arrays.asList(".", "E", "F")), result); } @@ -222,9 +203,9 @@ public class SourceTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C", "."), Arrays.asList("D", "."), Arrays.asList("E", "F")), result); } @@ -235,8 +216,8 @@ public class SourceTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); in1.concat(in2).runForeach(new Procedure() { public void apply(String elem) { @@ -254,8 +235,8 @@ public class SourceTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); in2.prepend(in1).runForeach(new Procedure() { public void apply(String elem) { @@ -308,20 +289,16 @@ public class SourceTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Source.from(input).map(new Function() { - public String apply(String arg0) throws Exception { - throw new RuntimeException("simulated err"); - } - }).runWith(Sink.head(), materializer).onComplete(new OnSuccess>() { - @Override - public void onSuccess(Try e) throws Throwable { - if (e == null) { + Source.from(input) + . 
map(in -> { throw new RuntimeException("simulated err"); }) + .runWith(Sink.head(), materializer) + .whenComplete((s, ex) -> { + if (ex == null) { probe.getRef().tell("done", ActorRef.noSender()); } else { - probe.getRef().tell(e.failed().get().getMessage(), ActorRef.noSender()); + probe.getRef().tell(ex.getMessage(), ActorRef.noSender()); } - } - }, system.dispatcher()); + }); probe.expectMsgEquals("simulated err"); } @@ -330,8 +307,8 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseToFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Future future = Source.from(input).runWith(Sink.head(), materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).runWith(Sink.head(), materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ -339,14 +316,13 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUsePrefixAndTail() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(1, 2, 3, 4, 5, 6); - Future, Source>> future = Source.from(input).prefixAndTail(3) + CompletionStage, Source>> future = Source.from(input).prefixAndTail(3) .runWith(Sink., Source>>head(), materializer); - Pair, Source> result = Await.result(future, - probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + Pair, Source> result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3), result.first()); - Future> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); - List tailResult = Await.result(tailFuture, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); + List tailResult = tailFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(4, 5, 6), tailResult); } @@ -360,12 +336,12 @@ public class SourceTest extends StreamTest { mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); - Future> future = Source.from(mainInputs) + CompletionStage> future = Source.from(mainInputs) .flatMapConcat(ConstantFun.>javaIdentityFunction()) .grouped(6) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3, 4, 5), result); } @@ -384,11 +360,11 @@ public class SourceTest extends StreamTest { mainInputs.add(Source.from(input3)); mainInputs.add(Source.from(input4)); - Future> future = Source.from(mainInputs) + CompletionStage> future = Source.from(mainInputs) .flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); final Set set = new HashSet(); for (Integer i: result) { set.add(i); @@ -405,10 +381,10 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseBuffer() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future> future = Source.from(input).buffer(2, OverflowStrategy.backpressure()).grouped(4) + final 
CompletionStage> future = Source.from(input).buffer(2, OverflowStrategy.backpressure()).grouped(4) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(input, result); } @@ -416,23 +392,10 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseConflate() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future future = Source.from(input).conflate(new Function() { - @Override - public String apply(String s) throws Exception { - return s; - } - }, new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }).runFold("", new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input) + .conflate(s -> s, (aggr, in) -> aggr + in) + .runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -440,8 +403,8 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseExpand() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future future = Source.from(input).expand(in -> Stream.iterate(in, i -> i).iterator()).runWith(Sink.head(), materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).expand(in -> Stream.iterate(in, i -> i).iterator()).runWith(Sink.head(), materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ -468,15 +431,9 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseMapFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("a", "b", "c"); - Source.from(input).mapAsync(4, new Function>() { - public Future apply(String elem) { - return Futures.successful(elem.toUpperCase()); - } - }).runForeach(new Procedure() { - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, materializer); + Source.from(input) + .mapAsync(4, elem -> CompletableFuture.completedFuture(elem.toUpperCase())) + .runForeach(elem -> probe.getRef().tell(elem, ActorRef.noSender()), materializer); probe.expectMsgEquals("A"); probe.expectMsgEquals("B"); probe.expectMsgEquals("C"); @@ -486,16 +443,16 @@ public class SourceTest extends StreamTest { public void mustWorkFromFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Future future1 = Source.from(input).runWith(Sink.head(), materializer); - Future future2 = Source.fromFuture(future1).runWith(Sink.head(), materializer); - String result = Await.result(future2, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future1 = Source.from(input).runWith(Sink.head(), materializer); + CompletionStage future2 = Source.fromCompletionStage(future1).runWith(Sink.head(), materializer); + String result = future2.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); 
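    // A minimal sketch of the non-blocking alternative: rather than blocking with
    // toCompletableFuture().get(...), a callback can be chained directly on the CompletionStage.
    future2.thenAccept(s -> probe.getRef().tell(s, ActorRef.noSender()));
    probe.expectMsgEquals("A");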
} @Test public void mustWorkFromRange() throws Exception { - Future> f = Source.range(0, 10).grouped(20).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + CompletionStage> f = Source.range(0, 10).grouped(20).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(11, result.size()); Integer counter = 0; for (Integer i: result) @@ -504,8 +461,8 @@ public class SourceTest extends StreamTest { @Test public void mustWorkFromRangeWithStep() throws Exception { - Future> f = Source.range(0, 10, 2).grouped(20).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + CompletionStage> f = Source.range(0, 10, 2).grouped(20).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(6, result.size()); Integer counter = 0; for (Integer i: result) { @@ -516,8 +473,8 @@ public class SourceTest extends StreamTest { @Test public void mustRepeat() throws Exception { - final Future> f = Source.repeat(42).grouped(10000).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + final CompletionStage> f = Source.repeat(42).grouped(10000).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(result.size(), 10000); for (Integer i: result) assertEquals(i, (Integer) 42); } @@ -540,39 +497,31 @@ public class SourceTest extends StreamTest { @Test public void mustBeAbleToUseDropWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).dropWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).dropWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(2); probe.expectMsgEquals(3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseTakeWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).takeWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).takeWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(0); probe.expectMsgEquals(1); @@ -580,7 +529,7 @@ public class SourceTest extends StreamTest { FiniteDuration duration = Duration.apply(200, TimeUnit.MILLISECONDS); probe.expectNoMsg(duration); - Await.ready(future, duration); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -588,56 +537,41 @@ public class SourceTest extends StreamTest { final 
ManualProbe publisherProbe = TestPublisher.manualProbe(true,system); final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.fromPublisher(publisherProbe).map( - new Function() { - public Integer apply(Integer elem) { + final Source source = + Source.fromPublisher(publisherProbe) + .map(elem -> { if (elem == 1) throw new RuntimeException("ex"); else return elem; - } - }) - .recover(new JavaPartialFunction() { - public Integer apply(Throwable elem, boolean isCheck) { - if (isCheck) return null; - return 0; - } - }); + }) + .recover(new PFBuilder() + .matchAny(ex -> 0) + .build()); - final Future future = source.runWith(Sink.foreach(new Procedure() { - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); final PublisherProbeSubscription s = publisherProbe.expectSubscription(); s.sendNext(0); probe.expectMsgEquals(0); s.sendNext(1); probe.expectMsgEquals(0); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToCombine() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source1 = Source.from(Arrays.asList(0, 1)); - final Source source2 = Source.from(Arrays.asList(2, 3)); + final Source source1 = Source.from(Arrays.asList(0, 1)); + final Source source2 = Source.from(Arrays.asList(2, 3)); - final Source source = Source.combine(source1, source2, new ArrayList>(), - new Function, NotUsed>>() { - public Graph, NotUsed> apply(Integer elem) { - return Merge.create(elem); - } - }); + final Source source = Source.combine( + source1, source2, new ArrayList>(), + width -> Merge. 
create(width)); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgAllOf(0, 1, 2, 3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -710,48 +644,53 @@ public class SourceTest extends StreamTest { @Test - public void mustBeAbleToUseInitialTimeout() throws Exception { + public void mustBeAbleToUseInitialTimeout() throws Throwable { try { - Await.result( - Source.maybe().initialTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { - // expected - } - } - - - @Test - public void mustBeAbleToUseCompletionTimeout() throws Exception { - try { - Await.result( - Source.maybe().completionTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source.maybe().initialTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseIdleTimeout() throws Exception { + public void mustBeAbleToUseCompletionTimeout() throws Throwable { try { - Await.result( - Source.maybe().idleTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source.maybe().completionTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { + // expected + } + } + + @Test + public void mustBeAbleToUseIdleTimeout() throws Throwable { + try { + try { + Source.maybe().idleTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test public void mustBeAbleToUseIdleInject() throws Exception { - Integer result = Await.result( + Integer result = Source.maybe() .keepAlive(Duration.create(1, "second"), new Creator() { public Integer create() { @@ -759,9 +698,8 @@ public class SourceTest extends StreamTest { } }) .takeWithin(Duration.create(1500, "milliseconds")) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); + .runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals((Object) 0, result); } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java index deade7145f..328f98c1ff 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java @@ -8,6 +8,8 @@ import 
static org.junit.Assert.assertTrue; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.net.BindException; @@ -37,7 +39,7 @@ public class TcpTest extends StreamTest { public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource("TcpTest", AkkaSpec.testConf()); - final Sink> echoHandler = + final Sink> echoHandler = Sink.foreach(new Procedure() { public void apply(IncomingConnection conn) { conn.handleWith(Flow.of(ByteString.class), materializer); @@ -54,17 +56,16 @@ public class TcpTest extends StreamTest { @Test public void mustWorkInHappyCase() throws Exception { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress("127.0.0.1", false); - final Source> binding = Tcp.get(system) + final Source> binding = Tcp.get(system) .bind(serverAddress.getHostName(), serverAddress.getPort()); // TODO getHostString in Java7 - final Future future = binding.to(echoHandler).run(materializer); - final ServerBinding b = Await.result(future, FiniteDuration.create(5, TimeUnit.SECONDS)); + final CompletionStage future = binding.to(echoHandler).run(materializer); + final ServerBinding b = future.toCompletableFuture().get(5, TimeUnit.SECONDS); assertEquals(b.localAddress().getPort(), serverAddress.getPort()); - final Future resultFuture = Source + final CompletionStage resultFuture = Source .from(testInput) - // TODO getHostString in Java7 - .via(Tcp.get(system).outgoingConnection(serverAddress.getHostName(), serverAddress.getPort())) + .via(Tcp.get(system).outgoingConnection(serverAddress.getHostString(), serverAddress.getPort())) .runFold(ByteString.empty(), new Function2() { public ByteString apply(ByteString acc, ByteString elem) { @@ -72,7 +73,7 @@ public class TcpTest extends StreamTest { } }, materializer); - final byte[] result = Await.result(resultFuture, FiniteDuration.create(5, TimeUnit.SECONDS)).toArray(); + final byte[] result = resultFuture.toCompletableFuture().get(5, TimeUnit.SECONDS).toArray(); for (int i = 0; i < testInput.size(); i ++) { assertEquals(testInput.get(i).head(), result[i]); } @@ -81,11 +82,11 @@ public class TcpTest extends StreamTest { @Test public void mustReportServerBindFailure() throws Exception { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress("127.0.0.1", false); - final Source> binding = Tcp.get(system) + final Source> binding = Tcp.get(system) .bind(serverAddress.getHostName(), serverAddress.getPort()); // TODO getHostString in Java7 - final Future future = binding.to(echoHandler).run(materializer); - final ServerBinding b = Await.result(future, FiniteDuration.create(5, TimeUnit.SECONDS)); + final CompletionStage future = binding.to(echoHandler).run(materializer); + final ServerBinding b = future.toCompletableFuture().get(5, TimeUnit.SECONDS); assertEquals(b.localAddress().getPort(), serverAddress.getPort()); new JavaTestKit(system) {{ @@ -93,9 +94,11 @@ public class TcpTest extends StreamTest { @Override protected Void run() { try { - Await.result(binding.to(echoHandler).run(materializer), FiniteDuration.create(5, TimeUnit.SECONDS)); + binding.to(echoHandler).run(materializer).toCompletableFuture().get(5, TimeUnit.SECONDS); assertTrue("Expected BindFailedException, but nothing was reported", false); - } catch (BindFailedException e) { + } catch (ExecutionException e) { + if (e.getCause() instanceof 
BindFailedException) {} // all good + else throw new AssertionError("failed", e); // expected } catch (Exception e) { throw new AssertionError("failed", e); @@ -107,19 +110,19 @@ public class TcpTest extends StreamTest { } @Test - public void mustReportClientConnectFailure() throws Exception { + public void mustReportClientConnectFailure() throws Throwable { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress( "127.0.0.1", false); try { - Await.result( - Source.from(testInput) - // TODO getHostString in Java7 - .viaMat(Tcp.get(system).outgoingConnection(serverAddress.getHostName(), serverAddress.getPort()), - Keep.> right()) - .to(Sink. ignore()) - .run(materializer), - FiniteDuration.create(5, TimeUnit.SECONDS)); - assertTrue("Expected StreamTcpException, but nothing was reported", false); + try { + Source.from(testInput) + .viaMat(Tcp.get(system).outgoingConnection(serverAddress.getHostString(), serverAddress.getPort()), + Keep.right()) + .to(Sink. ignore()).run(materializer).toCompletableFuture().get(5, TimeUnit.SECONDS); + assertTrue("Expected StreamTcpException, but nothing was reported", false); + } catch (ExecutionException e) { + throw e.getCause(); + } } catch (StreamTcpException e) { // expected } diff --git a/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java b/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java index ad05c98c85..51edf8e43b 100644 --- a/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java @@ -3,6 +3,7 @@ */ package akka.stream.stage; +import akka.NotUsed; import akka.stream.StreamTest; import akka.stream.javadsl.AkkaJUnitActorSystemResource; import akka.stream.javadsl.Sink; @@ -19,6 +20,8 @@ import scala.concurrent.duration.Duration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; public class StageTest extends StreamTest { public StageTest() { @@ -32,16 +35,16 @@ public class StageTest extends StreamTest { @Test public void javaStageUsage() throws Exception { final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); - final Source ints = Source.from(input); + final Source ints = Source.from(input); final JavaIdentityStage identity = new JavaIdentityStage(); - final Future> result = + final CompletionStage> result = ints .via(identity) .via(identity) .grouped(1000) .runWith(Sink.>head(), materializer); - assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala index be9605a319..4ef72e9b83 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala @@ -11,9 +11,9 @@ import akka.stream.stage.OutHandler import akka.stream.testkit.Utils._ import akka.stream.testkit.{ AkkaSpec, TestSubscriber } import akka.testkit.TestProbe - import scala.concurrent.duration._ import scala.concurrent.{ Future, _ } +import akka.Done class QueueSourceSpec extends AkkaSpec { implicit val materializer = ActorMaterializer() @@ -70,7 +70,7 @@ class QueueSourceSpec extends AkkaSpec { expectNoMsg(pause) sub.cancel() - expectMsg(()) + expectMsg(Done) } 
"buffer when needed" in { @@ -123,7 +123,7 @@ class QueueSourceSpec extends AkkaSpec { sub.cancel() - expectMsgAllOf(QueueOfferResult.QueueClosed, ()) + expectMsgAllOf(QueueOfferResult.QueueClosed, Done) } "fail stream on buffer overflow in fail mode" in assertAllStagesStopped { @@ -218,7 +218,7 @@ class QueueSourceSpec extends AkkaSpec { val sub = s.expectSubscription queue.watchCompletion().pipeTo(testActor) sub.cancel() - expectMsg(()) + expectMsg(Done) queue.offer(1).onFailure { case e ⇒ e.isInstanceOf[IllegalStateException] should ===(true) } } diff --git a/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala b/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala new file mode 100644 index 0000000000..26addfe1ed --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.stream + +sealed abstract class QueueOfferResult + +/** + * Contains types that is used as return types for async callbacks to streams + */ +object QueueOfferResult { + + /** + * Type is used to indicate that stream is successfully enqueued an element + */ + final case object Enqueued extends QueueOfferResult + + /** + * Type is used to indicate that stream is dropped an element + */ + final case object Dropped extends QueueOfferResult + + /** + * Type is used to indicate that stream is failed before or during call to the stream + * @param cause - exception that stream failed with + */ + final case class Failure(cause: Throwable) extends QueueOfferResult + + /** + * Type is used to indicate that stream is completed before call + */ + case object QueueClosed extends QueueOfferResult +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala index 739df1def1..148a34b08b 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala @@ -4,7 +4,6 @@ package akka.stream.impl import java.util.concurrent.atomic.AtomicReference - import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Props } import akka.stream.Attributes.InputBuffer @@ -12,11 +11,15 @@ import akka.stream._ import akka.stream.impl.StreamLayout.Module import akka.stream.stage._ import org.reactivestreams.{ Publisher, Subscriber } - import scala.annotation.unchecked.uncheckedVariance import scala.concurrent.{ Future, Promise } import scala.language.postfixOps import scala.util.{ Failure, Success, Try } +import akka.stream.scaladsl.SinkQueue +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ +import scala.compat.java8.OptionConverters._ +import java.util.Optional /** * INTERNAL API @@ -316,3 +319,8 @@ private[akka] class QueueSink[T]() extends GraphStageWithMaterializedValue[SinkS }) } } + +private[akka] final class SinkQueueAdapter[T](delegate: SinkQueue[T]) extends akka.stream.javadsl.SinkQueue[T] { + import akka.dispatch.ExecutionContexts.{ sameThreadExecutionContext ⇒ same } + def pull(): CompletionStage[Optional[T]] = delegate.pull().map(_.asJava)(same).toJava +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/Sources.scala b/akka-stream/src/main/scala/akka/stream/impl/Sources.scala index e3eda50bca..0ee9c516a7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Sources.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Sources.scala @@ -7,6 +7,10 @@ import akka.stream.OverflowStrategies._ import akka.stream._ import akka.stream.stage._ import 
scala.concurrent.{ Future, Promise } +import akka.stream.scaladsl.SourceQueue +import akka.Done +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * INTERNAL API @@ -16,7 +20,7 @@ private[akka] class QueueSource[T](maxBuffer: Int, overflowStrategy: OverflowStr val out = Outlet[T]("queueSource.out") override val shape: SourceShape[T] = SourceShape.of(out) - val completion = Promise[Unit] + val completion = Promise[Done] override def createLogicAndMaterializedValue(inheritedAttributes: Attributes) = { val stageLogic = new GraphStageLogic(shape) with CallbackWrapper[(T, Offered)] { @@ -88,7 +92,7 @@ private[akka] class QueueSource[T](maxBuffer: Int, overflowStrategy: OverflowStr pendingOffer = None case None ⇒ // do nothing } - completion.success(()) + completion.success(Done) completeStage() } @@ -125,3 +129,7 @@ private[akka] class QueueSource[T](maxBuffer: Int, overflowStrategy: OverflowStr } } +private[akka] final class SourceQueueAdapter[T](delegate: SourceQueue[T]) extends akka.stream.javadsl.SourceQueue[T] { + def offer(elem: T): CompletionStage[QueueOfferResult] = delegate.offer(elem).toJava + def watchCompletion(): CompletionStage[Done] = delegate.watchCompletion().toJava +} diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala index 768f8ab199..a6380a5831 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala @@ -6,7 +6,6 @@ package akka.stream.javadsl import akka.NotUsed import akka.japi.function import akka.stream._ -import akka.stream.scaladsl.Flow import scala.concurrent.duration.FiniteDuration diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala b/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala index a927cc2ae0..872412a74e 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala @@ -4,13 +4,11 @@ package akka.stream.javadsl import java.io.{ InputStream, OutputStream, File } - import akka.japi.function import akka.stream.{ scaladsl, javadsl, ActorAttributes } import akka.stream.io.IOResult import akka.util.ByteString - -import scala.concurrent.Future +import java.util.concurrent.CompletionStage /** * Factories to create sinks and sources from files @@ -22,7 +20,7 @@ object FileIO { * Overwrites existing files, if you want to append to an existing file use [[#file(File, Boolean)]] and * pass in `true` as the Boolean argument. * - * Materializes a [[Future]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * Materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or @@ -30,13 +28,13 @@ object FileIO { * * @param f The file to write to */ - def toFile(f: File): javadsl.Sink[ByteString, Future[IOResult]] = toFile(f, append = false) + def toFile(f: File): javadsl.Sink[ByteString, CompletionStage[IOResult]] = toFile(f, append = false) /** * Creates a Sink that writes incoming [[ByteString]] elements to the given file and either overwrites * or appends to it. 
* - * Materializes a [[Future]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * Materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or @@ -45,8 +43,8 @@ object FileIO { * @param f The file to write to * @param append Whether or not the file should be overwritten or appended to */ - def toFile(f: File, append: Boolean): javadsl.Sink[ByteString, Future[IOResult]] = - new Sink(scaladsl.FileIO.toFile(f, append)).asInstanceOf[javadsl.Sink[ByteString, Future[IOResult]]] + def toFile(f: File, append: Boolean): javadsl.Sink[ByteString, CompletionStage[IOResult]] = + new Sink(scaladsl.FileIO.toFile(f, append).toCompletionStage()) /** * Creates a Source from a Files contents. @@ -56,10 +54,10 @@ object FileIO { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * It materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, * and a possible exception if IO operation was not completed successfully. */ - def fromFile(f: File): javadsl.Source[ByteString, Future[IOResult]] = fromFile(f, 8192) + def fromFile(f: File): javadsl.Source[ByteString, CompletionStage[IOResult]] = fromFile(f, 8192) /** * Creates a synchronous (Java 6 compatible) Source from a Files contents. @@ -69,10 +67,10 @@ object FileIO { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * It materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, * and a possible exception if IO operation was not completed successfully. 
*/ - def fromFile(f: File, chunkSize: Int): javadsl.Source[ByteString, Future[IOResult]] = - new Source(scaladsl.FileIO.fromFile(f, chunkSize)).asInstanceOf[Source[ByteString, Future[IOResult]]] + def fromFile(f: File, chunkSize: Int): javadsl.Source[ByteString, CompletionStage[IOResult]] = + new Source(scaladsl.FileIO.fromFile(f, chunkSize).toCompletionStage()) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala index 739094c194..c46fbfc49f 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala @@ -13,10 +13,11 @@ import akka.stream.stage.Stage import org.reactivestreams.Processor import scala.annotation.unchecked.uncheckedVariance import scala.collection.immutable -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ object Flow { @@ -322,22 +323,22 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the + * as they pass through this processing step. The function returns a `CompletionStage` and the * value of that future will be emitted downstreams. As many futures as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. * - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream * backpressures or the first future is not completed @@ -348,29 +349,29 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Flow[In, T, Mat] = - new Flow(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] = + new Flow(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the + * as they pass through this processing step. 
The function returns a `CompletionStage` and the * value of that future will be emitted downstreams. As many futures as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures * returned by `f` might be emitted in a different order). * - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures * @@ -380,8 +381,8 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Flow[In, T, Mat] = - new Flow(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] = + new Flow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. @@ -1386,7 +1387,7 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends def zipMat[T, M, M2](that: Graph[SourceShape[T], M], matF: function.Function2[Mat, M, M2]): javadsl.Flow[In, Out @uncheckedVariance Pair T, M2] = this.viaMat(Flow.fromGraph(GraphDSL.create(that, - new function.Function2[GraphDSL.Builder[M], SourceShape[T], FlowShape[Out, Out @ uncheckedVariance Pair T]] { + new function.Function2[GraphDSL.Builder[M], SourceShape[T], FlowShape[Out, Out @uncheckedVariance Pair T]] { def apply(b: GraphDSL.Builder[M], s: SourceShape[T]): FlowShape[Out, Out @uncheckedVariance Pair T] = { val zip: FanInShape2[Out, T, Out Pair T] = b.add(Zip.create[Out, T]) b.from(s).toInlet(zip.in1) diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala new file mode 100644 index 0000000000..25493d3db7 --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2015 Typesafe Inc. + */ +package akka.stream.javadsl + +import akka.Done +import java.util.concurrent.CompletionStage +import java.util.Optional +import akka.stream.QueueOfferResult + +/** + * This trait allows to have the queue as a data source for some stream. 
+ */ +trait SourceQueue[T] { + + /** + * Method offers next element to a stream and returns future that: + * - completes with `Enqueued` if element is consumed by a stream + * - completes with `Dropped` when stream dropped offered element + * - completes with `QueueClosed` when stream is completed during future is active + * - completes with `Failure(f)` when failure to enqueue element from upstream + * - fails when stream is completed or you cannot call offer in this moment because of implementation rules + * (like for backpressure mode and full buffer you need to wait for last offer call Future completion) + * + * @param elem element to send to a stream + */ + def offer(elem: T): CompletionStage[QueueOfferResult] + + /** + * Method returns future that completes when stream is completed and fails when stream failed + */ + def watchCompletion(): CompletionStage[Done] +} + +/** + * Trait allows to have the queue as a sink for some stream. + * "SinkQueue" pulls data from stream with backpressure mechanism. + */ +trait SinkQueue[T] { + + /** + * Method pulls elements from stream and returns future that: + * - fails if stream is failed + * - completes with None in case if stream is completed + * - completes with `Some(element)` in case next element is available from stream. + */ + def pull(): CompletionStage[Optional[T]] +} diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala index b7854e0253..b3cfbf7c6d 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala @@ -4,7 +4,6 @@ package akka.stream.javadsl import java.util.Optional - import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Props } import akka.dispatch.ExecutionContexts @@ -12,32 +11,34 @@ import akka.japi.function import akka.stream.impl.StreamLayout import akka.stream.{ javadsl, scaladsl, _ } import org.reactivestreams.{ Publisher, Subscriber } - import scala.compat.java8.OptionConverters._ -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.ExecutionContext import scala.util.Try +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters.FutureOps +import akka.stream.impl.SinkQueueAdapter /** Java API */ object Sink { /** * A `Sink` that will invoke the given function for every received element, giving it its previous * output (or the given `zero` value) and the element as input. - * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure is signaled in the stream. */ - def fold[U, In](zero: U, f: function.Function2[U, In, U]): javadsl.Sink[In, Future[U]] = - new Sink(scaladsl.Sink.fold[U, In](zero)(f.apply)) + def fold[U, In](zero: U, f: function.Function2[U, In, U]): javadsl.Sink[In, CompletionStage[U]] = + new Sink(scaladsl.Sink.fold[U, In](zero)(f.apply).toCompletionStage()) /** * A `Sink` that will invoke the given function for every received element, giving it its previous * output (from the second element) and the element as input. 
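
The `offer` contract above yields a `CompletionStage[QueueOfferResult]` instead of a `Future`. A hypothetical caller-side sketch of inspecting that result with plain JDK combinators (object and method names are invented; the `QueueOfferResult` cases are the ones listed in the Scaladoc above):

    object OfferSketch {
      import java.util.function.Consumer
      import akka.stream.QueueOfferResult
      import akka.stream.javadsl.SourceQueue

      def handleOffer[T](queue: SourceQueue[T], elem: T): Unit =
        queue.offer(elem).thenAccept(new Consumer[QueueOfferResult] {
          override def accept(result: QueueOfferResult): Unit = result match {
            case QueueOfferResult.Enqueued   => () // element was accepted by the stream
            case QueueOfferResult.Dropped    => () // buffer was full, element was dropped
            case QueueOfferResult.Failure(_) => () // stream failed while the offer was pending
            case QueueOfferResult.QueueClosed => () // stream completed before the offer
          }
        })
    }
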
- * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure signaled in the stream. */ - def reduce[In](f: function.Function2[In, In, In]): Sink[In, Future[In]] = - new Sink(scaladsl.Sink.reduce[In](f.apply)) + def reduce[In](f: function.Function2[In, In, In]): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.reduce[In](f.apply).toCompletionStage()) /** * Helper to create [[Sink]] from `Subscriber`. @@ -54,8 +55,8 @@ object Sink { /** * A `Sink` that will consume the stream and discard the elements. */ - def ignore[T](): Sink[T, Future[Done]] = - new Sink(scaladsl.Sink.ignore) + def ignore[T](): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.ignore.toCompletionStage()) /** * A `Sink` that materializes into a [[org.reactivestreams.Publisher]]. @@ -73,26 +74,26 @@ object Sink { /** * A `Sink` that will invoke the given procedure for each received element. The sink is materialized - * into a [[scala.concurrent.Future]] will be completed with `Success` when reaching the + * into a [[java.util.concurrent.CompletionStage]] will be completed with `Success` when reaching the * normal end of the stream, or completed with `Failure` if there is a failure is signaled in * the stream.. */ - def foreach[T](f: function.Procedure[T]): Sink[T, Future[Done]] = - new Sink(scaladsl.Sink.foreach(f.apply)) + def foreach[T](f: function.Procedure[T]): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.foreach(f.apply).toCompletionStage()) /** * A `Sink` that will invoke the given procedure for each received element in parallel. The sink is materialized - * into a [[scala.concurrent.Future]]. + * into a [[java.util.concurrent.CompletionStage]]. * * If `f` throws an exception and the supervision decision is - * [[akka.stream.Supervision.Stop]] the `Future` will be completed with failure. + * [[akka.stream.Supervision.Stop]] the `CompletionStage` will be completed with failure. * * If `f` throws an exception and the supervision decision is * [[akka.stream.Supervision.Resume]] or [[akka.stream.Supervision.Restart]] the * element is dropped and the stream continues. */ - def foreachParallel[T](parallel: Int)(f: function.Procedure[T])(ec: ExecutionContext): Sink[T, Future[Done]] = - new Sink(scaladsl.Sink.foreachParallel(parallel)(f.apply)(ec)) + def foreachParallel[T](parallel: Int)(f: function.Procedure[T])(ec: ExecutionContext): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.foreachParallel(parallel)(f.apply)(ec).toCompletionStage()) /** * A `Sink` that when the flow is completed, either through a failure or normal @@ -103,60 +104,60 @@ object Sink { new Sink(scaladsl.Sink.onComplete[In](x ⇒ callback.apply(x))) /** - * A `Sink` that materializes into a `Future` of the first value received. - * If the stream completes before signaling at least a single element, the Future will be failed with a [[NoSuchElementException]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the first value received. + * If the stream completes before signaling at least a single element, the CompletionStage will be failed with a [[NoSuchElementException]]. 
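
Since `Sink.fold` (and the other materializing sinks above) now produce a `CompletionStage`, a caller gets a JDK type straight from `runWith`. A hedged sketch, driven from Scala purely for illustration (object and method names are invented; the `Materializer` is passed in explicitly):

    object FoldSketch {
      import java.util.concurrent.CompletionStage
      import akka.NotUsed
      import akka.japi.function
      import akka.stream.Materializer
      import akka.stream.javadsl.{ Sink, Source }

      // concatenate all elements; the materialized value is a CompletionStage[String]
      def concatAll(src: Source[String, NotUsed], mat: Materializer): CompletionStage[String] =
        src.runWith(
          Sink.fold[String, String]("", new function.Function2[String, String, String] {
            override def apply(acc: String, next: String): String = acc + next
          }),
          mat)
    }
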
+ * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[headOption]]. */ - def head[In](): Sink[In, Future[In]] = - new Sink(scaladsl.Sink.head[In]) + def head[In](): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.head[In].toCompletionStage()) /** - * A `Sink` that materializes into a `Future` of the optional first value received. - * If the stream completes before signaling at least a single element, the value of the Future will be an empty [[java.util.Optional]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the optional first value received. + * If the stream completes before signaling at least a single element, the value of the CompletionStage will be an empty [[java.util.Optional]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[head]]. */ - def headOption[In](): Sink[In, Future[Optional[In]]] = + def headOption[In](): Sink[In, CompletionStage[Optional[In]]] = new Sink(scaladsl.Sink.headOption[In].mapMaterializedValue( - _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext))) + _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) /** - * A `Sink` that materializes into a `Future` of the last value received. - * If the stream completes before signaling at least a single element, the Future will be failed with a [[NoSuchElementException]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the last value received. + * If the stream completes before signaling at least a single element, the CompletionStage will be failed with a [[NoSuchElementException]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[lastOption]]. */ - def last[In](): Sink[In, Future[In]] = - new Sink(scaladsl.Sink.last[In]) + def last[In](): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.last[In].toCompletionStage()) /** - * A `Sink` that materializes into a `Future` of the optional last value received. - * If the stream completes before signaling at least a single element, the value of the Future will be an empty [[java.util.Optional]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the optional last value received. + * If the stream completes before signaling at least a single element, the value of the CompletionStage will be an empty [[java.util.Optional]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[head]]. 
*/ - def lastOption[In](): Sink[In, Future[Optional[In]]] = + def lastOption[In](): Sink[In, CompletionStage[Optional[In]]] = new Sink(scaladsl.Sink.lastOption[In].mapMaterializedValue( - _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext))) + _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) /** * A `Sink` that keeps on collecting incoming elements until upstream terminates. * As upstream may be unbounded, `Flow[T].take` or the stricter `Flow[T].limit` (and their variants) * may be used to ensure boundedness. - * Materializes into a `Future` of `Seq[T]` containing all the collected elements. + * Materializes into a `CompletionStage` of `Seq[T]` containing all the collected elements. * `List` is limited to `Integer.MAX_VALUE` elements, this Sink will cancel the stream * after having received that many elements. * * See also [[Flow.limit]], [[Flow.limitWeighted]], [[Flow.take]], [[Flow.takeWithin]], [[Flow.takeWhile]] */ - def seq[In]: Sink[In, Future[java.util.List[In]]] = { + def seq[In]: Sink[In, CompletionStage[java.util.List[In]]] = { import scala.collection.JavaConverters._ - new Sink(scaladsl.Sink.seq[In].mapMaterializedValue(fut ⇒ fut.map(sq ⇒ sq.asJava)(ExecutionContexts.sameThreadExecutionContext))) + new Sink(scaladsl.Sink.seq[In].mapMaterializedValue(fut ⇒ fut.map(sq ⇒ sq.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) } /** @@ -224,10 +225,10 @@ object Sink { /** * Creates a `Sink` that is materialized as an [[akka.stream.SinkQueue]]. - * [[akka.stream.SinkQueue.pull]] method is pulling element from the stream and returns ``Future[Option[T]]``. - * `Future` completes when element is available. + * [[akka.stream.SinkQueue.pull]] method is pulling element from the stream and returns ``CompletionStage[Option[T]]``. + * `CompletionStage` completes when element is available. * - * Before calling pull method second time you need to wait until previous Future completes. + * Before calling pull method second time you need to wait until previous CompletionStage completes. * Pull returns Failed future with ''IllegalStateException'' if previous future has not yet completed. 
* * `Sink` will request at most number of elements equal to size of `inputBuffer` from @@ -240,7 +241,7 @@ object Sink { * @see [[akka.stream.SinkQueue]] */ def queue[T](): Sink[T, SinkQueue[T]] = - new Sink(scaladsl.Sink.queue()) + new Sink(scaladsl.Sink.queue[T]().mapMaterializedValue(new SinkQueueAdapter(_))) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala index c19ee74240..504d40e0a4 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala @@ -6,7 +6,6 @@ package akka.stream.javadsl import java.io.{ OutputStream, InputStream, File } import java.util import java.util.Optional - import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Cancellable, Props } import akka.event.LoggingAdapter @@ -25,8 +24,11 @@ import scala.collection.immutable.Range.Inclusive import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ Future, Promise } import scala.language.{ higherKinds, implicitConversions } - import scala.compat.java8.OptionConverters._ +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture +import scala.compat.java8.FutureConverters._ +import akka.stream.impl.SourceQueueAdapter /** Java API */ object Source { @@ -39,21 +41,21 @@ object Source { def empty[O](): Source[O, NotUsed] = _empty.asInstanceOf[Source[O, NotUsed]] /** - * Create a `Source` which materializes a [[scala.concurrent.Promise]] which controls what element + * Create a `Source` which materializes a [[java.util.concurrent.CompletableFuture]] which controls what element * will be emitted by the Source. - * If the materialized promise is completed with a Some, that value will be produced downstream, + * If the materialized promise is completed with a filled Optional, that value will be produced downstream, * followed by completion. - * If the materialized promise is completed with a None, no value will be produced downstream and completion will + * If the materialized promise is completed with an empty Optional, no value will be produced downstream and completion will * be signalled immediately. * If the materialized promise is completed with a failure, then the returned source will terminate with that error. * If the downstream of this source cancels before the promise has been completed, then the promise will be completed - * with None. + * with an empty Optional. */ - def maybe[T]: Source[T, Promise[Optional[T]]] = { + def maybe[T]: Source[T, CompletableFuture[Optional[T]]] = { new Source(scaladsl.Source.maybe[T].mapMaterializedValue { scalaOptionPromise: Promise[Option[T]] ⇒ - val javaOptionPromise = Promise[Optional[T]]() + val javaOptionPromise = new CompletableFuture[Optional[T]]() scalaOptionPromise.completeWith( - javaOptionPromise.future + javaOptionPromise.toScala .map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext)) javaOptionPromise @@ -160,6 +162,15 @@ object Source { def fromFuture[O](future: Future[O]): javadsl.Source[O, NotUsed] = new Source(scaladsl.Source.fromFuture(future)) + /** + * Start a new `Source` from the given `CompletionStage`. The stream will consist of + * one element when the `CompletionStage` is completed with a successful value, which + * may happen before or after materializing the `Flow`. + * The stream terminates with a failure if the `CompletionStage` is completed with a failure. 
+ */ + def fromCompletionStage[O](future: CompletionStage[O]): javadsl.Source[O, NotUsed] = + new Source(scaladsl.Source.fromCompletionStage(future)) + /** * Elements are emitted periodically with the specified interval. * The tick element will be delivered to downstream consumers that has requested any elements. @@ -193,10 +204,10 @@ object Source { /** * Same as [[unfold]], but uses an async function to generate the next state-element tuple. */ - def unfoldAsync[S, E](s: S, f: function.Function[S, Future[Optional[(S, E)]]]): Source[E, NotUsed] = + def unfoldAsync[S, E](s: S, f: function.Function[S, CompletionStage[Optional[(S, E)]]]): Source[E, NotUsed] = new Source( scaladsl.Source.unfoldAsync(s)( - (s: S) ⇒ f.apply(s).map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext))) + (s: S) ⇒ f.apply(s).toScala.map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext))) /** * Create a `Source` that immediately ends the stream with the `cause` failure to every connected `Sink`. @@ -262,7 +273,7 @@ object Source { /** * Combines several sources with fan-in strategy like `Merge` or `Concat` and returns `Source`. */ - def combine[T, U](first: Source[T, _], second: Source[T, _], rest: java.util.List[Source[T, _]], + def combine[T, U](first: Source[T, _ <: Any], second: Source[T, _ <: Any], rest: java.util.List[Source[T, _ <: Any]], strategy: function.Function[java.lang.Integer, _ <: Graph[UniformFanInShape[T, U], NotUsed]]): Source[U, NotUsed] = { import scala.collection.JavaConverters._ val seq = if (rest != null) rest.asScala.map(_.asScala) else Seq() @@ -279,12 +290,12 @@ object Source { * there is no space available in the buffer. * * Acknowledgement mechanism is available. - * [[akka.stream.SourceQueue.offer]] returns ``Future[StreamCallbackStatus[Boolean]]`` which completes with `Success(true)` + * [[akka.stream.SourceQueue.offer]] returns `CompletionStage>` which completes with `Success(true)` * if element was added to buffer or sent downstream. It completes with `Success(false)` if element was dropped. Can also complete * with [[akka.stream.StreamCallbackStatus.Failure]] - when stream failed or [[akka.stream.StreamCallbackStatus.StreamCompleted]] * when downstream is completed. * - * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete last `offer():Future` + * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete last `offer():CompletionStage` * call when buffer is full. * * You can watch accessibility of stream with [[akka.stream.SourceQueue.watchCompletion]]. @@ -299,7 +310,7 @@ object Source { * @param overflowStrategy Strategy that is used when incoming elements cannot fit inside the buffer */ def queue[T](bufferSize: Int, overflowStrategy: OverflowStrategy): Source[T, SourceQueue[T]] = - new Source(scaladsl.Source.queue(bufferSize, overflowStrategy)) + new Source(scaladsl.Source.queue[T](bufferSize, overflowStrategy).mapMaterializedValue(new SourceQueueAdapter(_))) } @@ -456,22 +467,22 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * Shortcut for running this `Source` with a fold function. * The given function is invoked for every received element, giving it its previous * output (or the given `zero` value) and the element as input. 
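
A hypothetical usage sketch of the `fromCompletionStage` constructor added above, turning an externally completed `CompletableFuture` into a single-element javadsl `Source` (all names are illustrative only):

    object FromCompletionStageSketch {
      import java.util.concurrent.CompletableFuture
      import akka.NotUsed
      import akka.stream.javadsl.Source

      val eventualGreeting = new CompletableFuture[String]()
      // emits exactly one element once the stage completes successfully,
      // or fails the stream if the stage completes exceptionally
      val greeting: Source[String, NotUsed] = Source.fromCompletionStage(eventualGreeting)
      eventualGreeting.complete("hello")
    }
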
- * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure is signaled in the stream. */ - def runFold[U](zero: U, f: function.Function2[U, Out, U], materializer: Materializer): Future[U] = + def runFold[U](zero: U, f: function.Function2[U, Out, U], materializer: Materializer): CompletionStage[U] = runWith(Sink.fold(zero, f), materializer) /** * Shortcut for running this `Source` with a reduce function. * The given function is invoked for every received element, giving it its previous * output (from the second ones) an the element as input. - * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure is signaled in the stream. */ - def runReduce[U >: Out](f: function.Function2[U, U, U], materializer: Materializer): Future[U] = + def runReduce[U >: Out](f: function.Function2[U, U, U], materializer: Materializer): CompletionStage[U] = runWith(Sink.reduce(f), materializer) /** @@ -724,11 +735,11 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap /** * Shortcut for running this `Source` with a foreach procedure. The given procedure is invoked * for each received element. - * The returned [[scala.concurrent.Future]] will be completed with `Success` when reaching the - * normal end of the stream, or completed with `Failure` if there is a failure is signaled in + * The returned [[java.util.concurrent.CompletionStage]] will be completed normally when reaching the + * normal end of the stream, or completed exceptionally if there is a failure is signaled in * the stream. */ - def runForeach(f: function.Procedure[Out], materializer: Materializer): Future[Done] = + def runForeach(f: function.Procedure[Out], materializer: Materializer): CompletionStage[Done] = runWith(Sink.foreach(f), materializer) // COMMON OPS // @@ -790,66 +801,66 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. 
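
Because `runForeach` now hands back a `CompletionStage[Done]`, completion can be chained with standard JDK combinators. The following is only an assumed usage pattern, not part of the patch (object and method names are invented); it terminates the `ActorSystem` once the stream has been fully consumed:

    object ForeachSketch {
      import java.util.concurrent.CompletionStage
      import akka.NotUsed
      import akka.actor.ActorSystem
      import akka.japi.function
      import akka.stream.Materializer
      import akka.stream.javadsl.Source

      def printAllThenShutdown(src: Source[String, NotUsed], system: ActorSystem,
                               mat: Materializer): CompletionStage[Void] =
        src.runForeach(new function.Procedure[String] {
          override def apply(elem: String): Unit = println(elem)
        }, mat).thenRun(new Runnable {
          override def run(): Unit = { system.terminate() }
        })
    }
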
* - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. * - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Source[T, Mat] = - new Source(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] = + new Source(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * - * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures + * The function `f` is always invoked on the elements in the order they arrive (even though the result of the CompletionStages * returned by `f` might be emitted in a different order). 
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Source[T, Mat] = - new Source(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] = + new Source(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala b/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala index d75bfca3ec..b890b980b3 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala @@ -4,14 +4,12 @@ package akka.stream.javadsl import java.io.{ InputStream, OutputStream } - import akka.japi.function import akka.stream.{ scaladsl, javadsl, ActorAttributes } import akka.stream.io.IOResult import akka.util.ByteString - -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage /** * Converters for interacting with the blocking `java.io` streams APIs @@ -20,7 +18,7 @@ object StreamConverters { /** * Sink which writes incoming [[ByteString]]s to an [[OutputStream]] created by the given function. * - * Materializes a [[Future]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * Materializes a [[CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or @@ -28,8 +26,8 @@ object StreamConverters { * * @param f A Creator which creates an OutputStream to write to */ - def fromOutputStream(f: function.Creator[OutputStream]): javadsl.Sink[ByteString, Future[IOResult]] = - new Sink(scaladsl.StreamConverters.fromOutputStream(() ⇒ f.create())).asInstanceOf[javadsl.Sink[ByteString, Future[IOResult]]] + def fromOutputStream(f: function.Creator[OutputStream]): javadsl.Sink[ByteString, CompletionStage[IOResult]] = + new Sink(scaladsl.StreamConverters.fromOutputStream(() ⇒ f.create()).toCompletionStage()) /** * Creates a Sink which when materialized will return an [[java.io.InputStream]] which it is possible @@ -67,10 +65,10 @@ object StreamConverters { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. 
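
Pulling the pieces together, a hedged sketch of the new `mapAsync` signature in use: the supplied function returns a `CompletionStage` (built here with `CompletableFuture.supplyAsync` as a stand-in for a real asynchronous call), and the operator bridges it to the scaladsl implementation with `toScala` as shown in the hunks above. All names are illustrative:

    object MapAsyncSketch {
      import java.util.concurrent.{ CompletableFuture, CompletionStage }
      import java.util.function.Supplier
      import akka.NotUsed
      import akka.japi.function
      import akka.stream.javadsl.Source

      // invented stand-in for a real asynchronous lookup
      def lookup(id: String): CompletionStage[String] =
        CompletableFuture.supplyAsync(new Supplier[String] {
          override def get(): String = "resolved-" + id
        })

      val resolved: Source[String, NotUsed] =
        Source.single("42").mapAsync(4, new function.Function[String, CompletionStage[String]] {
          override def apply(id: String): CompletionStage[String] = lookup(id)
        })
    }
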
+ * It materializes a [[CompletionStage]] containing the number of bytes read from the source file upon completion. */ - def fromInputStream(in: function.Creator[InputStream], chunkSize: Int): javadsl.Source[ByteString, Future[IOResult]] = - new Source(scaladsl.StreamConverters.fromInputStream(() ⇒ in.create(), chunkSize)).asInstanceOf[Source[ByteString, Future[IOResult]]] + def fromInputStream(in: function.Creator[InputStream], chunkSize: Int): javadsl.Source[ByteString, CompletionStage[IOResult]] = + new Source(scaladsl.StreamConverters.fromInputStream(() ⇒ in.create(), chunkSize).toCompletionStage()) /** * Creates a Source from an [[java.io.InputStream]] created by the given function. @@ -80,10 +78,10 @@ object StreamConverters { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * It materializes a [[CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, * and a possible exception if IO operation was not completed successfully. */ - def fromInputStream(in: function.Creator[InputStream]): javadsl.Source[ByteString, Future[IOResult]] = fromInputStream(in, 8192) + def fromInputStream(in: function.Creator[InputStream]): javadsl.Source[ByteString, CompletionStage[IOResult]] = fromInputStream(in, 8192) /** * Creates a Source which when materialized will return an [[java.io.OutputStream]] which it is possible diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala index ca5d32d0ab..ab243019ca 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala @@ -13,10 +13,11 @@ import akka.stream.stage.Stage import scala.collection.immutable import scala.collection.JavaConverters._ import scala.annotation.unchecked.uncheckedVariance -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage /** * A “stream of streams” sub-flow of data elements, e.g. produced by `groupBy`. @@ -166,66 +167,66 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. 
* - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. * - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubFlow[In, T, Mat] = - new SubFlow(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] = + new SubFlow(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * - * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures + * The function `f` is always invoked on the elements in the order they arrive (even though the result of the CompletionStages * returned by `f` might be emitted in a different order). 
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages have been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubFlow[In, T, Mat] = - new SubFlow(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] = + new SubFlow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala b/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala index 137904da40..e8bed892ba 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala @@ -13,10 +13,11 @@ import akka.stream.stage.Stage import scala.collection.immutable import scala.collection.JavaConverters._ import scala.annotation.unchecked.uncheckedVariance -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * A “stream of streams” sub-flow of data elements, e.g. produced by `groupBy`. @@ -162,66 +163,66 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. 
* - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubSource[T, Mat] = - new SubSource(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] = + new SubSource(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures * returned by `f` might be emitted in a different order). 
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStage returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStage reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStage has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubSource[T, Mat] = - new SubSource(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] = + new SubSource(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala index 113321a448..c4112f80f5 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala @@ -6,11 +6,9 @@ package akka.stream.javadsl import java.lang.{ Iterable ⇒ JIterable } import java.util.Optional import akka.NotUsed - import scala.collection.immutable import scala.concurrent.duration._ import java.net.InetSocketAddress -import scala.concurrent.Future import scala.util.control.NoStackTrace import akka.actor.ActorSystem import akka.actor.ExtendedActorSystem @@ -21,8 +19,9 @@ import akka.stream.scaladsl import akka.util.ByteString import akka.japi.Util.immutableSeq import akka.io.Inet.SocketOption - import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { @@ -39,9 +38,9 @@ object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { * Asynchronously triggers the unbinding of the port that was bound by the materialization of the `connections` * [[Source]]. * - * The produced [[scala.concurrent.Future]] is fulfilled when the unbinding has been completed. + * The produced [[java.util.concurrent.CompletionStage]] is fulfilled when the unbinding has been completed. */ - def unbind(): Future[Unit] = delegate.unbind() + def unbind(): CompletionStage[Unit] = delegate.unbind().toJava } /** @@ -128,10 +127,10 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { backlog: Int, options: JIterable[SocketOption], halfClose: Boolean, - idleTimeout: Duration): Source[IncomingConnection, Future[ServerBinding]] = + idleTimeout: Duration): Source[IncomingConnection, CompletionStage[ServerBinding]] = Source.fromGraph(delegate.bind(interface, port, backlog, immutableSeq(options), halfClose, idleTimeout) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Tcp.ServerBinding]] without specifying options. @@ -141,10 +140,10 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { * [[akka.stream.scaladsl.RunnableGraph]] the server is not immediately available. 
Only after the materialized future * completes is the server ready to accept client connections. */ - def bind(interface: String, port: Int): Source[IncomingConnection, Future[ServerBinding]] = + def bind(interface: String, port: Int): Source[IncomingConnection, CompletionStage[ServerBinding]] = Source.fromGraph(delegate.bind(interface, port) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates an [[Tcp.OutgoingConnection]] instance representing a prospective TCP client connection to the given endpoint. @@ -167,16 +166,16 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { options: JIterable[SocketOption], halfClose: Boolean, connectTimeout: Duration, - idleTimeout: Duration): Flow[ByteString, ByteString, Future[OutgoingConnection]] = + idleTimeout: Duration): Flow[ByteString, ByteString, CompletionStage[OutgoingConnection]] = Flow.fromGraph(delegate.outgoingConnection(remoteAddress, localAddress.asScala, immutableSeq(options), halfClose, connectTimeout, idleTimeout) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec))) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava)) /** * Creates an [[Tcp.OutgoingConnection]] without specifying options. * It represents a prospective TCP client connection to the given endpoint. */ - def outgoingConnection(host: String, port: Int): Flow[ByteString, ByteString, Future[OutgoingConnection]] = + def outgoingConnection(host: String, port: Int): Flow[ByteString, ByteString, CompletionStage[OutgoingConnection]] = Flow.fromGraph(delegate.outgoingConnection(new InetSocketAddress(host, port)) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec))) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava)) } diff --git a/akka-stream/src/main/scala/akka/stream/Queue.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala similarity index 63% rename from akka-stream/src/main/scala/akka/stream/Queue.scala rename to akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala index ddd9eed8d5..315de23ec5 100644 --- a/akka-stream/src/main/scala/akka/stream/Queue.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala @@ -1,9 +1,11 @@ /** * Copyright (C) 2015 Typesafe Inc. */ -package akka.stream +package akka.stream.scaladsl import scala.concurrent.Future +import akka.Done +import akka.stream.QueueOfferResult /** * This trait allows to have the queue as a data source for some stream. 
@@ -26,7 +28,7 @@ trait SourceQueue[T] { /** * Method returns future that completes when stream is completed and fails when stream failed */ - def watchCompletion(): Future[Unit] + def watchCompletion(): Future[Done] } /** @@ -43,33 +45,3 @@ trait SinkQueue[T] { */ def pull(): Future[Option[T]] } - -sealed abstract class QueueOfferResult - -/** - * Contains types that is used as return types for async callbacks to streams - */ -object QueueOfferResult { - - /** - * Type is used to indicate that stream is successfully enqueued an element - */ - final case object Enqueued extends QueueOfferResult - - /** - * Type is used to indicate that stream is dropped an element - */ - final case object Dropped extends QueueOfferResult - - /** - * Type is used to indicate that stream is failed before or during call to the stream - * @param cause - exception that stream failed with - */ - final case class Failure(cause: Throwable) extends QueueOfferResult - - /** - * Type is used to indicate that stream is completed before call - */ - case object QueueClosed extends QueueOfferResult -} - diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala index d9463aa84d..e9dce93416 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala @@ -16,7 +16,6 @@ import akka.stream.stage.{ Context, PushStage, SyncDirective, TerminationDirecti import akka.stream.{ javadsl, _ } import akka.util.ByteString import org.reactivestreams.{ Publisher, Subscriber } - import scala.annotation.tailrec import scala.concurrent.duration.{ FiniteDuration, _ } import scala.concurrent.{ ExecutionContext, Future } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala index d264dbc435..3ca8cc5c85 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala @@ -23,6 +23,8 @@ import scala.collection.immutable import scala.concurrent.duration.{ FiniteDuration, _ } import scala.concurrent.{ Future, Promise } import akka.stream.impl.fusing.Buffer +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * A `Source` is a set of stream processing steps that has one open output. It can comprise @@ -230,6 +232,15 @@ object Source { def fromFuture[T](future: Future[T]): Source[T, NotUsed] = fromGraph(new FutureSource(future)) + /** + * Start a new `Source` from the given `Future`. The stream will consist of + * one element when the `Future` is completed with a successful value, which + * may happen before or after materializing the `Flow`. + * The stream terminates with a failure if the `Future` is completed with a failure. + */ + def fromCompletionStage[T](future: CompletionStage[T]): Source[T, NotUsed] = + fromGraph(new FutureSource(future.toScala)) + /** * Elements are emitted periodically with the specified interval. * The tick element will be delivered to downstream consumers that has requested any elements. 
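
The javadsl `Sink.queue()` earlier in this patch relies on `SinkQueueAdapter`, which lives in `akka.stream.impl` and is not shown in these hunks. Conceptually it bridges the scaladsl trait above (whose `pull()` yields `Future[Option[T]]`) to the javadsl trait (whose `pull()` yields `CompletionStage[Optional[T]]`). The class below is a sketch under that assumption, not the actual implementation:

    import java.util.Optional
    import java.util.concurrent.CompletionStage
    import scala.compat.java8.FutureConverters._
    import scala.compat.java8.OptionConverters._
    import scala.concurrent.ExecutionContext

    class JavaSinkQueueSketch[T](delegate: akka.stream.scaladsl.SinkQueue[T])(implicit ec: ExecutionContext)
      extends akka.stream.javadsl.SinkQueue[T] {
      // Future[Option[T]] from the scaladsl queue becomes CompletionStage[Optional[T]]
      override def pull(): CompletionStage[Optional[T]] =
        delegate.pull().map(_.asJava).toJava
    }
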
diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala index 1e42d15522..a46c99809e 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala @@ -3,6 +3,11 @@ */ package akka.stream +import java.util.concurrent.CompletionStage +import scala.concurrent.Future +import scala.compat.java8.FutureConverters +import akka.japi.function + /** * Scala API: The flow DSL allows the formulation of stream transformations based on some * input. The starting point is called [[Source]] and can be a collection, an iterator, @@ -49,4 +54,12 @@ package akka.stream * is fully started and active. */ package object scaladsl { + implicit class SourceToCompletionStage[Out, T](val src: Source[Out, Future[T]]) extends AnyVal { + def toCompletionStage(): Source[Out, CompletionStage[T]] = + src.mapMaterializedValue(FutureConverters.toJava) + } + implicit class SinkToCompletionStage[In, T](val sink: Sink[In, Future[T]]) extends AnyVal { + def toCompletionStage(): Sink[In, CompletionStage[T]] = + sink.mapMaterializedValue(FutureConverters.toJava) + } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index ea27be8442..66d86faad6 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -112,7 +112,7 @@ object Dependencies { // TODO check if `l ++=` everywhere expensive? val l = libraryDependencies - val actor = l ++= Seq(config) + val actor = l ++= Seq(config, java8Compat) val testkit = l ++= Seq(Test.junit, Test.scalatest.value) ++ Test.metricsAll @@ -187,7 +187,6 @@ object Dependencies { lazy val stream = l ++= Seq[sbt.ModuleID]( sslConfigAkka, reactiveStreams, - java8Compat, Test.junitIntf, Test.scalatest.value) diff --git a/project/MiMa.scala b/project/MiMa.scala index c05228fcc5..2a2ebc329f 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -609,7 +609,11 @@ object MiMa extends AutoPlugin { ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$child"), // #19487 - FilterAnyProblem("akka.actor.dungeon.Children") + FilterAnyProblem("akka.actor.dungeon.Children"), + + // #19440 + ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.PipeToSupport.pipeCompletionStage"), + ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.FutureTimeoutSupport.afterCompletionStage") ) ) }
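
For reference, a small sketch of how the package-object enrichments above are meant to be used: any scaladsl `Sink` (or `Source`) whose materialized value is a `Future[T]` can be re-typed to `CompletionStage[T]` before being wrapped for the Java API (the enclosing object name is invented):

    object MaterializedValueSketch {
      import java.util.concurrent.CompletionStage
      import akka.Done
      import akka.stream.scaladsl.{ Sink, SinkToCompletionStage }

      // scaladsl Sink.ignore materializes Future[Done]; the enrichment re-types it so
      // the javadsl wrapper can expose CompletionStage[Done] to Java callers
      val ignoreForJava: Sink[Any, CompletionStage[Done]] = Sink.ignore.toCompletionStage()
    }
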