From f98c1946d8993f85dbf8aa9511ef2bace1f4cd65 Mon Sep 17 00:00:00 2001 From: Hawstein Date: Fri, 3 Jun 2016 19:43:42 +0800 Subject: [PATCH 01/85] +doc #20466 example snippet for akka http java dsl: BasicDirectives (#20647) --- .../BasicDirectivesExamplesTest.java | 788 ++++++++++++++++++ .../basic-directives/cancelRejection.rst | 3 +- .../basic-directives/cancelRejections.rst | 3 +- .../directives/basic-directives/extract.rst | 3 +- .../extractExecutionContext.rst | 3 +- .../basic-directives/extractLog.rst | 3 +- .../basic-directives/extractMaterializer.rst | 3 +- .../basic-directives/extractRequest.rst | 3 +- .../extractRequestContext.rst | 3 +- .../basic-directives/extractSettings.rst | 3 +- .../basic-directives/extractUnmatchedPath.rst | 3 +- .../basic-directives/extractUri.rst | 3 +- .../basic-directives/mapInnerRoute.rst | 3 +- .../basic-directives/mapRejections.rst | 3 +- .../basic-directives/mapRequest.rst | 3 +- .../basic-directives/mapRequestContext.rst | 3 +- .../basic-directives/mapResponse.rst | 6 +- .../basic-directives/mapResponseEntity.rst | 3 +- .../basic-directives/mapResponseHeaders.rst | 3 +- .../basic-directives/mapRouteResult.rst | 3 +- .../basic-directives/mapRouteResultFuture.rst | 3 +- .../basic-directives/mapRouteResultPF.rst | 4 +- .../basic-directives/mapRouteResultWith.rst | 3 +- .../basic-directives/mapRouteResultWithPF.rst | 3 +- .../basic-directives/mapSettings.rst | 3 +- .../basic-directives/mapUnmatchedPath.rst | 3 +- .../directives/basic-directives/pass.rst | 3 +- .../directives/basic-directives/provide.rst | 3 +- .../basic-directives/recoverRejections.rst | 3 +- .../recoverRejectionsWith.rst | 3 +- .../basic-directives/withExecutionContext.rst | 3 +- .../directives/basic-directives/withLog.rst | 3 +- .../basic-directives/withMaterializer.rst | 3 +- .../basic-directives/withSettings.rst | 4 +- .../BasicDirectivesExamplesSpec.scala | 24 +- .../javadsl/testkit/TestRouteResult.scala | 11 +- .../http/javadsl/server/RouteResult.scala | 
18 + .../server/directives/BasicDirectives.scala | 21 +- 38 files changed, 914 insertions(+), 52 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java new file mode 100644 index 0000000000..fa5a520460 --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java @@ -0,0 +1,788 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. + */ +package docs.http.javadsl.server.directives; + +import akka.actor.ActorSystem; +import akka.dispatch.ExecutionContexts; +import akka.event.Logging; +import akka.event.LoggingAdapter; +import akka.http.javadsl.model.ContentTypes; +import akka.http.javadsl.model.HttpEntities; +import akka.http.javadsl.model.HttpEntity; +import akka.http.javadsl.model.HttpMethods; +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.HttpResponse; +import akka.http.javadsl.model.ResponseEntity; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.model.headers.RawHeader; +import akka.http.javadsl.model.headers.Server; +import akka.http.javadsl.model.headers.ProductVersion; +import akka.http.javadsl.settings.RoutingSettings; +import akka.http.javadsl.testkit.JUnitRouteTest; +import akka.http.javadsl.server.*; +import akka.japi.pf.PFBuilder; +import akka.stream.ActorMaterializer; +import akka.stream.ActorMaterializerSettings; +import akka.stream.javadsl.FileIO; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.util.ByteString; +import org.junit.Ignore; +import org.junit.Test; +import scala.concurrent.ExecutionContextExecutor; + +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collections; +import java.util.Optional; +import 
java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.Executors; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.stream.StreamSupport; + +public class BasicDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testExtract() { + //#extract + final Route route = extract( + ctx -> ctx.getRequest().getUri().toString().length(), + len -> complete("The length of the request URI is " + len) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/abcdef")) + .assertEntity("The length of the request URI is 25"); + //#extract + } + + @Test + public void testExtractLog() { + //#extractLog + final Route route = extractLog(log -> { + log.debug("I'm logging things in much detail..!"); + return complete("It's amazing!"); + }); + + // tests: + testRoute(route).run(HttpRequest.GET("/abcdef")) + .assertEntity("It's amazing!"); + //#extractLog + } + + @Test + public void testWithMaterializer() { + //#withMaterializer + final ActorMaterializerSettings settings = ActorMaterializerSettings.create(system()); + final ActorMaterializer special = ActorMaterializer.create(settings, system(), "special"); + + final Route sample = path("sample", () -> + extractMaterializer(mat -> + onSuccess(() -> + // explicitly use the materializer: + Source.single("Materialized by " + mat.hashCode() + "!") + .runWith(Sink.head(), mat), this::complete + ) + ) + ); + + final Route route = route( + pathPrefix("special", () -> + withMaterializer(special, () -> sample) // `special` materializer will be used + ), + sample // default materializer will be used + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("Materialized by " + materializer().hashCode()+ "!"); + testRoute(route).run(HttpRequest.GET("/special/sample")) + .assertEntity("Materialized by " + special.hashCode()+ "!"); + //#withMaterializer + } + + @Test + 
public void testExtractMaterializer() { + //#extractMaterializer + final Route route = path("sample", () -> + extractMaterializer(mat -> + onSuccess(() -> + // explicitly use the materializer: + Source.single("Materialized by " + mat.hashCode() + "!") + .runWith(Sink.head(), mat), this::complete + ) + ) + ); // default materializer will be used + + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("Materialized by " + materializer().hashCode()+ "!"); + //#extractMaterializer + } + + @Test + public void testWithExecutionContext() { + //#withExecutionContext + + final ExecutionContextExecutor special = + ExecutionContexts.fromExecutor(Executors.newFixedThreadPool(1)); + + final Route sample = path("sample", () -> + extractExecutionContext(executor -> + onSuccess(() -> + CompletableFuture.supplyAsync(() -> + "Run on " + executor.hashCode() + "!", executor + ), this::complete + ) + ) + ); + + final Route route = route( + pathPrefix("special", () -> + // `special` execution context will be used + withExecutionContext(special, () -> sample) + ), + sample // default execution context will be used + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("Run on " + system().dispatcher().hashCode() + "!"); + testRoute(route).run(HttpRequest.GET("/special/sample")) + .assertEntity("Run on " + special.hashCode() + "!"); + //#withExecutionContext + } + + @Test + public void testExtractExecutionContext() { + //#extractExecutionContext + final Route route = path("sample", () -> + extractExecutionContext(executor -> + onSuccess(() -> + CompletableFuture.supplyAsync( + // uses the `executor` ExecutionContext + () -> "Run on " + executor.hashCode() + "!", executor + ), str -> complete(str) + ) + ) + ); + + //tests: + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("Run on " + system().dispatcher().hashCode() + "!"); + //#extractExecutionContext + } + + @Test + public void testWithLog() { + //#withLog + final 
LoggingAdapter special = Logging.getLogger(system(), "SpecialRoutes"); + + final Route sample = path("sample", () -> + extractLog(log -> { + final String msg = "Logging using " + log + "!"; + log.debug(msg); + return complete(msg); + } + ) + ); + + final Route route = route( + pathPrefix("special", () -> + withLog(special, () -> sample) + ), + sample + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("Logging using " + system().log() + "!"); + testRoute(route).run(HttpRequest.GET("/special/sample")) + .assertEntity("Logging using " + special + "!"); + //#withLog + } + + @Ignore("Ignore compile-only test") + @Test + public void testWithSettings() { + //#withSettings + final RoutingSettings special = + RoutingSettings + .create(system().settings().config()) + .withFileIODispatcher("special-io-dispatcher"); + + final Route sample = path("sample", () -> { + // internally uses the configured fileIODispatcher: + // ContentTypes.APPLICATION_JSON, source + final Source source = + FileIO.fromPath(Paths.get("example.json")) + .mapMaterializedValue(completionStage -> (Object) completionStage); + return complete( + HttpResponse.create() + .withEntity(HttpEntities.create(ContentTypes.APPLICATION_JSON, source)) + ); + }); + + final Route route = get(() -> + route( + pathPrefix("special", () -> + // `special` file-io-dispatcher will be used to read the file + withSettings(special, () -> sample) + ), + sample // default file-io-dispatcher will be used to read the file + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/special/sample")) + .assertEntity("{}"); + testRoute(route).run(HttpRequest.GET("/sample")) + .assertEntity("{}"); + //#withSettings + } + + @Test + public void testMapResponse() { + //#mapResponse + final Route route = mapResponse( + response -> response.withStatus(StatusCodes.BAD_GATEWAY), + () -> complete("abc") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/abcdef?ghi=12")) + 
.assertStatusCode(StatusCodes.BAD_GATEWAY); + //#mapResponse + } + + @Test + public void testMapResponseAdvanced() { + //#mapResponse-advanced + class ApiRoute { + + private final ActorSystem system; + + private final LoggingAdapter log; + + private final HttpEntity nullJsonEntity = + HttpEntities.create(ContentTypes.APPLICATION_JSON, "{}"); + + public ApiRoute(ActorSystem system) { + this.system = system; + this.log = Logging.getLogger(system, "ApiRoutes"); + } + + private HttpResponse nonSuccessToEmptyJsonEntity(HttpResponse response) { + if (response.status().isSuccess()) { + return response; + } else { + log.warning( + "Dropping response entity since response status code was: " + response.status()); + return response.withEntity((ResponseEntity) nullJsonEntity); + } + } + + /** Wrapper for all of our JSON API routes */ + private Route apiRoute(Supplier innerRoutes) { + return mapResponse(this::nonSuccessToEmptyJsonEntity, innerRoutes); + } + } + + final ApiRoute api = new ApiRoute(system()); + + final Route route = api.apiRoute(() -> + get(() -> complete(StatusCodes.INTERNAL_SERVER_ERROR)) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("{}"); + //#mapResponse-advanced + } + + @Test + public void testMapRouteResult() { + //#mapRouteResult + // this directive is a joke, don't do that :-) + final Route route = mapRouteResult(r -> { + if (r instanceof Complete) { + final HttpResponse response = ((Complete) r).getResponse(); + return RouteResults.complete(response.withStatus(200)); + } else { + return r; + } + }, () -> complete(StatusCodes.ACCEPTED)); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertStatusCode(StatusCodes.OK); + //#mapRouteResult + } + + @Test + public void testMapRouteResultFuture() { + //#mapRouteResultFuture + final Route route = mapRouteResultFuture(cr -> + cr.exceptionally(t -> { + if (t instanceof IllegalArgumentException) { + return RouteResults.complete( + 
HttpResponse.create().withStatus(StatusCodes.INTERNAL_SERVER_ERROR)); + } else { + return null; + } + }).thenApply(rr -> { + if (rr instanceof Complete) { + final HttpResponse res = ((Complete) rr).getResponse(); + return RouteResults.complete( + res.addHeader(Server.create(ProductVersion.create("MyServer", "1.0")))); + } else { + return rr; + } + }), () -> complete("Hello world!")); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertStatusCode(StatusCodes.OK) + .assertHeaderExists(Server.create(ProductVersion.create("MyServer", "1.0"))); + //#mapRouteResultFuture + } + + @Test + public void testMapResponseEntity() { + //#mapResponseEntity + final Function prefixEntity = entity -> { + if (entity instanceof HttpEntity.Strict) { + final HttpEntity.Strict strict = (HttpEntity.Strict) entity; + return HttpEntities.create( + strict.getContentType(), + ByteString.fromString("test").concat(strict.getData())); + } else { + throw new IllegalStateException("Unexpected entity type"); + } + }; + + final Route route = mapResponseEntity(prefixEntity, () -> complete("abc")); + + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("testabc"); + //#mapResponseEntity + } + + @Test + public void testMapResponseHeaders() { + //#mapResponseHeaders + // adds all request headers to the response + final Route echoRequestHeaders = extract( + ctx -> ctx.getRequest().getHeaders(), + headers -> respondWithHeaders(headers, () -> complete("test")) + ); + + final Route route = mapResponseHeaders(headers -> { + headers.removeIf(header -> header.lowercaseName().equals("id")); + return headers; + }, () -> echoRequestHeaders); + + // tests: + testRoute(route).run(HttpRequest.GET("/").addHeaders( + Arrays.asList(RawHeader.create("id", "12345"),RawHeader.create("id2", "67890")))) + .assertHeaderKindNotExists("id") + .assertHeaderExists("id2", "67890"); + //#mapResponseHeaders + } + + @Ignore("Not implemented yet") + @Test + public void testMapInnerRoute() { + //#mapInnerRoute + 
// TODO: implement mapInnerRoute + //#mapInnerRoute + } + + @Test + public void testMapRejections() { + //#mapRejections + // ignore any rejections and replace them by AuthorizationFailedRejection + final Route route = mapRejections( + rejections -> Collections.singletonList((Rejection) Rejections.authorizationFailed()), + () -> path("abc", () -> complete("abc")) + ); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(Rejections.authorizationFailed()); + testRoute(route).run(HttpRequest.GET("/abc")) + .assertStatusCode(StatusCodes.OK); + //#mapRejections + } + + @Test + public void testRecoverRejections() { + //#recoverRejections + final Function, Optional> neverAuth = + creds -> Optional.empty(); + final Function, Optional> alwaysAuth = + creds -> Optional.of("id"); + + final Route originalRoute = pathPrefix("auth", () -> + route( + path("never", () -> + authenticateBasic("my-realm", neverAuth, obj -> complete("Welcome to the bat-cave!")) + ), + path("always", () -> + authenticateBasic("my-realm", alwaysAuth, obj -> complete("Welcome to the secret place!")) + ) + ) + ); + + final Function, Boolean> existsAuthenticationFailedRejection = + rejections -> + StreamSupport.stream(rejections.spliterator(), false) + .anyMatch(r -> r instanceof AuthenticationFailedRejection); + + final Route route = recoverRejections(rejections -> { + if (existsAuthenticationFailedRejection.apply(rejections)) { + return RouteResults.complete( + HttpResponse.create().withEntity("Nothing to see here, move along.")); + } else if (!rejections.iterator().hasNext()) { // see "Empty Rejections" for more details + return RouteResults.complete( + HttpResponse.create().withStatus(StatusCodes.NOT_FOUND) + .withEntity("Literally nothing to see here.")); + } else { + return RouteResults.rejected(rejections); + } + }, () -> originalRoute); + + // tests: + testRoute(route).run(HttpRequest.GET("/auth/never")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("Nothing to 
see here, move along."); + testRoute(route).run(HttpRequest.GET("/auth/always")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("Welcome to the secret place!"); + testRoute(route).run(HttpRequest.GET("/auth/does_not_exist")) + .assertStatusCode(StatusCodes.NOT_FOUND) + .assertEntity("Literally nothing to see here."); + //#recoverRejections + } + + @Test + public void testRecoverRejectionsWith() { + //#recoverRejectionsWith + final Function, Optional> neverAuth = + creds -> Optional.empty(); + + final Route originalRoute = pathPrefix("auth", () -> + path("never", () -> + authenticateBasic("my-realm", neverAuth, obj -> complete("Welcome to the bat-cave!")) + ) + ); + + final Function, Boolean> existsAuthenticationFailedRejection = + rejections -> + StreamSupport.stream(rejections.spliterator(), false) + .anyMatch(r -> r instanceof AuthenticationFailedRejection); + + final Route route = recoverRejectionsWith( + rejections -> CompletableFuture.supplyAsync(() -> { + if (existsAuthenticationFailedRejection.apply(rejections)) { + return RouteResults.complete( + HttpResponse.create().withEntity("Nothing to see here, move along.")); + } else { + return RouteResults.rejected(rejections); + } + }), () -> originalRoute); + + // tests: + testRoute(route).run(HttpRequest.GET("/auth/never")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("Nothing to see here, move along."); + //#recoverRejectionsWith + } + + @Test + public void testMapRequest() { + //#mapRequest + final Route route = mapRequest(req -> + req.withMethod(HttpMethods.POST), () -> + extractRequest(req -> complete("The request method was " + req.method().name())) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("The request method was POST"); + //#mapRequest + } + + @Test + public void testMapRequestContext() { + //#mapRequestContext + final Route route = mapRequestContext(ctx -> + ctx.withRequest(HttpRequest.create().withMethod(HttpMethods.POST)), () -> + extractRequest(req -> 
complete(req.method().value())) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/abc/def/ghi")) + .assertEntity("POST"); + //#mapRequestContext + } + + @Test + public void testMapRouteResult0() { + //#mapRouteResult + final Route route = mapRouteResult(rr -> { + final Iterable rejections = Collections.singletonList(Rejections.authorizationFailed()); + return RouteResults.rejected(rejections); + }, () -> complete("abc")); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(Rejections.authorizationFailed()); + //#mapRouteResult + } + + public static final class MyCustomRejection implements akka.http.scaladsl.server.Rejection {} + + @Test + public void testMapRouteResultPF() { + //#mapRouteResultPF + final Route route = mapRouteResultPF( + new PFBuilder() + .match(Rejected.class, rejected -> { + final Iterable rejections = + Collections.singletonList(Rejections.authorizationFailed()); + return RouteResults.rejected(rejections); + }).build(), () -> reject(new MyCustomRejection())); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(Rejections.authorizationFailed()); + //#mapRouteResultPF + } + + @Test + public void testMapRouteResultWithPF() { + //#mapRouteResultWithPF + final Route route = mapRouteResultWithPF( + new PFBuilder>() + .match(Rejected.class, rejected -> CompletableFuture.supplyAsync(() -> { + final Iterable rejections = + Collections.singletonList(Rejections.authorizationFailed()); + return RouteResults.rejected(rejections); + }) + ).build(), () -> reject(new MyCustomRejection())); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(Rejections.authorizationFailed()); + //#mapRouteResultWithPF + } + + @Test + public void testMapRouteResultWith() { + //#mapRouteResultWith + final Route route = mapRouteResultWith(rr -> CompletableFuture.supplyAsync(() -> { + if (rr instanceof Rejected) { + final Iterable rejections = + 
Collections.singletonList(Rejections.authorizationFailed()); + return RouteResults.rejected(rejections); + } else { + return rr; + } + }), () -> reject(new MyCustomRejection())); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(Rejections.authorizationFailed()); + //#mapRouteResultWith + } + + @Test + public void testPass() { + //#pass + final Route route = pass(() -> complete("abc")); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("abc"); + //#pass + } + + private Route providePrefixedStringRoute(String value) { + return provide("prefix:" + value, this::complete); + } + + @Test + public void testProvide() { + //#provide + final Route route = providePrefixedStringRoute("test"); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("prefix:test"); + //#provide + } + + @Ignore("Test failed") + @Test + public void testCancelRejections() { + //#cancelRejections + final Predicate isMethodRejection = p -> p instanceof MethodRejection; + final Route route = cancelRejections( + isMethodRejection, () -> post(() -> complete("Result")) + ); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(); + //#cancelRejections + } + + @Ignore("Test failed") + @Test + public void testCancelRejection() { + //#cancelRejection + final Route route = cancelRejection(Rejections.method(HttpMethods.POST), () -> + post(() -> complete("Result")) + ); + + // tests: + runRouteUnSealed(route, HttpRequest.GET("/")) + .assertRejections(); + //#cancelRejection + } + + @Test + public void testExtractRequest() { + //#extractRequest + final Route route = extractRequest(request -> + complete("Request method is " + request.method().name() + + " and content-type is " + request.entity().getContentType()) + ); + + // tests: + testRoute(route).run(HttpRequest.POST("/").withEntity("text")) + .assertEntity("Request method is POST and content-type is text/plain; charset=UTF-8"); + 
testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("Request method is GET and content-type is none/none"); + //#extractRequest + } + + @Test + public void testExtractSettings() { + //#extractSettings + final Route route = extractSettings(settings -> + complete("RoutingSettings.renderVanityFooter = " + settings.getRenderVanityFooter()) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("RoutingSettings.renderVanityFooter = true"); + //#extractSettings + } + + @Test + public void testMapSettings() { + //#mapSettings + final Route route = mapSettings(settings -> + settings.withFileGetConditional(false), () -> + extractSettings(settings -> + complete("RoutingSettings.fileGetConditional = " + settings.getFileGetConditional()) + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("RoutingSettings.fileGetConditional = false"); + //#mapSettings + } + + @Test + public void testExtractRequestContext() { + //#extractRequestContext + final Route route = extractRequestContext(ctx -> { + ctx.getLog().debug("Using access to additional context availablethings, like the logger."); + final HttpRequest request = ctx.getRequest(); + return complete("Request method is " + request.method().name() + + " and content-type is " + request.entity().getContentType()); + }); + + // tests: + testRoute(route).run(HttpRequest.POST("/").withEntity("text")) + .assertEntity("Request method is POST and content-type is text/plain; charset=UTF-8"); + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("Request method is GET and content-type is none/none"); + //#extractRequestContext + } + + @Test + public void testExtractUri() { + //#extractUri + final Route route = extractUri(uri -> + complete("Full URI: " + uri) + ); + + // tests: + // tests are executed with the host assumed to be "example.com" + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("Full URI: http://example.com/"); + 
testRoute(route).run(HttpRequest.GET("/test")) + .assertEntity("Full URI: http://example.com/test"); + //#extractUri + } + + @Test + public void testMapUnmatchedPath() { + //#mapUnmatchedPath + final Function ignore456 = path -> { + int slashPos = path.indexOf("/"); + if (slashPos != -1) { + String head = path.substring(0, slashPos); + String tail = path.substring(slashPos); + if (head.length() <= 3) { + return tail; + } else { + return path.substring(3); + } + } else { + return path; + } + }; + + final Route route = pathPrefix("123", () -> + mapUnmatchedPath(ignore456, () -> + path("abc", () -> + complete("Content") + ) + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/123/abc")) + .assertEntity("Content"); + testRoute(route).run(HttpRequest.GET("/123456/abc")) + .assertEntity("Content"); + //#mapUnmatchedPath + } + + @Test + public void testExtractUnmatchedPath() { + //#extractUnmatchedPath + final Route route = pathPrefix("abc", () -> + extractUnmatchedPath(remaining -> + complete("Unmatched: '" + remaining + "'") + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/abc")) + .assertEntity("Unmatched: ''"); + testRoute(route).run(HttpRequest.GET("/abc/456")) + .assertEntity("Unmatched: '/456'"); + //#extractUnmatchedPath + } + +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejection.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejection.rst index 8651b87a71..f1912765e2 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejection.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejection.rst @@ -16,4 +16,5 @@ which provides a nicer DSL for building rejection handlers. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#cancelRejection diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejections.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejections.rst index c91ae5649f..5204437de4 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejections.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/cancelRejections.rst @@ -18,4 +18,5 @@ which provides a nicer DSL for building rejection handlers. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#cancelRejections diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extract.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extract.rst index 45bfebf4d0..4896f35e98 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extract.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extract.rst @@ -13,4 +13,5 @@ See :ref:`ProvideDirectives-java` for an overview of similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extract diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst index 878538ca6e..ad37d1975c 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst @@ -14,4 +14,5 @@ See :ref:`-extract-java-` to learn more about how extractions work. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractExecutionContext diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractLog.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractLog.rst index 02e3d7b825..939090ea95 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractLog.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractLog.rst @@ -15,4 +15,5 @@ See :ref:`-extract-java-` and :ref:`ProvideDirectives-java` for an overview of s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractLog diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractMaterializer.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractMaterializer.rst index 447a0698d6..f1ede20d2f 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractMaterializer.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractMaterializer.rst @@ -13,4 +13,5 @@ See also :ref:`-withMaterializer-java-` to see how to customise the used materia Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractMaterializer diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequest.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequest.rst index ac990e314a..91c532ea11 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequest.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequest.rst @@ -13,4 +13,5 @@ directives. See :ref:`Request Directives-java`. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractRequest diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequestContext.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequestContext.rst index 44d1efa7f3..3abec29650 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequestContext.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractRequestContext.rst @@ -16,4 +16,5 @@ See also :ref:`-extractRequest-java-` if only interested in the :class:`HttpRequ Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractRequestContext diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractSettings.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractSettings.rst index a694279c5b..3983ba7e79 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractSettings.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractSettings.rst @@ -13,4 +13,5 @@ It is possible to override the settings for specific sub-routes by using the :re Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractRequestContext diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst index a0a07266c4..4cabc34f83 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst @@ -15,4 +15,5 @@ Use ``mapUnmatchedPath`` to change the value of the unmatched path. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractUnmatchedPath diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUri.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUri.rst index 875ab01f1e..38985f0d68 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUri.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/extractUri.rst @@ -12,4 +12,5 @@ targeted access to parts of the URI. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#extractUri diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst index f2908a90e5..a88cb022bc 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst @@ -12,4 +12,5 @@ with any other route. Usually, the returned route wraps the original one with cu Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapInnerRoute diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRejections.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRejections.rst index 34fdf1d440..351e903cc5 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRejections.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRejections.rst @@ -16,4 +16,5 @@ See :ref:`Response Transforming Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRejections diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequest.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequest.rst index 87c3a8fa3b..a11e8ef1b8 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequest.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequest.rst @@ -16,4 +16,5 @@ See :ref:`Request Transforming Directives-java` for an overview of similar direc Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRequest diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequestContext.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequestContext.rst index 39cd8cc3c7..f5546fa409 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequestContext.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRequestContext.rst @@ -15,4 +15,5 @@ See :ref:`Request Transforming Directives-java` for an overview of similar direc Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRequestContext diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponse.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponse.rst index c4a53d4466..912556d536 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponse.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponse.rst @@ -14,8 +14,10 @@ See also :ref:`-mapResponseHeaders-java-` or :ref:`-mapResponseEntity-java-` for Example: Override status ------------------------ -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapResponse Example: Default to empty JSON response on errors ------------------------------------------------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapResponse-advanced diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst index 8994140991..799c9618c5 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst @@ -13,4 +13,5 @@ See :ref:`Response Transforming Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! 
Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapResponseEntity diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst index eacf9bb0c1..fae2264127 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst @@ -14,4 +14,5 @@ See :ref:`Response Transforming Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapResponseHeaders diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResult.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResult.rst index d440ba759d..764734e1f9 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResult.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResult.rst @@ -14,4 +14,5 @@ See :ref:`Result Transformation Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRouteResult diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst index 0a0e33b8c5..efc21b4515 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst @@ -17,4 +17,5 @@ See :ref:`Result Transformation Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRouteResultFuture diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst index 8ff60a8305..7ed461d4e3 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst @@ -17,4 +17,6 @@ See :ref:`Result Transformation Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRouteResultPF + diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst index b58e4de9ee..7757074126 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst @@ -16,4 +16,5 @@ See :ref:`Result Transformation Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRouteResultWith diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst index bf13964fac..e9f1c5d6eb 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst @@ -17,4 +17,5 @@ See :ref:`Result Transformation Directives-java` for similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapRouteResultWithPF diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapSettings.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapSettings.rst index 763ca2fc73..b54127a8fc 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapSettings.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapSettings.rst @@ -12,4 +12,5 @@ See also :ref:`-withSettings-java-` or :ref:`-extractSettings-java-`. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapSettings diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst index 6cef0c4cc3..de38d61c31 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst @@ -14,4 +14,5 @@ Use ``extractUnmatchedPath`` for extracting the current value of the unmatched p Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#mapUnmatchedPath diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/pass.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/pass.rst index 3547026189..06dc518837 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/pass.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/pass.rst @@ -11,4 +11,5 @@ It is usually used as a "neutral element" when combining directives generically. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#pass diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/provide.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/provide.rst index 290f0f07ef..305ea9319a 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/provide.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/provide.rst @@ -13,4 +13,5 @@ See :ref:`ProvideDirectives-java` for an overview of similar directives. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#provide diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejections.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejections.rst index e561f9c515..78994357c8 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejections.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejections.rst @@ -17,4 +17,5 @@ rejections. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#recoverRejections diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst index 7b010dbdbc..7220a2cfe7 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst @@ -20,4 +20,5 @@ See :ref:`-recoverRejections-java-` (the synchronous equivalent of this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#recoverRejectionsWith diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withExecutionContext.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withExecutionContext.rst index 746cdbb2be..d8de735585 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withExecutionContext.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withExecutionContext.rst @@ -14,4 +14,5 @@ or used by directives which internally extract the materializer without sufracin Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#withExecutionContext diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withLog.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withLog.rst index e183d088b9..e98d6ef0c2 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withLog.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withLog.rst @@ -14,4 +14,5 @@ or used by directives which internally extract the materializer without surfacin Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#withLog diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withMaterializer.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withMaterializer.rst index 8037dd11ff..510b02058e 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withMaterializer.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withMaterializer.rst @@ -14,4 +14,5 @@ or used by directives which internally extract the materializer without sufracin Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#withMaterializer diff --git a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withSettings.rst b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withSettings.rst index 362e269ab1..b284726c08 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withSettings.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/basic-directives/withSettings.rst @@ -13,4 +13,6 @@ or used by directives which internally extract the materializer without sufracin Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java#withSettings + diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala index 64b292a3a5..368a19328e 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala @@ -303,13 +303,11 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { "mapRouteResult" in { //#mapRouteResult // this directive is a joke, don't do that :-) - val makeEverythingOk = mapRouteResult { r => - r match { - case Complete(response) => - // "Everything is OK!" - Complete(response.copy(status = 200)) - case _ => r - } + val makeEverythingOk = mapRouteResult { + case Complete(response) => + // "Everything is OK!" + Complete(response.copy(status = 200)) + case r => r } val route = @@ -591,11 +589,9 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { //#mapRouteResultWith-0 case object MyCustomRejection extends Rejection val rejectRejections = // not particularly useful directive - mapRouteResultWith { res => - res match { - case Rejected(_) => Future(Rejected(List(AuthorizationFailedRejection))) - case _ => Future(res) - } + mapRouteResultWith { + case Rejected(_) => Future(Rejected(List(AuthorizationFailedRejection))) + case res => Future(res) } val route = rejectRejections { @@ -694,7 +690,7 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { // tests: Get("/") ~> route ~> check { - responseAs[String] shouldEqual s"RoutingSettings.renderVanityFooter = true" + responseAs[String] shouldEqual "RoutingSettings.renderVanityFooter = true" } //# } @@ -767,7 +763,7 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { pathPrefix("123") { ignoring456 { path("abc") { - complete(s"Content") 
+ complete("Content") } } } diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRouteResult.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRouteResult.scala index aee99e08b9..91edde58ac 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRouteResult.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRouteResult.scala @@ -194,6 +194,15 @@ abstract class TestRouteResult(_result: RouteResult, awaitAtMost: FiniteDuration this } + /** + * Assert that a header of the given type does not exist. + */ + def assertHeaderKindNotExists(name: String): TestRouteResult = { + val lowercased = name.toRootLowerCase + assertTrue(response.headers.forall(!_.is(lowercased)), s"`$name` header was not expected to appear.") + this + } + /** * Assert that a header of the given name and value exists. */ @@ -235,4 +244,4 @@ abstract class TestRouteResult(_result: RouteResult, awaitAtMost: FiniteDuration protected def assertEquals(expected: AnyRef, actual: AnyRef, message: String): Unit protected def assertEquals(expected: Int, actual: Int, message: String): Unit protected def assertTrue(predicate: Boolean, message: String): Unit -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala index 7a3a5642ec..0985375a2e 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala @@ -11,3 +11,21 @@ trait Complete extends RouteResult { trait Rejected extends RouteResult { def getRejections: java.lang.Iterable[Rejection] } + +object RouteResults { + import akka.http.scaladsl.{ server ⇒ s } + import akka.japi.Util + import scala.language.implicitConversions + import akka.http.impl.util.JavaMapping + import JavaMapping.Implicits._ + import RoutingJavaMapping._ + + def complete(response: 
HttpResponse): Complete = { + s.RouteResult.Complete(JavaMapping.toScala(response)) + } + + def rejected(rejections: java.lang.Iterable[Rejection]): Rejected = { + s.RouteResult.Rejected(Util.immutableSeq(rejections).map(_.asScala)) + } + +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala index 2fc58edd0d..291453393b 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala @@ -8,7 +8,7 @@ import java.util.function.{ Function ⇒ JFunction } import akka.http.impl.util.JavaMapping import akka.http.javadsl.settings.ParserSettings -import akka.http.scaladsl.settings.RoutingSettings +import akka.http.javadsl.settings.RoutingSettings import akka.japi.Util import scala.concurrent.ExecutionContextExecutor @@ -73,6 +73,10 @@ abstract class BasicDirectives { D.mapRouteResult(route ⇒ f(route.asJava).asScala) { inner.get.delegate } } + def mapRouteResultPF(f: PartialFunction[RouteResult, RouteResult], inner: Supplier[Route]): Route = RouteAdapter { + D.mapRouteResult(route ⇒ f(route.asJava).asScala) { inner.get.delegate } + } + def mapRouteResultFuture(f: JFunction[CompletionStage[RouteResult], CompletionStage[RouteResult]], inner: Supplier[Route]): Route = RouteAdapter { D.mapRouteResultFuture(stage ⇒ f(toJava(stage.fast.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext))).toScala.fast.map(_.asScala)(ExecutionContexts.sameThreadExecutionContext)) { @@ -84,11 +88,15 @@ abstract class BasicDirectives { D.mapRouteResultWith(r ⇒ f(r.asJava).toScala.fast.map(_.asScala)(ExecutionContexts.sameThreadExecutionContext)) { inner.get.delegate } } + def mapRouteResultWithPF(f: PartialFunction[RouteResult, CompletionStage[RouteResult]], inner: Supplier[Route]): Route = RouteAdapter { + D.mapRouteResultWith(r ⇒ 
f(r.asJava).toScala.fast.map(_.asScala)(ExecutionContexts.sameThreadExecutionContext)) { inner.get.delegate } + } + /** * Runs the inner route with settings mapped by the given function. */ def mapSettings(f: JFunction[RoutingSettings, RoutingSettings], inner: Supplier[Route]): Route = RouteAdapter { - D.mapSettings(rs ⇒ f(rs)) { inner.get.delegate } + D.mapSettings(rs ⇒ f(rs.asJava).asScala) { inner.get.delegate } } /** @@ -215,11 +223,18 @@ abstract class BasicDirectives { D.withExecutionContext(ec) { inner.get.delegate } } + /** + * Runs its inner route with the given alternative [[akka.stream.Materializer]]. + */ + def withMaterializer(mat: Materializer, inner: Supplier[Route]): Route = RouteAdapter { + D.withMaterializer(mat) { inner.get.delegate } + } + /** * Runs its inner route with the given alternative [[RoutingSettings]]. */ def withSettings(s: RoutingSettings, inner: Supplier[Route]): Route = RouteAdapter { - D.withSettings(s) { inner.get.delegate } + D.withSettings(s.asScala) { inner.get.delegate } } /** From 32e72f8208ec0195f10051068c672b976131b875 Mon Sep 17 00:00:00 2001 From: drewhk Date: Fri, 3 Jun 2016 13:53:20 +0200 Subject: [PATCH 02/85] 20224: Fix onSubscribe-request-onNext reentrancy in VirtualProc (#20670) --- .../scala/akka/stream/impl/StreamLayout.scala | 76 ++++++++++++++----- 1 file changed, 56 insertions(+), 20 deletions(-) diff --git a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala index 3d7e0752fd..a646eadf02 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala @@ -3,24 +3,24 @@ */ package akka.stream.impl -import java.util.concurrent.atomic.{ AtomicReference } +import java.util.concurrent.atomic.AtomicReference import java.{ util ⇒ ju } + import akka.NotUsed +import akka.event.Logging +import akka.event.Logging.simpleName +import akka.stream._ import 
akka.stream.impl.MaterializerSession.MaterializationPanic import akka.stream.impl.Stages.DefaultAttributes import akka.stream.impl.StreamLayout.Module -import akka.stream.scaladsl.Keep -import akka.stream._ -import org.reactivestreams.{ Processor, Subscription, Publisher, Subscriber } -import scala.util.control.{ NoStackTrace, NonFatal } -import akka.event.Logging.simpleName -import scala.annotation.tailrec -import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ -import akka.stream.impl.fusing.GraphStageModule -import akka.stream.impl.fusing.GraphStages.MaterializedValueSource import akka.stream.impl.fusing.GraphModule -import akka.event.Logging +import akka.stream.impl.fusing.GraphStages.MaterializedValueSource +import akka.stream.scaladsl.Keep +import org.reactivestreams.{ Processor, Publisher, Subscriber, Subscription } + +import scala.annotation.tailrec +import scala.collection.JavaConverters._ +import scala.util.control.{ NoStackTrace, NonFatal } /** * INTERNAL API @@ -552,8 +552,8 @@ private[stream] object VirtualProcessor { * it must ensure that we drop the Subscriber reference when `cancel` is invoked. 
*/ private[stream] final class VirtualProcessor[T] extends AtomicReference[AnyRef] with Processor[T, T] { - import VirtualProcessor._ import ReactiveStreamsCompliance._ + import VirtualProcessor._ override def subscribe(s: Subscriber[_ >: T]): Unit = { @tailrec def rec(sub: Subscriber[Any]): Unit = @@ -605,8 +605,12 @@ private[stream] final class VirtualProcessor[T] extends AtomicReference[AnyRef] private def establishSubscription(subscriber: Subscriber[_], subscription: Subscription): Unit = { val wrapped = new WrappedSubscription(subscription) - try subscriber.onSubscribe(wrapped) - catch { + try { + subscriber.onSubscribe(wrapped) + // Requests will be only allowed once onSubscribe has returned to avoid reentering on an onNext before + // onSubscribe completed + wrapped.ungateDemandAndRequestBuffered() + } catch { case NonFatal(ex) ⇒ set(Inert) tryCancel(subscription) @@ -697,19 +701,51 @@ private[stream] final class VirtualProcessor[T] extends AtomicReference[AnyRef] private def noDemand = "spec violation: onNext was signaled from upstream without demand" - private class WrappedSubscription(real: Subscription) extends Subscription { + object WrappedSubscription { + sealed trait SubscriptionState { def demand: Long } + case object PassThrough extends SubscriptionState { override def demand: Long = 0 } + final case class Buffering(demand: Long) extends SubscriptionState + + val NoBufferedDemand = Buffering(0) + } + + // Extdending AtomicReference to make the hot memory location share the same cache line with the Subscription + private class WrappedSubscription(real: Subscription) + extends AtomicReference[WrappedSubscription.SubscriptionState](WrappedSubscription.NoBufferedDemand) with Subscription { + import WrappedSubscription._ + + // Release + def ungateDemandAndRequestBuffered(): Unit = { + // Ungate demand + val requests = getAndSet(PassThrough).demand + // And request buffered demand + if (requests > 0) real.request(requests) + } + override def request(n: 
Long): Unit = { if (n < 1) { tryCancel(real) - getAndSet(Inert) match { + VirtualProcessor.this.getAndSet(Inert) match { case Both(s) ⇒ rejectDueToNonPositiveDemand(s) case Inert ⇒ // another failure has won the race case _ ⇒ // this cannot possibly happen, but signaling errors is impossible at this point } - } else real.request(n) + } else { + // NOTE: At this point, batched requests might not have been dispatched, i.e. this can reorder requests. + // This does not violate the Spec though, since we are a "Processor" here and although we, in reality, + // proxy downstream requests, it is virtually *us* that emit the requests here and we are free to follow + // any pattern of emitting them. + // The only invariant we need to keep is to never emit more requests than the downstream emitted so far. + @tailrec def bufferDemand(n: Long): Unit = { + val current = get() + if (current eq PassThrough) real.request(n) + else if (!compareAndSet(current, Buffering(current.demand + n))) bufferDemand(n) + } + bufferDemand(n) + } } override def cancel(): Unit = { - set(Inert) + VirtualProcessor.this.set(Inert) real.cancel() } } @@ -734,8 +770,8 @@ private[stream] final class VirtualProcessor[T] extends AtomicReference[AnyRef] * the use of `Inert.subscriber` as a tombstone. 
*/ private[impl] class VirtualPublisher[T] extends AtomicReference[AnyRef] with Publisher[T] { - import VirtualProcessor.Inert import ReactiveStreamsCompliance._ + import VirtualProcessor.Inert override def subscribe(subscriber: Subscriber[_ >: T]): Unit = { requireNonNullSubscriber(subscriber) From 896ea53dd3f8a5b79a98c2563b93cc3334766595 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johan=20Andr=C3=A9n?= Date: Fri, 3 Jun 2016 14:17:41 +0200 Subject: [PATCH 03/85] recovery timeout for persistent actors #20698 --- .../scala/akka/cluster/QuickRestartSpec.scala | 6 +- .../src/main/resources/reference.conf | 36 +++++---- .../scala/akka/persistence/Eventsourced.scala | 79 ++++++++++++------- .../scala/akka/persistence/Persistence.scala | 3 + .../akka/persistence/PersistentActor.scala | 5 ++ .../PersistentActorRecoveryTimeoutSpec.scala | 76 ++++++++++++++++++ 6 files changed, 161 insertions(+), 44 deletions(-) create mode 100644 akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/QuickRestartSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/QuickRestartSpec.scala index 0a4e68143a..713c02160a 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/QuickRestartSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/QuickRestartSpec.scala @@ -65,11 +65,13 @@ abstract class QuickRestartSpec runOn(second) { restartingSystem = if (restartingSystem == null) - ActorSystem(system.name, + ActorSystem( + system.name, ConfigFactory.parseString(s"akka.cluster.roles = [round-$n]") .withFallback(system.settings.config)) else - ActorSystem(system.name, + ActorSystem( + system.name, ConfigFactory.parseString(s""" akka.cluster.roles = [round-$n] akka.remote.netty.tcp.port = ${Cluster(restartingSystem).selfAddress.port.get}""") // same port diff --git a/akka-persistence/src/main/resources/reference.conf b/akka-persistence/src/main/resources/reference.conf index 
c246b5c89c..4619f42756 100644 --- a/akka-persistence/src/main/resources/reference.conf +++ b/akka-persistence/src/main/resources/reference.conf @@ -92,11 +92,11 @@ akka.persistence { } } } - + # Fallback settings for journal plugin configurations. # These settings are used if they are not defined in plugin config section. journal-plugin-fallback { - + # Fully qualified class name providing journal plugin api implementation. # It is mandatory to specify this property. # The class must have a constructor without parameters or constructor with @@ -105,40 +105,46 @@ akka.persistence { # Dispatcher for the plugin actor. plugin-dispatcher = "akka.persistence.dispatchers.default-plugin-dispatcher" - + # Dispatcher for message replay. replay-dispatcher = "akka.persistence.dispatchers.default-replay-dispatcher" - + # Removed: used to be the Maximum size of a persistent message batch written to the journal. # Now this setting is without function, PersistentActor will write as many messages # as it has accumulated since the last write. max-message-batch-size = 200 - + + # If there is more time in between individual events gotten from the journal + # recovery than this the recovery will fail. + # Note that it also affects reading the snapshot before replaying events on + # top of it, even though it is configured for the journal. + recovery-event-timeout = 30s + circuit-breaker { max-failures = 10 call-timeout = 10s reset-timeout = 30s } - - # The replay filter can detect a corrupt event stream by inspecting - # sequence numbers and writerUuid when replaying events. + + # The replay filter can detect a corrupt event stream by inspecting + # sequence numbers and writerUuid when replaying events. replay-filter { # What the filter should do when detecting invalid events. 
# Supported values: - # `repair-by-discard-old` : discard events from old writers, + # `repair-by-discard-old` : discard events from old writers, # warning is logged # `fail` : fail the replay, error is logged # `warn` : log warning but emit events untouched # `off` : disable this feature completely mode = repair-by-discard-old - + # It uses a look ahead buffer for analyzing the events. # This defines the size (in number of events) of the buffer. window-size = 100 - + # How many old writerUuid to remember max-old-writers = 10 - + # Set this to `on` to enable detailed debug logging of each # replayed event. debug = off @@ -148,8 +154,8 @@ akka.persistence { # Fallback settings for snapshot store plugin configurations # These settings are used if they are not defined in plugin config section. snapshot-store-plugin-fallback { - - # Fully qualified class name providing snapshot store plugin api + + # Fully qualified class name providing snapshot store plugin api # implementation. It is mandatory to specify this property if # snapshot store is enabled. # The class must have a constructor without parameters or constructor with @@ -158,7 +164,7 @@ akka.persistence { # Dispatcher for the plugin actor. 
plugin-dispatcher = "akka.persistence.dispatchers.default-plugin-dispatcher" - + circuit-breaker { max-failures = 5 call-timeout = 20s diff --git a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala index 6b8c1d1b20..2208a638bc 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala @@ -9,11 +9,13 @@ import java.util.UUID import scala.collection.immutable import scala.util.control.NonFatal -import akka.actor.DeadLetter -import akka.actor.StashOverflowException +import akka.actor.{ DeadLetter, ReceiveTimeout, StashOverflowException } +import akka.util.Helpers.ConfigOps import akka.event.Logging import akka.event.LoggingAdapter +import scala.concurrent.duration.{ Duration, FiniteDuration } + /** * INTERNAL API */ @@ -461,6 +463,10 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas */ private def recoveryStarted(replayMax: Long) = new State { + // protect against replay stalling forever because of journal overloaded and such + private val previousRecieveTimeout = context.receiveTimeout + context.setReceiveTimeout(extension.journalConfigFor(journalPluginId).getMillisDuration("recovery-event-timeout")) + private val recoveryBehavior: Receive = { val _receiveRecover = receiveRecover @@ -471,6 +477,7 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas _receiveRecover(s) case RecoveryCompleted if _receiveRecover.isDefinedAt(RecoveryCompleted) ⇒ _receiveRecover(RecoveryCompleted) + } } @@ -485,8 +492,13 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas // Since we are recovering we can ignore the receive behavior from the stack Eventsourced.super.aroundReceive(recoveryBehavior, SnapshotOffer(metadata, snapshot)) } - changeState(recovering(recoveryBehavior)) + 
changeState(recovering(recoveryBehavior, previousRecieveTimeout)) journal ! ReplayMessages(lastSequenceNr + 1L, toSnr, replayMax, persistenceId, self) + case ReceiveTimeout ⇒ + try onRecoveryFailure( + new RecoveryTimedOut(s"Recovery timed out, didn't get snapshot within ${context.receiveTimeout.toSeconds}s"), + event = None) + finally context.stop(self) case other ⇒ stashInternally(other) } @@ -502,32 +514,45 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas * * All incoming messages are stashed. */ - private def recovering(recoveryBehavior: Receive) = new State { - override def toString: String = "replay started" - override def recoveryRunning: Boolean = true + private def recovering(recoveryBehavior: Receive, previousReceiveTimeout: Duration) = + new State { + override def toString: String = "replay started" - override def stateReceive(receive: Receive, message: Any) = message match { - case ReplayedMessage(p) ⇒ - try { - updateLastSequenceNr(p) - Eventsourced.super.aroundReceive(recoveryBehavior, p) - } catch { - case NonFatal(t) ⇒ - try onRecoveryFailure(t, Some(p.payload)) finally context.stop(self) - } - case RecoverySuccess(highestSeqNr) ⇒ - onReplaySuccess() // callback for subclass implementation - changeState(processingCommands) - sequenceNr = highestSeqNr - setLastSequenceNr(highestSeqNr) - internalStash.unstashAll() - Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryCompleted) - case ReplayMessagesFailure(cause) ⇒ - try onRecoveryFailure(cause, event = None) finally context.stop(self) - case other ⇒ - stashInternally(other) + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case ReplayedMessage(p) ⇒ + try { + updateLastSequenceNr(p) + Eventsourced.super.aroundReceive(recoveryBehavior, p) + } catch { + case NonFatal(t) ⇒ + try onRecoveryFailure(t, Some(p.payload)) finally context.stop(self) + } + case RecoverySuccess(highestSeqNr) ⇒ 
+ resetRecieveTimeout() + onReplaySuccess() // callback for subclass implementation + changeState(processingCommands) + sequenceNr = highestSeqNr + setLastSequenceNr(highestSeqNr) + internalStash.unstashAll() + Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryCompleted) + case ReplayMessagesFailure(cause) ⇒ + resetRecieveTimeout() + try onRecoveryFailure(cause, event = None) finally context.stop(self) + case ReceiveTimeout ⇒ + try onRecoveryFailure( + new RecoveryTimedOut(s"Recovery timed out, didn't get event within ${context.receiveTimeout.toSeconds}s, highest sequence number seen ${sequenceNr}"), + event = None) + finally context.stop(self) + case other ⇒ + stashInternally(other) + } + + private def resetRecieveTimeout(): Unit = { + context.setReceiveTimeout(previousReceiveTimeout) + } } - } private def flushBatch() { if (eventBatch.nonEmpty) { diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala index c8dd4b7e49..f76893c117 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala @@ -6,15 +6,18 @@ package akka.persistence import java.util.concurrent.atomic.AtomicReference import java.util.function.Consumer + import akka.actor._ import akka.event.{ Logging, LoggingAdapter } import akka.persistence.journal.{ EventAdapters, IdentityEventAdapters } import akka.util.Collections.EmptyImmutableSeq import akka.util.Helpers.ConfigOps import com.typesafe.config.Config + import scala.annotation.tailrec import scala.concurrent.duration._ import akka.util.Reflect + import scala.util.control.NonFatal /** diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala index b95689458b..520c9c3b24 100644 --- a/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala +++ 
b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala @@ -4,11 +4,14 @@ package akka.persistence import java.lang.{ Iterable ⇒ JIterable } + import akka.actor._ import akka.japi.Procedure import akka.japi.Util import com.typesafe.config.Config +import scala.util.control.NoStackTrace + abstract class RecoveryCompleted /** * Sent to a [[PersistentActor]] when the journal replay has been finished. @@ -98,6 +101,8 @@ object Recovery { val none: Recovery = Recovery(toSequenceNr = 0L) } +final class RecoveryTimedOut(message: String) extends RuntimeException(message) with NoStackTrace + /** * This defines how to handle the current received message which failed to stash, when the size of * Stash exceeding the capacity of Stash. diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala new file mode 100644 index 0000000000..5f5c7e964b --- /dev/null +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala @@ -0,0 +1,76 @@ +package akka.persistence + +import akka.actor.Status.Failure +import akka.actor.{ Actor, ActorRef, Props } +import akka.persistence.journal.SteppingInmemJournal +import akka.testkit.{ AkkaSpec, ImplicitSender, TestProbe } +import com.typesafe.config.ConfigFactory + +import scala.concurrent.duration._ + +object PersistentActorRecoveryTimeoutSpec { + val journalId = "persistent-actor-recovery-timeout-spec" + + def config = + SteppingInmemJournal.config(PersistentActorRecoveryTimeoutSpec.journalId).withFallback( + ConfigFactory.parseString( + """ + |akka.persistence.journal.stepping-inmem.recovery-event-timeout=100ms + """.stripMargin)).withFallback(PersistenceSpec.config("stepping-inmem", "PersistentActorRecoveryTimeoutSpec")) + + class TestActor(probe: ActorRef) extends NamedPersistentActor("recovery-timeout-actor") { + override def receiveRecover: Receive = 
Actor.emptyBehavior + + override def receiveCommand: Receive = { + case x ⇒ persist(x) { _ ⇒ + sender() ! x + } + } + + override protected def onRecoveryFailure(cause: Throwable, event: Option[Any]): Unit = { + probe ! Failure(cause) + } + } + +} + +class PersistentActorRecoveryTimeoutSpec extends AkkaSpec(PersistentActorRecoveryTimeoutSpec.config) with ImplicitSender { + + import PersistentActorRecoveryTimeoutSpec.journalId + + "The recovery timeout" should { + + "fail recovery if timeout is not met when recovering" in { + val probe = TestProbe() + val persisting = system.actorOf(Props(classOf[PersistentActorRecoveryTimeoutSpec.TestActor], probe.ref)) + + awaitAssert(SteppingInmemJournal.getRef(journalId), 3.seconds) + val journal = SteppingInmemJournal.getRef(journalId) + + // initial read highest + SteppingInmemJournal.step(journal) + + persisting ! "A" + SteppingInmemJournal.step(journal) + expectMsg("A") + + watch(persisting) + system.stop(persisting) + expectTerminated(persisting) + + // now replay, but don't give the journal any tokens to replay events + // so that we cause the timeout to trigger + val replaying = system.actorOf(Props(classOf[PersistentActorRecoveryTimeoutSpec.TestActor], probe.ref)) + watch(replaying) + + // initial read highest + SteppingInmemJournal.step(journal) + + probe.expectMsgType[Failure].cause shouldBe a[RecoveryTimedOut] + expectTerminated(replaying) + + } + + } + +} From b84c6c527124b57e9e69b67f33041fdc6aec60b5 Mon Sep 17 00:00:00 2001 From: Stefano Bonetti Date: Fri, 3 Jun 2016 13:23:11 +0100 Subject: [PATCH 04/85] Adding docs for KillSwitch #20265 --- .../code/docs/stream/KillSwitchDocTest.java | 140 ++++++++++++++++++ akka-docs/rst/java/stream/index.rst | 1 + akka-docs/rst/java/stream/stream-dynamic.rst | 63 ++++++++ .../code/docs/stream/KillSwitchDocSpec.scala | 108 ++++++++++++++ akka-docs/rst/scala/stream/index.rst | 1 + akka-docs/rst/scala/stream/stream-dynamic.rst | 63 ++++++++ .../main/scala/akka/stream/KillSwitch.scala 
| 6 +- 7 files changed, 380 insertions(+), 2 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/stream/KillSwitchDocTest.java create mode 100644 akka-docs/rst/java/stream/stream-dynamic.rst create mode 100644 akka-docs/rst/scala/code/docs/stream/KillSwitchDocSpec.scala create mode 100644 akka-docs/rst/scala/stream/stream-dynamic.rst diff --git a/akka-docs/rst/java/code/docs/stream/KillSwitchDocTest.java b/akka-docs/rst/java/code/docs/stream/KillSwitchDocTest.java new file mode 100644 index 0000000000..e70e7e48d3 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/KillSwitchDocTest.java @@ -0,0 +1,140 @@ +package docs.stream; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.Keep; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import docs.AbstractJavaTest; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.duration.FiniteDuration; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; + +class KillSwitchDocTest extends AbstractJavaTest { + + static ActorSystem system; + static Materializer mat; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("GraphDSLDocTest"); + mat = ActorMaterializer.create(system); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + mat = null; + } + + @Test + public void compileOnlyTest() { + } + + public void uniqueKillSwitchShutdownExample() throws Exception { + //#unique-shutdown + final Source countingSrc = Source.from(new ArrayList<>(Arrays.asList(1, 2, 3, 4))) + .delay(FiniteDuration.apply(1, TimeUnit.SECONDS), DelayOverflowStrategy.backpressure()); + final Sink> lastSnk = Sink.last(); + + final Pair> 
stream = countingSrc + .viaMat(KillSwitches.single(), Keep.right()) + .toMat(lastSnk, Keep.both()).run(mat); + + final UniqueKillSwitch killSwitch = stream.first(); + final CompletionStage completionStage = stream.second(); + + doSomethingElse(); + killSwitch.shutdown(); + + final int finalCount = completionStage.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(2, finalCount); + //#unique-shutdown + } + + public static void uniqueKillSwitchAbortExample() throws Exception { + //#unique-abort + final Source countingSrc = Source.from(new ArrayList<>(Arrays.asList(1, 2, 3, 4))) + .delay(FiniteDuration.apply(1, TimeUnit.SECONDS), DelayOverflowStrategy.backpressure()); + final Sink> lastSnk = Sink.last(); + + final Pair> stream = countingSrc + .viaMat(KillSwitches.single(), Keep.right()) + .toMat(lastSnk, Keep.both()).run(mat); + + final UniqueKillSwitch killSwitch = stream.first(); + final CompletionStage completionStage = stream.second(); + + final Exception error = new Exception("boom!"); + killSwitch.abort(error); + + final int result = completionStage.toCompletableFuture().exceptionally(e -> -1).get(1, TimeUnit.SECONDS); + assertEquals(-1, result); + //#unique-abort + } + + public void sharedKillSwitchShutdownExample() throws Exception { + //#shared-shutdown + final Source countingSrc = Source.from(new ArrayList<>(Arrays.asList(1, 2, 3, 4))) + .delay(FiniteDuration.apply(1, TimeUnit.SECONDS), DelayOverflowStrategy.backpressure()); + final Sink> lastSnk = Sink.last(); + final SharedKillSwitch killSwitch = KillSwitches.shared("my-kill-switch"); + + final CompletionStage completionStage = countingSrc + .viaMat(killSwitch.flow(), Keep.right()) + .toMat(lastSnk, Keep.right()).run(mat); + final CompletionStage completionStageDelayed = countingSrc + .delay(FiniteDuration.apply(1, TimeUnit.SECONDS), DelayOverflowStrategy.backpressure()) + .viaMat(killSwitch.flow(), Keep.right()) + .toMat(lastSnk, Keep.right()).run(mat); + + doSomethingElse(); + 
killSwitch.shutdown(); + + final int finalCount = completionStage.toCompletableFuture().get(1, TimeUnit.SECONDS); + final int finalCountDelayed = completionStageDelayed.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(2, finalCount); + assertEquals(1, finalCountDelayed); + //#shared-shutdown + } + + public static void sharedKillSwitchAbortExample() throws Exception { + //#shared-abort + final Source countingSrc = Source.from(new ArrayList<>(Arrays.asList(1, 2, 3, 4))) + .delay(FiniteDuration.apply(1, TimeUnit.SECONDS), DelayOverflowStrategy.backpressure()); + final Sink> lastSnk = Sink.last(); + final SharedKillSwitch killSwitch = KillSwitches.shared("my-kill-switch"); + + final CompletionStage completionStage1 = countingSrc + .viaMat(killSwitch.flow(), Keep.right()) + .toMat(lastSnk, Keep.right()).run(mat); + final CompletionStage completionStage2 = countingSrc + .viaMat(killSwitch.flow(), Keep.right()) + .toMat(lastSnk, Keep.right()).run(mat); + + final Exception error = new Exception("boom!"); + killSwitch.abort(error); + + final int result1 = completionStage1.toCompletableFuture().exceptionally(e -> -1).get(1, TimeUnit.SECONDS); + final int result2 = completionStage2.toCompletableFuture().exceptionally(e -> -1).get(1, TimeUnit.SECONDS); + assertEquals(-1, result1); + assertEquals(-1, result2); + //#shared-abort + } + + private static void doSomethingElse(){ + } +} diff --git a/akka-docs/rst/java/stream/index.rst b/akka-docs/rst/java/stream/index.rst index e1deaf2748..3bc5c32c61 100644 --- a/akka-docs/rst/java/stream/index.rst +++ b/akka-docs/rst/java/stream/index.rst @@ -13,6 +13,7 @@ Streams stream-graphs stream-composition stream-rate + stream-dynamic stream-customize stream-integrations stream-error diff --git a/akka-docs/rst/java/stream/stream-dynamic.rst b/akka-docs/rst/java/stream/stream-dynamic.rst new file mode 100644 index 0000000000..77a0e0a436 --- /dev/null +++ b/akka-docs/rst/java/stream/stream-dynamic.rst @@ -0,0 +1,63 @@ +.. 
_stream-dynamic-scala: + +####################### +Dynamic stream handling +####################### + +.. _kill-switch-scala: + +Controlling graph completion with KillSwitch +-------------------------------------------- + +A ``KillSwitch`` allows the completion of graphs of ``FlowShape`` from the outside. It consists of a flow element that +can be linked to a graph of ``FlowShape`` needing completion control. +The ``KillSwitch`` trait allows to complete or fail the graph(s). + +.. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/KillSwitch.scala + :include: kill-switch + +After the first call to either ``shutdown`` and ``abort``, all subsequent calls to any of these methods will be ignored. +Graph completion is performed by both + +* completing its downstream +* cancelling (in case of ``shutdown``) or failing (in case of ``abort``) its upstream. + +A ``KillSwitch`` can control the completion of one or multiple streams, and therefore comes in two different flavours. + +.. _unique-kill-switch-scala: + +UniqueKillSwitch +^^^^^^^^^^^^^^^^ + +``UniqueKillSwitch`` allows to control the completion of **one** materialized ``Graph`` of ``FlowShape``. Refer to the +below for usage examples. + +* **Shutdown** + +.. includecode:: ../code/docs/stream/KillSwitchDocTest.java#unique-shutdown + +* **Abort** + +.. includecode:: ../code/docs/stream/KillSwitchDocTest.java#unique-abort + +.. _shared-kill-switch-scala: + +SharedKillSwitch +^^^^^^^^^^^^^^^^ + +A ``SharedKillSwitch`` allows to control the completion of an arbitrary number graphs of ``FlowShape``. It can be +materialized multiple times via its ``flow`` method, and all materialized graphs linked to it are controlled by the switch. +Refer to the below for usage examples. + +* **Shutdown** + +.. includecode:: ../code/docs/stream/KillSwitchDocTest.java#shared-shutdown + +* **Abort** + +.. includecode:: ../code/docs/stream/KillSwitchDocTest.java#shared-abort + +.. 
note:: + A ``UniqueKillSwitch`` is always a result of a materialization, whilst ``SharedKillSwitch`` needs to be constructed + before any materialization takes place. + diff --git a/akka-docs/rst/scala/code/docs/stream/KillSwitchDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/KillSwitchDocSpec.scala new file mode 100644 index 0000000000..b7a8b04201 --- /dev/null +++ b/akka-docs/rst/scala/code/docs/stream/KillSwitchDocSpec.scala @@ -0,0 +1,108 @@ +package docs.stream + +import akka.stream.scaladsl._ +import akka.stream.{ ActorMaterializer, DelayOverflowStrategy, KillSwitches } +import akka.testkit.AkkaSpec +import docs.CompileOnlySpec + +import scala.concurrent.Await +import scala.concurrent.duration._ + +class KillSwitchDocSpec extends AkkaSpec with CompileOnlySpec { + + implicit val materializer = ActorMaterializer() + + "Unique kill switch" must { + + "control graph completion with shutdown" in compileOnlySpec { + + // format: OFF + //#unique-shutdown + val countingSrc = Source(Stream.from(1)).delay(1.second, DelayOverflowStrategy.backpressure) + val lastSnk = Sink.last[Int] + + val (killSwitch, last) = countingSrc + .viaMat(KillSwitches.single)(Keep.right) + .toMat(lastSnk)(Keep.both) + .run() + + doSomethingElse() + + killSwitch.shutdown() + + Await.result(last, 1.second) shouldBe 2 + //#unique-shutdown + // format: ON + } + + "control graph completion with abort" in compileOnlySpec { + + // format: OFF + //#unique-abort + val countingSrc = Source(Stream.from(1)).delay(1.second, DelayOverflowStrategy.backpressure) + val lastSnk = Sink.last[Int] + + val (killSwitch, last) = countingSrc + .viaMat(KillSwitches.single)(Keep.right) + .toMat(lastSnk)(Keep.both).run() + + val error = new RuntimeException("boom!") + killSwitch.abort(error) + + Await.result(last.failed, 1.second) shouldBe error + //#unique-abort + // format: ON + } + } + + "Shared kill switch" must { + + "control graph completion with shutdown" in compileOnlySpec { + // format: OFF + 
//#shared-shutdown + val countingSrc = Source(Stream.from(1)).delay(1.second, DelayOverflowStrategy.backpressure) + val lastSnk = Sink.last[Int] + val sharedKillSwitch = KillSwitches.shared("my-kill-switch") + + val last = countingSrc + .via(sharedKillSwitch.flow) + .runWith(lastSnk) + + val delayedLast = countingSrc + .delay(1.second, DelayOverflowStrategy.backpressure) + .via(sharedKillSwitch.flow) + .runWith(lastSnk) + + doSomethingElse() + + sharedKillSwitch.shutdown() + + Await.result(last, 1.second) shouldBe 2 + Await.result(delayedLast, 1.second) shouldBe 1 + //#shared-shutdown + // format: ON + } + + "control graph completion with abort" in compileOnlySpec { + + // format: OFF + //#shared-abort + val countingSrc = Source(Stream.from(1)).delay(1.second) + val lastSnk = Sink.last[Int] + val sharedKillSwitch = KillSwitches.shared("my-kill-switch") + + val last1 = countingSrc.via(sharedKillSwitch.flow).runWith(lastSnk) + val last2 = countingSrc.via(sharedKillSwitch.flow).runWith(lastSnk) + + val error = new RuntimeException("boom!") + sharedKillSwitch.abort(error) + + Await.result(last1.failed, 1.second) shouldBe error + Await.result(last2.failed, 1.second) shouldBe error + //#shared-abort + // format: ON + } + } + + private def doSomethingElse() = ??? +} diff --git a/akka-docs/rst/scala/stream/index.rst b/akka-docs/rst/scala/stream/index.rst index 485f4d00a3..a337529ac0 100644 --- a/akka-docs/rst/scala/stream/index.rst +++ b/akka-docs/rst/scala/stream/index.rst @@ -13,6 +13,7 @@ Streams stream-graphs stream-composition stream-rate + stream-dynamic stream-customize stream-integrations stream-error diff --git a/akka-docs/rst/scala/stream/stream-dynamic.rst b/akka-docs/rst/scala/stream/stream-dynamic.rst new file mode 100644 index 0000000000..8716f934a6 --- /dev/null +++ b/akka-docs/rst/scala/stream/stream-dynamic.rst @@ -0,0 +1,63 @@ +.. _stream-dynamic-scala: + +####################### +Dynamic stream handling +####################### + +.. 
_kill-switch-scala: + +Controlling graph completion with KillSwitch +-------------------------------------------- + +A ``KillSwitch`` allows the completion of graphs of ``FlowShape`` from the outside. It consists of a flow element that +can be linked to a graph of ``FlowShape`` needing completion control. +The ``KillSwitch`` trait allows to complete or fail the graph(s). + +.. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/KillSwitch.scala + :include: kill-switch + +After the first call to either ``shutdown`` and ``abort``, all subsequent calls to any of these methods will be ignored. +Graph completion is performed by both + +* completing its downstream +* cancelling (in case of ``shutdown``) or failing (in case of ``abort``) its upstream. + +A ``KillSwitch`` can control the completion of one or multiple streams, and therefore comes in two different flavours. + +.. _unique-kill-switch-scala: + +UniqueKillSwitch +^^^^^^^^^^^^^^^^ + +``UniqueKillSwitch`` allows to control the completion of **one** materialized ``Graph`` of ``FlowShape``. Refer to the +below for usage examples. + +* **Shutdown** + +.. includecode:: ../code/docs/stream/KillSwitchDocSpec.scala#unique-shutdown + +* **Abort** + +.. includecode:: ../code/docs/stream/KillSwitchDocSpec.scala#unique-abort + +.. _shared-kill-switch-scala: + +SharedKillSwitch +^^^^^^^^^^^^^^^^ + +A ``SharedKillSwitch`` allows to control the completion of an arbitrary number graphs of ``FlowShape``. It can be +materialized multiple times via its ``flow`` method, and all materialized graphs linked to it are controlled by the switch. +Refer to the below for usage examples. + +* **Shutdown** + +.. includecode:: ../code/docs/stream/KillSwitchDocSpec.scala#shared-shutdown + +* **Abort** + +.. includecode:: ../code/docs/stream/KillSwitchDocSpec.scala#shared-abort + +.. 
note:: + A ``UniqueKillSwitch`` is always a result of a materialization, whilst ``SharedKillSwitch`` needs to be constructed + before any materialization takes place. + diff --git a/akka-stream/src/main/scala/akka/stream/KillSwitch.scala b/akka-stream/src/main/scala/akka/stream/KillSwitch.scala index c0bb6800ab..0aaa7de794 100644 --- a/akka-stream/src/main/scala/akka/stream/KillSwitch.scala +++ b/akka-stream/src/main/scala/akka/stream/KillSwitch.scala @@ -132,6 +132,7 @@ object KillSwitches { * multiple streams might be linked with the switch. For details see the documentation of the concrete subclasses of * this interface. */ +//#kill-switch trait KillSwitch { /** * After calling [[KillSwitch#shutdown()]] the linked [[Graph]]s of [[FlowShape]] are completed normally. @@ -142,6 +143,7 @@ trait KillSwitch { */ def abort(ex: Throwable): Unit } +//#kill-switch /** * A [[UniqueKillSwitch]] is always a result of a materialization (unlike [[SharedKillSwitch]] which is constructed @@ -182,7 +184,7 @@ final class UniqueKillSwitch private[stream] (private val promise: Promise[Done] /** * A [[SharedKillSwitch]] is a provider for [[Graph]]s of [[FlowShape]] that can be completed or failed from the outside. * A [[Graph]] returned by the switch can be materialized arbitrary amount of times: every newly materialized [[Graph]] - * belongs to the switch from which it was aquired. Multiple [[SharedKillSwitch]] instances are isolated from each other, + * belongs to the switch from which it was acquired. Multiple [[SharedKillSwitch]] instances are isolated from each other, * shutting down or aborting on instance does not affect the [[Graph]]s provided by another instance. 
* * After calling [[SharedKillSwitch#shutdown()]] all materialized, running instances of all [[Graph]]s provided by the @@ -226,7 +228,7 @@ final class SharedKillSwitch private[stream] (val name: String) extends KillSwit def abort(reason: Throwable): Unit = shutdownPromise.tryFailure(reason) /** - * Retrurns a typed Flow of a requested type that will be linked to this [[SharedKillSwitch]] instance. By invoking + * Returns a typed Flow of a requested type that will be linked to this [[SharedKillSwitch]] instance. By invoking * [[SharedKillSwitch#shutdown()]] or [[SharedKillSwitch#abort()]] all running instances of all provided [[Graph]]s by this * switch will be stopped normally or failed. * From 2643857e8243b1b3a4d1b69f42290b61b0d274c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johan=20Andr=C3=A9n?= Date: Fri, 3 Jun 2016 14:24:31 +0200 Subject: [PATCH 05/85] =doc Invalid FrameParser example fixed #20519 --- akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java | 3 +++ akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala | 3 +++ 2 files changed, 6 insertions(+) diff --git a/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java index 3c266a6215..27eb549e8c 100644 --- a/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java @@ -165,9 +165,12 @@ public class BidiFlowDocTest extends AbstractJavaTest { @Override public void onUpstreamFinish() throws Exception { + // either we are done if (stash.isEmpty()) completeStage(); + // or we still have bytes to emit // wait with completion and let run() complete when the // rest of the stash has been sent downstream + else if (isAvailable(out)) run(); } }); diff --git a/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala index d69e1ca39b..f8bd88e7ba 100644 --- a/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala +++ 
b/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala @@ -93,9 +93,12 @@ object BidiFlowDocSpec { } override def onUpstreamFinish(): Unit = { + // either we are done if (stash.isEmpty) completeStage() + // or we still have bytes to emit // wait with completion and let run() complete when the // rest of the stash has been sent downstream + else if (isAvailable(out)) run() } }) From f49708d8b79c05de692e655d04677a88ab462363 Mon Sep 17 00:00:00 2001 From: Konrad Malawski Date: Fri, 3 Jun 2016 14:57:12 +0200 Subject: [PATCH 06/85] +htp #20705 allows javadsl creating custom rejections --- .../server/directives/BasicDirectivesExamplesTest.java | 2 +- .../src/main/scala/akka/http/javadsl/server/Rejections.scala | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java index fa5a520460..21d7fcc471 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/BasicDirectivesExamplesTest.java @@ -544,7 +544,7 @@ public class BasicDirectivesExamplesTest extends JUnitRouteTest { //#mapRouteResult } - public static final class MyCustomRejection implements akka.http.scaladsl.server.Rejection {} + public static final class MyCustomRejection implements CustomRejection {} @Test public void testMapRouteResultPF() { diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Rejections.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Rejections.scala index a9cbe31da9..95d2e3b011 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Rejections.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Rejections.scala @@ -27,9 +27,14 @@ import scala.collection.JavaConverters._ * A rejection encapsulates a specific reason why a Route was not able to 
handle a request. Rejections are gathered * up over the course of a Route evaluation and finally converted to [[akka.http.scaladsl.model.HttpResponse]]s by the * `handleRejections` directive, if there was no way for the request to be completed. + * + * If providing custom rejections, extend [[CustomRejection]] instead. */ trait Rejection +/** To be extended by user-provided custom rejections, such that they may be consumed in either Java or Scala DSLs. */ +trait CustomRejection extends akka.http.scaladsl.server.Rejection + /** * Rejection created by method filters. * Signals that the request was rejected because the HTTP method is unsupported. From ab83603733167eb4f7ff53ca35881c7dac2469ba Mon Sep 17 00:00:00 2001 From: Hawstein Date: Mon, 6 Jun 2016 03:29:58 +0800 Subject: [PATCH 07/85] +doc #20466 example snippet for akka http java dsl: ExecutionDirectives (#20710) --- .../ExecutionDirectivesExamplesTest.java | 75 +++++++++++++++++++ .../execution-directives/handleExceptions.rst | 3 +- .../execution-directives/handleRejections.rst | 3 +- 3 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java new file mode 100644 index 0000000000..b8e1809732 --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. 
+ */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.ExceptionHandler; +import akka.http.javadsl.server.PathMatchers; +import akka.http.javadsl.server.RejectionHandler; +import akka.http.javadsl.server.Rejections; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.server.ValidationRejection; +import akka.http.javadsl.testkit.JUnitRouteTest; +import org.junit.Test; + +import static akka.http.javadsl.server.PathMatchers.integerSegment; + +public class ExecutionDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testHandleExceptions() { + //#handleExceptions + final ExceptionHandler divByZeroHandler = ExceptionHandler.newBuilder() + .match(ArithmeticException.class, x -> + complete(StatusCodes.BAD_REQUEST, "You've got your arithmetic wrong, fool!")) + .build(); + + final Route route = + path(PathMatchers.segment("divide").slash(integerSegment()).slash(integerSegment()), (a, b) -> + handleExceptions(divByZeroHandler, () -> complete("The result is " + (a / b))) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/divide/10/5")) + .assertEntity("The result is 2"); + testRoute(route).run(HttpRequest.GET("/divide/10/0")) + .assertStatusCode(StatusCodes.BAD_REQUEST) + .assertEntity("You've got your arithmetic wrong, fool!"); + //#handleExceptions + } + + @Test + public void testHandleRejections() { + //#handleRejections + final RejectionHandler totallyMissingHandler = RejectionHandler.newBuilder() + .handleNotFound(complete(StatusCodes.NOT_FOUND, "Oh man, what you are looking for is long gone.")) + .handle(ValidationRejection.class, r -> complete(StatusCodes.INTERNAL_SERVER_ERROR, r.message())) + .build(); + + final Route route = pathPrefix("handled", () -> + handleRejections(totallyMissingHandler, () -> + route( + path("existing", () -> complete("This path exists")), + path("boom", () -> 
reject(Rejections.validationRejection("This didn't work."))) + ) + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/handled/existing")) + .assertEntity("This path exists"); + // applies default handler + testRoute(route).run(HttpRequest.GET("/missing")) + .assertStatusCode(StatusCodes.NOT_FOUND) + .assertEntity("The requested resource could not be found."); + testRoute(route).run(HttpRequest.GET("/handled/missing")) + .assertStatusCode(StatusCodes.NOT_FOUND) + .assertEntity("Oh man, what you are looking for is long gone."); + testRoute(route).run(HttpRequest.GET("/handled/boom")) + .assertStatusCode(StatusCodes.INTERNAL_SERVER_ERROR) + .assertEntity("This didn't work."); + //#handleRejections + } +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleExceptions.rst b/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleExceptions.rst index 888fb4447d..56165e0340 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleExceptions.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleExceptions.rst @@ -14,4 +14,5 @@ See :ref:`exception-handling-java` for general information about options for han Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java#handleExceptions diff --git a/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleRejections.rst b/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleRejections.rst index 8c0ef2d868..619155c4e7 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleRejections.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/execution-directives/handleRejections.rst @@ -13,4 +13,5 @@ See :ref:`rejections-java` for general information about options for handling re Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/ExecutionDirectivesExamplesTest.java#handleRejections From 16e8f8f54ac7789687f63ed8e3ca3cae099adce6 Mon Sep 17 00:00:00 2001 From: Hawstein Date: Mon, 6 Jun 2016 03:31:28 +0800 Subject: [PATCH 08/85] +doc #20466 example snippet for akka http java dsl: CodingDirectives (#20718) --- .../CodingDirectivesExamplesTest.java | 156 ++++++++++++++++++ .../coding-directives/decodeRequest.rst | 3 +- .../coding-directives/decodeRequestWith.rst | 3 +- .../coding-directives/encodeResponse.rst | 3 +- .../coding-directives/encodeResponseWith.rst | 3 +- .../responseEncodingAccepted.rst | 3 +- 6 files changed, 166 insertions(+), 5 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java new file mode 100644 index 0000000000..0fde3f7599 --- /dev/null +++ 
b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. + */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.headers.AcceptEncoding; +import akka.http.javadsl.model.headers.ContentEncoding; +import akka.http.javadsl.model.headers.HttpEncodings; +import akka.http.javadsl.server.Coder; +import akka.http.javadsl.server.Rejections; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.testkit.JUnitRouteTest; +import akka.util.ByteString; +import org.junit.Test; + +import java.util.Collections; + +import static akka.http.javadsl.server.Unmarshaller.entityToString; + +public class CodingDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testResponseEncodingAccepted() { + //#responseEncodingAccepted + final Route route = responseEncodingAccepted(HttpEncodings.GZIP, () -> + complete("content") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("content"); + runRouteUnSealed(route, + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.DEFLATE))) + .assertRejections(Rejections.unacceptedResponseEncoding(HttpEncodings.GZIP)); + //#responseEncodingAccepted + } + + @Test + public void testEncodeResponse() { + //#encodeResponse + final Route route = encodeResponse(() -> complete("content")); + + // tests: + testRoute(route).run( + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.GZIP)) + .addHeader(AcceptEncoding.create(HttpEncodings.DEFLATE)) + ).assertHeaderExists(ContentEncoding.create(HttpEncodings.GZIP)); + + testRoute(route).run( + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.DEFLATE)) + ).assertHeaderExists(ContentEncoding.create(HttpEncodings.DEFLATE)); + + // This case failed! 
+// testRoute(route).run( +// HttpRequest.GET("/") +// .addHeader(AcceptEncoding.create(HttpEncodings.IDENTITY)) +// ).assertHeaderExists(ContentEncoding.create(HttpEncodings.IDENTITY)); + + //#encodeResponse + } + + @Test + public void testEncodeResponseWith() { + //#encodeResponseWith + final Route route = encodeResponseWith( + Collections.singletonList(Coder.Gzip), + () -> complete("content") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/")) + .assertHeaderExists(ContentEncoding.create(HttpEncodings.GZIP)); + + testRoute(route).run( + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.GZIP)) + .addHeader(AcceptEncoding.create(HttpEncodings.DEFLATE)) + ).assertHeaderExists(ContentEncoding.create(HttpEncodings.GZIP)); + + runRouteUnSealed(route, + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.DEFLATE)) + ).assertRejections(Rejections.unacceptedResponseEncoding(HttpEncodings.GZIP)); + + runRouteUnSealed(route, + HttpRequest.GET("/") + .addHeader(AcceptEncoding.create(HttpEncodings.IDENTITY)) + ).assertRejections(Rejections.unacceptedResponseEncoding(HttpEncodings.GZIP)); + //#encodeResponseWith + } + + @Test + public void testDecodeRequest() { + //#decodeRequest + final ByteString helloGzipped = Coder.Gzip.encode(ByteString.fromString("Hello")); + final ByteString helloDeflated = Coder.Deflate.encode(ByteString.fromString("Hello")); + + final Route route = decodeRequest(() -> + entity(entityToString(), content -> + complete("Request content: '" + content + "'") + ) + ); + + // tests: + testRoute(route).run( + HttpRequest.POST("/").withEntity(helloGzipped) + .addHeader(ContentEncoding.create(HttpEncodings.GZIP))) + .assertEntity("Request content: 'Hello'"); + + testRoute(route).run( + HttpRequest.POST("/").withEntity(helloDeflated) + .addHeader(ContentEncoding.create(HttpEncodings.DEFLATE))) + .assertEntity("Request content: 'Hello'"); + + testRoute(route).run( + HttpRequest.POST("/").withEntity("hello 
uncompressed") + .addHeader(ContentEncoding.create(HttpEncodings.IDENTITY))) + .assertEntity( "Request content: 'hello uncompressed'"); + //#decodeRequest + } + + @Test + public void testDecodeRequestWith() { + //#decodeRequestWith + final ByteString helloGzipped = Coder.Gzip.encode(ByteString.fromString("Hello")); + final ByteString helloDeflated = Coder.Deflate.encode(ByteString.fromString("Hello")); + + final Route route = decodeRequestWith(Coder.Gzip, () -> + entity(entityToString(), content -> + complete("Request content: '" + content + "'") + ) + ); + + // tests: + testRoute(route).run( + HttpRequest.POST("/").withEntity(helloGzipped) + .addHeader(ContentEncoding.create(HttpEncodings.GZIP))) + .assertEntity("Request content: 'Hello'"); + + runRouteUnSealed(route, + HttpRequest.POST("/").withEntity(helloDeflated) + .addHeader(ContentEncoding.create(HttpEncodings.DEFLATE))) + .assertRejections(Rejections.unsupportedRequestEncoding(HttpEncodings.GZIP)); + + runRouteUnSealed(route, + HttpRequest.POST("/").withEntity("hello") + .addHeader(ContentEncoding.create(HttpEncodings.IDENTITY))) + .assertRejections(Rejections.unsupportedRequestEncoding(HttpEncodings.GZIP)); + //#decodeRequestWith + } + +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequest.rst b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequest.rst index a8d38d2fda..75d3fe1f27 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequest.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequest.rst @@ -10,4 +10,5 @@ Decompresses the incoming request if it is ``gzip`` or ``deflate`` compressed. U Example ------- -..TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java#decodeRequest diff --git a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst index 430bfcb8e2..d4c151a2ac 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst @@ -10,4 +10,5 @@ Decodes the incoming request if it is encoded with one of the given encoders. If Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java#decodeRequestWith diff --git a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponse.rst b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponse.rst index 5030bc6c18..8a6eb7cf17 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponse.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponse.rst @@ -14,6 +14,7 @@ If the ``Accept-Encoding`` header is missing or empty or specifies an encoding o Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java#encodeResponse .. 
_RFC7231: http://tools.ietf.org/html/rfc7231#section-5.3.4 diff --git a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst index b3ffe1413a..f49ac53ef0 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst @@ -17,6 +17,7 @@ response encoding is used. Otherwise the request is rejected. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java#encodeResponseWith .. _RFC7231: http://tools.ietf.org/html/rfc7231#section-5.3.4 diff --git a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst index 100019fcee..c3ca799b13 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst @@ -10,4 +10,5 @@ Passes the request to the inner route if the request accepts the argument encodi Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/CodingDirectivesExamplesTest.java#responseEncodingAccepted From 0263774f8281c0a1b7277b3c3204624565a43d65 Mon Sep 17 00:00:00 2001 From: Hawstein Date: Mon, 6 Jun 2016 16:24:49 +0800 Subject: [PATCH 09/85] +doc #20466 example snippet for akka http java dsl: RouteDirectives (#20721) --- .../RouteDirectivesExamplesTest.java | 125 ++++++++++++++++++ .../directives/route-directives/complete.rst | 3 +- .../directives/route-directives/failWith.rst | 3 +- .../directives/route-directives/redirect.rst | 5 +- .../directives/route-directives/reject.rst | 3 +- 5 files changed, 133 insertions(+), 6 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java new file mode 100644 index 0000000000..7771245b79 --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. 
+ */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.HttpEntities; +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.Uri; +import akka.http.javadsl.model.headers.ContentType; +import akka.http.javadsl.model.ContentTypes; +import akka.http.javadsl.model.HttpResponse; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.Rejections; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.testkit.JUnitRouteTest; +import org.junit.Test; + +import java.util.Collections; + +public class RouteDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testComplete() { + //#complete + final Route route = route( + path("a", () -> complete(HttpResponse.create().withEntity("foo"))), + path("b", () -> complete(StatusCodes.OK)), + path("c", () -> complete(StatusCodes.CREATED, "bar")), + path("d", () -> complete(StatusCodes.get(201), "bar")), + path("e", () -> + complete(StatusCodes.CREATED, + Collections.singletonList(ContentType.create(ContentTypes.TEXT_PLAIN_UTF8)), + HttpEntities.create("bar"))), + path("f", () -> + complete(StatusCodes.get(201), + Collections.singletonList(ContentType.create(ContentTypes.TEXT_PLAIN_UTF8)), + HttpEntities.create("bar"))), + path("g", () -> complete("baz")) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/a")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("foo"); + + testRoute(route).run(HttpRequest.GET("/b")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("OK"); + + testRoute(route).run(HttpRequest.GET("/c")) + .assertStatusCode(StatusCodes.CREATED) + .assertEntity("bar"); + + testRoute(route).run(HttpRequest.GET("/d")) + .assertStatusCode(StatusCodes.CREATED) + .assertEntity("bar"); + + testRoute(route).run(HttpRequest.GET("/e")) + .assertStatusCode(StatusCodes.CREATED) + .assertHeaderExists(ContentType.create(ContentTypes.TEXT_PLAIN_UTF8)) + .assertEntity("bar"); + + testRoute(route).run(HttpRequest.GET("/f")) + 
.assertStatusCode(StatusCodes.CREATED) + .assertHeaderExists(ContentType.create(ContentTypes.TEXT_PLAIN_UTF8)) + .assertEntity("bar"); + + testRoute(route).run(HttpRequest.GET("/g")) + .assertStatusCode(StatusCodes.OK) + .assertEntity("baz"); + //#complete + } + + @Test + public void testReject() { + //#reject + final Route route = route( + path("a", this::reject), // don't handle here, continue on + path("a", () -> complete("foo")), + path("b", () -> reject(Rejections.validationRejection("Restricted!"))) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/a")) + .assertEntity("foo"); + + runRouteUnSealed(route, HttpRequest.GET("/b")) + .assertRejections(Rejections.validationRejection("Restricted!")); + //#reject + } + + @Test + public void testRedirect() { + //#redirect + final Route route = pathPrefix("foo", () -> + route( + pathSingleSlash(() -> complete("yes")), + pathEnd(() -> redirect(Uri.create("/foo/"), StatusCodes.PERMANENT_REDIRECT)) + ) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/foo/")) + .assertEntity("yes"); + + testRoute(route).run(HttpRequest.GET("/foo")) + .assertStatusCode(StatusCodes.PERMANENT_REDIRECT) + .assertEntity("The request, and all future requests should be repeated using this URI."); + //#redirect + } + + @Test + public void testFailWith() { + //#failWith + final Route route = path("foo", () -> + failWith(new RuntimeException("Oops.")) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/foo")) + .assertStatusCode(StatusCodes.INTERNAL_SERVER_ERROR) + .assertEntity("There was an internal server error."); + //#failWith + } +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/complete.rst b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/complete.rst index 0380a70a9b..d5e8f70629 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/complete.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/complete.rst @@ -17,4 +17,5 @@ 
Please note that the ``complete`` directive has multiple variants, like Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java#complete diff --git a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/failWith.rst b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/failWith.rst index 5e5aae085c..66d6e1e8d6 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/failWith.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/failWith.rst @@ -24,4 +24,5 @@ exception. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java#failWith diff --git a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/redirect.rst b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/redirect.rst index bd98427222..07f9db4377 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/redirect.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/redirect.rst @@ -10,9 +10,8 @@ Completes the request with a redirection response to a given targer URI and of a ``redirect`` is a convenience helper for completing the request with a redirection response. It is equivalent to this snippet relying on the ``complete`` directive: -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. 
- Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java#redirect diff --git a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/reject.rst b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/reject.rst index 2a21de1c68..e31ec91878 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/route-directives/reject.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/route-directives/reject.rst @@ -19,4 +19,5 @@ modifier for "filtering out" certain cases. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/RouteDirectivesExamplesTest.java#reject From fc58fa1f0555f365ed5fcadc3b85f4c9026ccd7e Mon Sep 17 00:00:00 2001 From: Stefano Bonetti Date: Mon, 6 Jun 2016 10:28:04 +0100 Subject: [PATCH 10/85] Amending docs for KillSwitch #20708 (#20716) --- akka-docs/rst/java/stream/stream-dynamic.rst | 8 ++++---- akka-docs/rst/scala/stream/stream-dynamic.rst | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/akka-docs/rst/java/stream/stream-dynamic.rst b/akka-docs/rst/java/stream/stream-dynamic.rst index 77a0e0a436..f90cbdcacb 100644 --- a/akka-docs/rst/java/stream/stream-dynamic.rst +++ b/akka-docs/rst/java/stream/stream-dynamic.rst @@ -11,12 +11,12 @@ Controlling graph completion with KillSwitch A ``KillSwitch`` allows the completion of graphs of ``FlowShape`` from the outside. It consists of a flow element that can be linked to a graph of ``FlowShape`` needing completion control. -The ``KillSwitch`` trait allows to complete or fail the graph(s). 
+The ``KillSwitch`` interface allows to: -.. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/KillSwitch.scala - :include: kill-switch +* complete the graph(s) via ``shutdown()`` +* fail the graph(s) via ``abort(Throwable error)`` -After the first call to either ``shutdown`` and ``abort``, all subsequent calls to any of these methods will be ignored. +After the first call to either ``shutdown`` or ``abort``, all subsequent calls to any of these methods will be ignored. Graph completion is performed by both * completing its downstream diff --git a/akka-docs/rst/scala/stream/stream-dynamic.rst b/akka-docs/rst/scala/stream/stream-dynamic.rst index 8716f934a6..cd4f5d6690 100644 --- a/akka-docs/rst/scala/stream/stream-dynamic.rst +++ b/akka-docs/rst/scala/stream/stream-dynamic.rst @@ -16,7 +16,7 @@ The ``KillSwitch`` trait allows to complete or fail the graph(s). .. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/KillSwitch.scala :include: kill-switch -After the first call to either ``shutdown`` and ``abort``, all subsequent calls to any of these methods will be ignored. +After the first call to either ``shutdown`` or ``abort``, all subsequent calls to any of these methods will be ignored. 
Graph completion is performed by both * completing its downstream From 87a919608871b5c3e39a11a58be395658d3db253 Mon Sep 17 00:00:00 2001 From: Hawstein Date: Tue, 7 Jun 2016 02:45:36 +0800 Subject: [PATCH 11/85] +doc #20466 example snippet for akka http java dsl: FormFieldDirectives (#20731) --- .../FormFieldDirectivesExamplesTest.java | 137 ++++++++++++++++++ .../form-field-directives/formField.rst | 3 +- .../form-field-directives/formFieldList.rst | 3 +- .../form-field-directives/formFieldMap.rst | 3 +- .../formFieldMultiMap.rst | 3 +- 5 files changed, 145 insertions(+), 4 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java new file mode 100644 index 0000000000..69c0e11239 --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. 
+ */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.FormData; +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.server.StringUnmarshallers; +import akka.http.javadsl.testkit.JUnitRouteTest; +import akka.japi.Pair; +import org.junit.Test; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class FormFieldDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testFormField() { + //#formField + final Route route = route( + formField("color", color -> + complete("The color is '" + color + "'") + ), + formField(StringUnmarshallers.INTEGER, "id", id -> + complete("The id is '" + id + "'") + ) + ); + + // tests: + final FormData formData = FormData.create(Pair.create("color", "blue")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formData.toEntity())) + .assertEntity("The color is 'blue'"); + + testRoute(route).run(HttpRequest.GET("/")) + .assertStatusCode(StatusCodes.BAD_REQUEST) + .assertEntity("Request is missing required form field 'color'"); + //#formField + } + + @Test + public void testFormFieldMap() { + //#formFieldMap + final Function, String> mapToString = map -> + map.entrySet() + .stream() + .map(e -> e.getKey() + " = '" + e.getValue() +"'") + .collect(Collectors.joining(", ")); + + + final Route route = formFieldMap(fields -> + complete("The form fields are " + mapToString.apply(fields)) + ); + + // tests: + final FormData formDataDiffKey = + FormData.create( + Pair.create("color", "blue"), + Pair.create("count", "42")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formDataDiffKey.toEntity())) + .assertEntity("The form fields are color = 'blue', count = '42'"); + + final FormData formDataSameKey = + FormData.create( + Pair.create("x", "1"), + Pair.create("x", "5")); + 
testRoute(route).run(HttpRequest.POST("/").withEntity(formDataSameKey.toEntity())) + .assertEntity( "The form fields are x = '5'"); + //#formFieldMap + } + + @Test + public void testFormFieldMultiMap() { + //#formFieldMultiMap + final Function>, String> mapToString = map -> + map.entrySet() + .stream() + .map(e -> e.getKey() + " -> " + e.getValue().size()) + .collect(Collectors.joining(", ")); + + final Route route = formFieldMultiMap(fields -> + complete("There are form fields " + mapToString.apply(fields)) + ); + + // test: + final FormData formDataDiffKey = + FormData.create( + Pair.create("color", "blue"), + Pair.create("count", "42")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formDataDiffKey.toEntity())) + .assertEntity("There are form fields color -> 1, count -> 1"); + + final FormData formDataSameKey = + FormData.create( + Pair.create("x", "23"), + Pair.create("x", "4"), + Pair.create("x", "89")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formDataSameKey.toEntity())) + .assertEntity("There are form fields x -> 3"); + //#formFieldMultiMap + } + + @Test + public void testFormFieldList() { + //#formFieldList + final Function>, String> listToString = list -> + list.stream() + .map(e -> e.getKey() + " = '" + e.getValue() +"'") + .collect(Collectors.joining(", ")); + + final Route route = formFieldList(fields -> + complete("The form fields are " + listToString.apply(fields)) + ); + + // tests: + final FormData formDataDiffKey = + FormData.create( + Pair.create("color", "blue"), + Pair.create("count", "42")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formDataDiffKey.toEntity())) + .assertEntity("The form fields are color = 'blue', count = '42'"); + + final FormData formDataSameKey = + FormData.create( + Pair.create("x", "23"), + Pair.create("x", "4"), + Pair.create("x", "89")); + testRoute(route).run(HttpRequest.POST("/").withEntity(formDataSameKey.toEntity())) + .assertEntity("The form fields are x = '23', x = '4', x = 
'89'"); + //#formFieldList + } +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formField.rst b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formField.rst index 6711ae2b37..5b9265ea8d 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formField.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formField.rst @@ -8,4 +8,5 @@ Allows extracting a single Form field sent in the request. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java#formField diff --git a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldList.rst b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldList.rst index 260e57db43..7f6ba96934 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldList.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldList.rst @@ -17,4 +17,5 @@ can cause performance issues or even an ``OutOfMemoryError`` s. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java#formFieldList diff --git a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMap.rst b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMap.rst index f8c34c0f8e..5b678f0518 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMap.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMap.rst @@ -16,4 +16,5 @@ See :ref:`-formFieldList-java-` for details. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java#formFieldMap diff --git a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst index 7e4322023c..a922975c5b 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst @@ -19,4 +19,5 @@ Use of this directive can result in performance degradation or even in ``OutOfMe Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/FormFieldDirectivesExamplesTest.java#formFieldMultiMap From bf76d6c389167acd57114cac04ad33c355c99e2b Mon Sep 17 00:00:00 2001 From: Michal Sitko Date: Tue, 7 Jun 2016 00:17:23 +0200 Subject: [PATCH 12/85] +doc #20699 improve docs for akka-http (#20722) Made imports in docs snippets more complete and a few minor improvements --- .../javadsl/WebSocketClientExampleTest.java | 13 +- .../rst/java/http/common/marshalling.rst | 2 +- .../rst/java/http/common/unmarshalling.rst | 2 +- akka-docs/rst/java/http/routing-dsl/index.rst | 2 +- .../http/scaladsl/HttpClientExampleSpec.scala | 46 +++-- .../http/scaladsl/HttpServerExampleSpec.scala | 82 +++++---- .../http/scaladsl/HttpsExamplesSpec.scala | 7 +- .../http/scaladsl/SprayJsonExampleSpec.scala | 16 +- .../scaladsl/WebSocketClientExampleSpec.scala | 164 +++++++++--------- .../server/WebSocketExampleSpec.scala | 9 +- .../TimeoutDirectivesExamplesSpec.scala | 2 - .../rst/scala/http/common/marshalling.rst | 2 +- .../rst/scala/http/common/unmarshalling.rst | 2 +- akka-docs/rst/scala/http/introduction.rst | 7 +- .../rst/scala/http/routing-dsl/index.rst | 2 +- 15 files changed, 197 insertions(+), 161 deletions(-) diff --git a/akka-docs/rst/java/code/docs/http/javadsl/WebSocketClientExampleTest.java b/akka-docs/rst/java/code/docs/http/javadsl/WebSocketClientExampleTest.java index 1348c65636..0fd90fddc4 100644 --- a/akka-docs/rst/java/code/docs/http/javadsl/WebSocketClientExampleTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/WebSocketClientExampleTest.java @@ -14,7 +14,6 @@ import akka.http.javadsl.model.ws.TextMessage; import akka.http.javadsl.model.ws.WebSocketRequest; import akka.http.javadsl.model.ws.WebSocketUpgradeResponse; import akka.japi.Pair; -import akka.japi.function.Procedure; import akka.stream.ActorMaterializer; import akka.stream.Materializer; import akka.stream.javadsl.Flow; @@ -63,9 +62,9 @@ public class WebSocketClientExampleTest { // 
The first value in the pair is a CompletionStage that // completes when the WebSocket request has connected successfully (or failed) final CompletionStage connected = pair.first().thenApply(upgrade -> { - // just like a regular http request we can get 404 NotFound, - // with a response body, that will be available from upgrade.response - if (upgrade.response().status().equals(StatusCodes.OK)) { + // just like a regular http request we can access response status which is available via upgrade.response.status + // status code 101 (Switching Protocols) indicates that server support WebSockets + if (upgrade.response().status().equals(StatusCodes.SWITCHING_PROTOCOLS)) { return Done.getInstance(); } else { throw new RuntimeException("Connection failed: " + upgrade.response().status()); @@ -220,9 +219,9 @@ public class WebSocketClientExampleTest { CompletionStage connected = upgradeCompletion.thenApply(upgrade-> { - // just like a regular http request we can get 404 NotFound, - // with a response body, that will be available from upgrade.response - if (upgrade.response().status().equals(StatusCodes.OK)) { + // just like a regular http request we can access response status which is available via upgrade.response.status + // status code 101 (Switching Protocols) indicates that server support WebSockets + if (upgrade.response().status().equals(StatusCodes.SWITCHING_PROTOCOLS)) { return Done.getInstance(); } else { throw new RuntimeException(("Connection failed: " + upgrade.response().status())); diff --git a/akka-docs/rst/java/http/common/marshalling.rst b/akka-docs/rst/java/http/common/marshalling.rst index cf098e1b88..b3feb49fed 100644 --- a/akka-docs/rst/java/http/common/marshalling.rst +++ b/akka-docs/rst/java/http/common/marshalling.rst @@ -119,7 +119,7 @@ If, however, your marshaller also needs to set things like the response status c or any headers then a ``ToEntityMarshaller[T]`` won't work. 
You'll need to fall down to providing a ``ToResponseMarshaller[T]`` or a ``ToRequestMarshaller[T]`` directly. -For writing you own marshallers you won't have to "manually" implement the ``Marshaller`` trait directly. +For writing your own marshallers you won't have to "manually" implement the ``Marshaller`` trait directly. Rather, it should be possible to use one of the convenience construction helpers defined on the ``Marshaller`` companion: diff --git a/akka-docs/rst/java/http/common/unmarshalling.rst b/akka-docs/rst/java/http/common/unmarshalling.rst index 9aaea15c66..37979b5c91 100644 --- a/akka-docs/rst/java/http/common/unmarshalling.rst +++ b/akka-docs/rst/java/http/common/unmarshalling.rst @@ -77,7 +77,7 @@ Custom Unmarshallers Akka HTTP gives you a few convenience tools for constructing unmarshallers for your own types. Usually you won't have to "manually" implement the ``Unmarshaller`` trait directly. -Rather, it should be possible to use one of the convenience construction helpers defined on the ``Marshaller`` +Rather, it should be possible to use one of the convenience construction helpers defined on the ``Unmarshaller`` companion: TODO rewrite sample for Java diff --git a/akka-docs/rst/java/http/routing-dsl/index.rst b/akka-docs/rst/java/http/routing-dsl/index.rst index d5e754af0b..f84f20759f 100644 --- a/akka-docs/rst/java/http/routing-dsl/index.rst +++ b/akka-docs/rst/java/http/routing-dsl/index.rst @@ -32,7 +32,7 @@ Bind failures ^^^^^^^^^^^^^ For example the server might be unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the CompletionStage's completion: +In this case the "binding future" will fail immediately, and we can react to if by listening on the CompletionStage's completion: .. 
includecode:: ../../code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java :include: binding-failure-high-level-example diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala index 8f13d319cb..85603e37ad 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala @@ -5,36 +5,48 @@ package docs.http.scaladsl import akka.actor.{ ActorLogging, ActorSystem } -import akka.stream.{ ActorMaterializerSettings } import akka.util.ByteString +import docs.CompileOnlySpec import org.scalatest.{ Matchers, WordSpec } -class HttpClientExampleSpec extends WordSpec with Matchers { +class HttpClientExampleSpec extends WordSpec with Matchers with CompileOnlySpec { - "outgoing-connection-example" in { - pending // compile-time only test + "outgoing-connection-example" in compileOnlySpec { //#outgoing-connection-example + import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer import akka.stream.scaladsl._ import scala.concurrent.Future + import scala.util.{ Failure, Success } - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() + object WebClient { + def main(args: Array[String]): Unit = { + implicit val system = ActorSystem() + implicit val materializer = ActorMaterializer() + implicit val executionContext = system.dispatcher - val connectionFlow: Flow[HttpRequest, HttpResponse, Future[Http.OutgoingConnection]] = - Http().outgoingConnection("akka.io") - val responseFuture: Future[HttpResponse] = - Source.single(HttpRequest(uri = "/")) - .via(connectionFlow) - .runWith(Sink.head) + val connectionFlow: Flow[HttpRequest, HttpResponse, Future[Http.OutgoingConnection]] = + Http().outgoingConnection("akka.io") + val responseFuture: Future[HttpResponse] = + 
Source.single(HttpRequest(uri = "/")) + .via(connectionFlow) + .runWith(Sink.head) + + responseFuture.andThen { + case Success(_) => println("request succeded") + case Failure(_) => println("request failed") + }.andThen { + case _ => system.terminate() + } + } + } //#outgoing-connection-example } - "host-level-example" in { - pending // compile-time only test + "host-level-example" in compileOnlySpec { //#host-level-example import akka.http.scaladsl.Http import akka.http.scaladsl.model._ @@ -55,8 +67,7 @@ class HttpClientExampleSpec extends WordSpec with Matchers { //#host-level-example } - "single-request-example" in { - pending // compile-time only test + "single-request-example" in compileOnlySpec { //#single-request-example import akka.http.scaladsl.Http import akka.http.scaladsl.model._ @@ -72,8 +83,7 @@ class HttpClientExampleSpec extends WordSpec with Matchers { //#single-request-example } - "single-request-in-actor-example" in { - pending // compile-time only test + "single-request-in-actor-example" in compileOnlySpec { //#single-request-in-actor-example import akka.actor.Actor import akka.http.scaladsl.Http diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala index 9514448a70..6d9e964297 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala @@ -5,10 +5,6 @@ package docs.http.scaladsl import akka.event.LoggingAdapter -import akka.http.scaladsl.Http.ServerBinding -import akka.http.scaladsl.model._ -import akka.stream.ActorMaterializer -import akka.stream.scaladsl.{ Flow, Sink } import akka.testkit.TestActors import docs.CompileOnlySpec import org.scalatest.{ Matchers, WordSpec } @@ -44,37 +40,50 @@ class HttpServerExampleSpec extends WordSpec with Matchers "binding-failure-high-level-example" in compileOnlySpec { import akka.actor.ActorSystem 
import akka.http.scaladsl.Http + import akka.http.scaladsl.Http.ServerBinding import akka.http.scaladsl.server.Directives._ import akka.stream.ActorMaterializer - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - // needed for the future onFailure in the end - implicit val executionContext = system.dispatcher + import scala.concurrent.Future - val handler = get { - complete("Hello world!") + object WebServer { + def main(args: Array[String]) { + implicit val system = ActorSystem() + implicit val materializer = ActorMaterializer() + // needed for the future onFailure in the end + implicit val executionContext = system.dispatcher + + val handler = get { + complete("Hello world!") + } + + // let's say the OS won't allow us to bind to 80. + val (host, port) = ("localhost", 80) + val bindingFuture: Future[ServerBinding] = + Http().bindAndHandle(handler, host, port) + + bindingFuture.onFailure { + case ex: Exception => + log.error(ex, "Failed to bind to {}:{}!", host, port) + } + } } - - // let's say the OS won't allow us to bind to 80. 
- val (host, port) = ("localhost", 80) - val bindingFuture: Future[ServerBinding] = - Http().bindAndHandle(handler, host, port) - - bindingFuture.onFailure { - case ex: Exception => - log.error(ex, "Failed to bind to {}:{}!", host, port) - } - } // mock values: - import akka.http.scaladsl.Http - import akka.actor.ActorSystem - val handleConnections: Sink[Http.IncomingConnection, Future[Http.ServerBinding]] = + val handleConnections = { + import akka.stream.scaladsl.Sink Sink.ignore.mapMaterializedValue(_ => Future.failed(new Exception(""))) + } "binding-failure-handling" in compileOnlySpec { + import akka.actor.ActorSystem + import akka.http.scaladsl.Http + import akka.http.scaladsl.Http.ServerBinding + import akka.stream.ActorMaterializer + + import scala.concurrent.Future + implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() // needed for the future onFailure in the end @@ -102,11 +111,8 @@ class HttpServerExampleSpec extends WordSpec with Matchers import akka.actor.ActorSystem import akka.actor.ActorRef import akka.http.scaladsl.Http - import akka.http.scaladsl.model.HttpEntity - import akka.http.scaladsl.model.ContentTypes - import akka.http.scaladsl.server.Directives._ import akka.stream.ActorMaterializer - import scala.io.StdIn + import akka.stream.scaladsl.Flow implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() @@ -132,8 +138,9 @@ class HttpServerExampleSpec extends WordSpec with Matchers "connection-stream-failure-handling" in compileOnlySpec { import akka.actor.ActorSystem import akka.http.scaladsl.Http - import akka.http.scaladsl.model.{ ContentTypes, HttpEntity } + import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer + import akka.stream.scaladsl.Flow implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() @@ -203,6 +210,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers } "low-level-server-example" in compileOnlySpec { + import 
akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.HttpMethods._ import akka.http.scaladsl.model._ @@ -286,7 +294,9 @@ class HttpServerExampleSpec extends WordSpec with Matchers } "minimal-routing-example" in compileOnlySpec { + import akka.actor.ActorSystem import akka.http.scaladsl.Http + import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Directives._ import akka.stream.ActorMaterializer import scala.io.StdIn @@ -319,13 +329,14 @@ class HttpServerExampleSpec extends WordSpec with Matchers "long-routing-example" in compileOnlySpec { //#long-routing-example - import akka.actor.ActorRef + import akka.actor.{ActorRef, ActorSystem} import akka.http.scaladsl.coding.Deflate import akka.http.scaladsl.marshalling.ToResponseMarshaller import akka.http.scaladsl.model.StatusCodes.MovedPermanently import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller import akka.pattern.ask + import akka.stream.ActorMaterializer import akka.util.Timeout // types used by the API routes @@ -427,6 +438,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers "stream random numbers" in compileOnlySpec { //#stream-random-numbers + import akka.actor.ActorSystem import akka.stream.scaladsl._ import akka.util.ByteString import akka.http.scaladsl.Http @@ -483,15 +495,15 @@ class HttpServerExampleSpec extends WordSpec with Matchers "interact with an actor" in compileOnlySpec { //#actor-interaction import akka.actor.ActorSystem - import akka.actor.Props - import scala.concurrent.duration._ - import akka.util.Timeout - import akka.pattern.ask - import akka.stream.ActorMaterializer import akka.http.scaladsl.Http + import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ + import akka.pattern.ask + import akka.stream.ActorMaterializer + import akka.util.Timeout import 
spray.json.DefaultJsonProtocol._ + import scala.concurrent.duration._ import scala.io.StdIn object WebServer { diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpsExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpsExamplesSpec.scala index ff30a2479c..64aeafa5e2 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/HttpsExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpsExamplesSpec.scala @@ -9,13 +9,12 @@ import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import akka.util.ByteString import com.typesafe.sslconfig.akka.AkkaSSLConfig +import docs.CompileOnlySpec import org.scalatest.{ Matchers, WordSpec } -class HttpsExamplesSpec extends WordSpec with Matchers { - - "disable SNI for connection" in { - pending // compile-time only test +class HttpsExamplesSpec extends WordSpec with Matchers with CompileOnlySpec { + "disable SNI for connection" in compileOnlySpec { val unsafeHost = "example.com" //#disable-sni-connection implicit val system = ActorSystem() diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala index fe5208a5f2..7da0505134 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala @@ -8,8 +8,6 @@ import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import akka.http.scaladsl.server.Directives import org.scalatest.{ Matchers, WordSpec } -import scala.concurrent.Future - class SprayJsonExampleSpec extends WordSpec with Matchers { def compileOnlySpec(body: => Unit) = () @@ -53,6 +51,7 @@ class SprayJsonExampleSpec extends WordSpec with Matchers { "second-spray-json-example" in compileOnlySpec { //#second-spray-json-example import akka.actor.ActorSystem + import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import akka.Done import akka.http.scaladsl.server.Route 
@@ -61,6 +60,10 @@ class SprayJsonExampleSpec extends WordSpec with Matchers { import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import spray.json.DefaultJsonProtocol._ + import scala.io.StdIn + + import scala.concurrent.Future + object WebServer { // domain model @@ -80,6 +83,8 @@ class SprayJsonExampleSpec extends WordSpec with Matchers { // needed to run the route implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() + // needed for the future map/flatmap in the end + implicit val executionContext = system.dispatcher val route: Route = get { @@ -104,6 +109,13 @@ class SprayJsonExampleSpec extends WordSpec with Matchers { } } + val bindingFuture = Http().bindAndHandle(route, "localhost", 8080) + println(s"Server online at http://localhost:8080/\nPress RETURN to stop...") + StdIn.readLine() // let it run until user presses return + bindingFuture + .flatMap(_.unbind()) // trigger unbinding from the port + .onComplete(_ ⇒ system.terminate()) // and shutdown when done + } } //#second-spray-json-example diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/WebSocketClientExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/WebSocketClientExampleSpec.scala index 67baf6628a..fdc88ff200 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/WebSocketClientExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/WebSocketClientExampleSpec.scala @@ -3,17 +3,14 @@ */ package docs.http.scaladsl -import akka.actor.ActorSystem -import akka.http.scaladsl.model.headers.{ Authorization, BasicHttpCredentials } import docs.CompileOnlySpec import org.scalatest.{ Matchers, WordSpec } -import scala.concurrent.Promise - class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnlySpec { "singleWebSocket-request-example" in compileOnlySpec { //#single-WebSocket-request + import akka.actor.ActorSystem import akka.{ Done, NotUsed } import akka.http.scaladsl.Http import 
akka.stream.ActorMaterializer @@ -23,59 +20,60 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly import scala.concurrent.Future - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - import system.dispatcher + object SingleWebSocketRequest { + def main(args: Array[String]) = { + implicit val system = ActorSystem() + implicit val materializer = ActorMaterializer() + import system.dispatcher - // print each incoming strict text message - val printSink: Sink[Message, Future[Done]] = - Sink.foreach { - case message: TextMessage.Strict => - println(message.text) - } + // print each incoming strict text message + val printSink: Sink[Message, Future[Done]] = + Sink.foreach { + case message: TextMessage.Strict => + println(message.text) + } - val helloSource: Source[Message, NotUsed] = - Source.single(TextMessage("hello world!")) + val helloSource: Source[Message, NotUsed] = + Source.single(TextMessage("hello world!")) - // the Future[Done] is the materialized value of Sink.foreach - // and it is completed when the stream completes - val flow: Flow[Message, Message, Future[Done]] = - Flow.fromSinkAndSourceMat(printSink, helloSource)(Keep.left) + // the Future[Done] is the materialized value of Sink.foreach + // and it is completed when the stream completes + val flow: Flow[Message, Message, Future[Done]] = + Flow.fromSinkAndSourceMat(printSink, helloSource)(Keep.left) - // upgradeResponse is a Future[WebSocketUpgradeResponse] that - // completes or fails when the connection succeeds or fails - // and closed is a Future[Done] representing the stream completion from above - val (upgradeResponse, closed) = - Http().singleWebSocketRequest(WebSocketRequest("ws://echo.websocket.org"), flow) + // upgradeResponse is a Future[WebSocketUpgradeResponse] that + // completes or fails when the connection succeeds or fails + // and closed is a Future[Done] representing the stream completion from above + val 
(upgradeResponse, closed) = + Http().singleWebSocketRequest(WebSocketRequest("ws://echo.websocket.org"), flow) - val connected = upgradeResponse.map { upgrade => - // just like a regular http request we can get 404 NotFound, - // with a response body, that will be available from upgrade.response - if (upgrade.response.status == StatusCodes.OK) { - Done - } else { - throw new RuntimeException(s"Connection failed: ${upgrade.response.status}") + val connected = upgradeResponse.map { upgrade => + // just like a regular http request we can access response status which is available via upgrade.response.status + // status code 101 (Switching Protocols) indicates that server support WebSockets + if (upgrade.response.status == StatusCodes.SwitchingProtocols) { + Done + } else { + throw new RuntimeException(s"Connection failed: ${upgrade.response.status}") + } + } + + // in a real application you would not side effect here + // and handle errors more carefully + connected.onComplete(println) + closed.foreach(_ => println("closed")) } } - - // in a real application you would not side effect here - // and handle errors more carefully - connected.onComplete(println) - closed.foreach(_ => println("closed")) - //#single-WebSocket-request } "half-closed-WebSocket-closing-example" in compileOnlySpec { + import akka.actor.ActorSystem import akka.{ Done, NotUsed } import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import akka.stream.scaladsl._ - import akka.http.scaladsl.model._ import akka.http.scaladsl.model.ws._ - import scala.concurrent.Future - implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() import system.dispatcher @@ -97,14 +95,13 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly } "half-closed-WebSocket-working-example" in compileOnlySpec { - import akka.{ Done, NotUsed } + import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import 
akka.stream.scaladsl._ - import akka.http.scaladsl.model._ import akka.http.scaladsl.model.ws._ - import scala.concurrent.Future + import scala.concurrent.Promise implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() @@ -130,14 +127,14 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly } "half-closed-WebSocket-finite-working-example" in compileOnlySpec { + import akka.actor.ActorSystem import akka.{ Done, NotUsed } import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import akka.stream.scaladsl._ - import akka.http.scaladsl.model._ import akka.http.scaladsl.model.ws._ - import scala.concurrent.Future + import scala.concurrent.Promise implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() @@ -163,11 +160,14 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly } "authorized-singleWebSocket-request-example" in compileOnlySpec { + import akka.actor.ActorSystem import akka.NotUsed import akka.http.scaladsl.Http import akka.stream.ActorMaterializer import akka.stream.scaladsl._ + import akka.http.scaladsl.model.headers.{ Authorization, BasicHttpCredentials } import akka.http.scaladsl.model.ws._ + implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() import collection.immutable.Seq @@ -187,6 +187,7 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly "WebSocketClient-flow-example" in compileOnlySpec { //#WebSocket-client-flow + import akka.actor.ActorSystem import akka.Done import akka.http.scaladsl.Http import akka.stream.ActorMaterializer @@ -196,48 +197,51 @@ class WebSocketClientExampleSpec extends WordSpec with Matchers with CompileOnly import scala.concurrent.Future - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - import system.dispatcher + object WebSocketClientFlow { + def main(args: Array[String]) = { + implicit val system = 
ActorSystem() + implicit val materializer = ActorMaterializer() + import system.dispatcher - // Future[Done] is the materialized value of Sink.foreach, - // emitted when the stream completes - val incoming: Sink[Message, Future[Done]] = - Sink.foreach[Message] { - case message: TextMessage.Strict => - println(message.text) - } + // Future[Done] is the materialized value of Sink.foreach, + // emitted when the stream completes + val incoming: Sink[Message, Future[Done]] = + Sink.foreach[Message] { + case message: TextMessage.Strict => + println(message.text) + } - // send this as a message over the WebSocket - val outgoing = Source.single(TextMessage("hello world!")) + // send this as a message over the WebSocket + val outgoing = Source.single(TextMessage("hello world!")) - // flow to use (note: not re-usable!) - val webSocketFlow = Http().webSocketClientFlow(WebSocketRequest("ws://echo.websocket.org")) + // flow to use (note: not re-usable!) + val webSocketFlow = Http().webSocketClientFlow(WebSocketRequest("ws://echo.websocket.org")) - // the materialized value is a tuple with - // upgradeResponse is a Future[WebSocketUpgradeResponse] that - // completes or fails when the connection succeeds or fails - // and closed is a Future[Done] with the stream completion from the incoming sink - val (upgradeResponse, closed) = - outgoing - .viaMat(webSocketFlow)(Keep.right) // keep the materialized Future[WebSocketUpgradeResponse] - .toMat(incoming)(Keep.both) // also keep the Future[Done] - .run() + // the materialized value is a tuple with + // upgradeResponse is a Future[WebSocketUpgradeResponse] that + // completes or fails when the connection succeeds or fails + // and closed is a Future[Done] with the stream completion from the incoming sink + val (upgradeResponse, closed) = + outgoing + .viaMat(webSocketFlow)(Keep.right) // keep the materialized Future[WebSocketUpgradeResponse] + .toMat(incoming)(Keep.both) // also keep the Future[Done] + .run() - // just like a regular 
http request we can get 404 NotFound etc. - // that will be available from upgrade.response - val connected = upgradeResponse.flatMap { upgrade => - if (upgrade.response.status == StatusCodes.OK) { - Future.successful(Done) - } else { - throw new RuntimeException(s"Connection failed: ${upgrade.response.status}") + // just like a regular http request we can access response status which is available via upgrade.response.status + // status code 101 (Switching Protocols) indicates that server support WebSockets + val connected = upgradeResponse.flatMap { upgrade => + if (upgrade.response.status == StatusCodes.SwitchingProtocols) { + Future.successful(Done) + } else { + throw new RuntimeException(s"Connection failed: ${upgrade.response.status}") + } + } + + // in a real application you would not side effect here + connected.onComplete(println) + closed.foreach(_ => println("closed")) } } - - // in a real application you would not side effect here - connected.onComplete(println) - closed.foreach(_ => println("closed")) - //#WebSocket-client-flow } diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala index a1c4541ba0..fa552bea27 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala @@ -6,11 +6,11 @@ package docs.http.scaladsl.server import akka.http.scaladsl.model.ws.BinaryMessage import akka.stream.scaladsl.Sink +import docs.CompileOnlySpec import org.scalatest.{ Matchers, WordSpec } -class WebSocketExampleSpec extends WordSpec with Matchers { - "core-example" in { - pending // compile-time only test +class WebSocketExampleSpec extends WordSpec with Matchers with CompileOnlySpec { + "core-example" in compileOnlySpec { //#websocket-example-using-core import akka.actor.ActorSystem import akka.stream.ActorMaterializer @@ -64,8 +64,7 @@ 
class WebSocketExampleSpec extends WordSpec with Matchers { .flatMap(_.unbind()) // trigger unbinding from the port .onComplete(_ => system.terminate()) // and shutdown when done } - "routing-example" in { - pending // compile-time only test + "routing-example" in compileOnlySpec { import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl.{ Source, Flow } diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/TimeoutDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/TimeoutDirectivesExamplesSpec.scala index f0bda7181a..3ea141bce4 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/TimeoutDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/TimeoutDirectivesExamplesSpec.scala @@ -55,8 +55,6 @@ class TimeoutDirectivesExamplesSpec extends RoutingSpec with CompileOnlySpec { } "allow mapping the response" in compileOnlySpec { - pending // compile only spec since requires actuall Http server to be run - //#withRequestTimeoutResponse val timeoutResponse = HttpResponse( StatusCodes.EnhanceYourCalm, diff --git a/akka-docs/rst/scala/http/common/marshalling.rst b/akka-docs/rst/scala/http/common/marshalling.rst index 487fe73e3b..c97867d637 100644 --- a/akka-docs/rst/scala/http/common/marshalling.rst +++ b/akka-docs/rst/scala/http/common/marshalling.rst @@ -118,7 +118,7 @@ If, however, your marshaller also needs to set things like the response status c or any headers then a ``ToEntityMarshaller[T]`` won't work. You'll need to fall down to providing a ``ToResponseMarshaller[T]`` or a ``ToRequestMarshaller[T]`` directly. -For writing you own marshallers you won't have to "manually" implement the ``Marshaller`` trait directly. +For writing your own marshallers you won't have to "manually" implement the ``Marshaller`` trait directly. 
Rather, it should be possible to use one of the convenience construction helpers defined on the ``Marshaller`` companion: diff --git a/akka-docs/rst/scala/http/common/unmarshalling.rst b/akka-docs/rst/scala/http/common/unmarshalling.rst index 9f6426335c..be1e772e84 100644 --- a/akka-docs/rst/scala/http/common/unmarshalling.rst +++ b/akka-docs/rst/scala/http/common/unmarshalling.rst @@ -76,7 +76,7 @@ Custom Unmarshallers Akka HTTP gives you a few convenience tools for constructing unmarshallers for your own types. Usually you won't have to "manually" implement the ``Unmarshaller`` trait directly. -Rather, it should be possible to use one of the convenience construction helpers defined on the ``Marshaller`` +Rather, it should be possible to use one of the convenience construction helpers defined on the ``Unmarshaller`` companion: .. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshaller.scala diff --git a/akka-docs/rst/scala/http/introduction.rst b/akka-docs/rst/scala/http/introduction.rst index 1c46f92c08..453720a88f 100644 --- a/akka-docs/rst/scala/http/introduction.rst +++ b/akka-docs/rst/scala/http/introduction.rst @@ -25,11 +25,14 @@ Akka HTTP was designed specifically as “not-a-framework”, not because we don Using Akka HTTP --------------- -Akka HTTP is provided in separate jar files, to use it make sure to include the following dependencies:: +Akka HTTP is provided in a separate jar file, to use it make sure to include the following dependency:: - "com.typesafe.akka" %% "akka-http-core" % "@version@" @crossString@ "com.typesafe.akka" %% "akka-http-experimental" % "@version@" @crossString@ +Mind that ``akka-http`` comes in two modules: ``akka-http-experimental`` and ``akka-http-core``. Because ``akka-http-experimental`` +depends on ``akka-http-core`` you don't need to bring the latter explicitly. Still you may need to do this in case you rely +solely on low-level API.
+ Routing DSL for HTTP servers ---------------------------- diff --git a/akka-docs/rst/scala/http/routing-dsl/index.rst b/akka-docs/rst/scala/http/routing-dsl/index.rst index 3795d5acbe..10073cd27c 100644 --- a/akka-docs/rst/scala/http/routing-dsl/index.rst +++ b/akka-docs/rst/scala/http/routing-dsl/index.rst @@ -58,7 +58,7 @@ Bind failures ^^^^^^^^^^^^^ For example the server might be unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the Future's completion: +In this case the "binding future" will fail immediately, and we can react to it by listening on the Future's completion: .. includecode2:: ../../code/docs/http/scaladsl/HttpServerExampleSpec.scala :snippet: binding-failure-high-level-example From 6932012007605df3456e829b5a5fd8f12fd9e701 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Endre=20S=C3=A1ndor=20Varga?= Date: Tue, 7 Jun 2016 14:25:04 +0200 Subject: [PATCH 13/85] Reduce internal allocation in ActorGraphInterpreter --- .../akka/stream/impl/fusing/ActorGraphInterpreter.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala index 1258ec103a..3f6c9a0de8 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala @@ -553,11 +553,11 @@ private[stream] class ActorGraphInterpreter(_initial: GraphInterpreterShell) ext private val eventLimit: Int = _initial.mat.settings.syncProcessingLimit private var currentLimit: Int = eventLimit //this is a var in order to save the allocation when no short-circuiting actually happens - private var shortCircuitBuffer:
util.LinkedList[Any] = null + private var shortCircuitBuffer: util.ArrayDeque[Any] = null def enqueueToShortCircuit(input: Any): Unit = { - if (shortCircuitBuffer == null) shortCircuitBuffer = new util.LinkedList[Any]() - shortCircuitBuffer.add(input) + if (shortCircuitBuffer == null) shortCircuitBuffer = new util.ArrayDeque[Any]() + shortCircuitBuffer.addLast(input) } def registerShell(shell: GraphInterpreterShell): ActorRef = { From 049b95077f05078c1be015f56ee890498c45211c Mon Sep 17 00:00:00 2001 From: Kam Kasravi Date: Tue, 3 May 2016 18:58:26 -0700 Subject: [PATCH 14/85] fixes #20414 Allow different ActorMaterializer subtypes --- .../engine/server/HttpServerBluePrint.scala | 2 +- .../scaladsl/server/RequestContextImpl.scala | 10 ++-- .../akka/http/scaladsl/server/Route.scala | 10 ++-- .../MultipartUnmarshallers.scala | 6 +-- .../akka/stream/impl/StreamLayoutSpec.scala | 4 +- .../akka/stream/scaladsl/FlowLogSpec.scala | 6 +-- .../scala/akka/stream/ActorMaterializer.scala | 17 +++++-- .../main/scala/akka/stream/Materializer.scala | 3 +- .../stream/impl/ActorMaterializerImpl.scala | 47 ++++++++++++++++--- .../main/scala/akka/stream/impl/Modules.scala | 4 +- .../main/scala/akka/stream/impl/Sinks.scala | 8 ++-- .../scala/akka/stream/impl/StreamLayout.scala | 12 ++--- .../impl/fusing/ActorGraphInterpreter.scala | 19 ++++---- .../akka/stream/impl/fusing/GraphStages.scala | 2 +- .../scala/akka/stream/impl/fusing/Ops.scala | 4 +- .../stream/impl/fusing/StreamOfStreams.scala | 8 ++-- .../scala/akka/stream/impl/io/IOSinks.scala | 9 ++-- .../scala/akka/stream/impl/io/IOSources.scala | 4 +- .../impl/io/OutputStreamSourceStage.scala | 3 +- .../scala/akka/stream/stage/GraphStage.scala | 2 +- project/MiMa.scala | 3 ++ 21 files changed, 114 insertions(+), 69 deletions(-) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala index 
dc8f453c56..49b5af2e3a 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala @@ -609,7 +609,7 @@ private[http] object HttpServerBluePrint { }) private var activeTimers = 0 - private def timeout = ActorMaterializer.downcast(materializer).settings.subscriptionTimeoutSettings.timeout + private def timeout = ActorMaterializerHelper.downcast(materializer).settings.subscriptionTimeoutSettings.timeout private def addTimeout(s: SubscriptionTimeout): Unit = { if (activeTimers == 0) setKeepGoing(true) activeTimers += 1 diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala index 2e17c3cbd0..b477b3858d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala @@ -4,11 +4,11 @@ package akka.http.scaladsl.server -import scala.concurrent.{ Future, ExecutionContextExecutor } -import akka.stream.{ ActorMaterializer, Materializer } +import scala.concurrent.{ExecutionContextExecutor, Future} +import akka.stream.{ActorMaterializer, ActorMaterializerHelper, Materializer} import akka.event.LoggingAdapter -import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } -import akka.http.scaladsl.marshalling.{ Marshal, ToResponseMarshallable } +import akka.http.scaladsl.settings.{ParserSettings, RoutingSettings} +import akka.http.scaladsl.marshalling.{Marshal, ToResponseMarshallable} import akka.http.scaladsl.model._ import akka.http.scaladsl.util.FastFuture import akka.http.scaladsl.util.FastFuture._ @@ -29,7 +29,7 @@ private[http] class RequestContextImpl( this(request, request.uri.path, ec, materializer, log, settings, parserSettings) def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings)(implicit ec: 
ExecutionContextExecutor, materializer: Materializer) = - this(request, request.uri.path, ec, materializer, log, settings, ParserSettings(ActorMaterializer.downcast(materializer).system)) + this(request, request.uri.path, ec, materializer, log, settings, ParserSettings(ActorMaterializerHelper.downcast(materializer).system)) def reconfigure(executionContext: ExecutionContextExecutor, materializer: Materializer, log: LoggingAdapter, settings: RoutingSettings): RequestContext = copy(executionContext = executionContext, materializer = materializer, log = log, routingSettings = settings) diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala index 41f6c8bf49..d1de8d5a44 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala @@ -5,12 +5,12 @@ package akka.http.scaladsl.server import akka.NotUsed -import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } -import akka.stream.{ ActorMaterializer, Materializer } +import akka.http.scaladsl.settings.{ParserSettings, RoutingSettings} +import akka.stream.{ActorMaterializer, ActorMaterializerHelper, Materializer} -import scala.concurrent.{ ExecutionContextExecutor, Future } +import scala.concurrent.{ExecutionContextExecutor, Future} import akka.stream.scaladsl.Flow -import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } +import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import akka.http.scaladsl.util.FastFuture._ object Route { @@ -66,7 +66,7 @@ object Route { { implicit val executionContext = effectiveEC // overrides parameter - val effectiveParserSettings = if (parserSettings ne null) parserSettings else ParserSettings(ActorMaterializer.downcast(materializer).system) + val effectiveParserSettings = if (parserSettings ne null) parserSettings else ParserSettings(ActorMaterializerHelper.downcast(materializer).system) val 
sealedRoute = seal(route) request ⇒ diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala index 38aa5efe3b..8bf35e5656 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala @@ -9,8 +9,8 @@ import akka.http.scaladsl.settings.ParserSettings import scala.collection.immutable import scala.collection.immutable.VectorBuilder import akka.util.ByteString -import akka.event.{ NoLogging, LoggingAdapter } -import akka.stream.ActorMaterializer +import akka.event.{LoggingAdapter, NoLogging} +import akka.stream.{ActorMaterializer, ActorMaterializerHelper} import akka.stream.impl.fusing.IteratorInterpreter import akka.stream.scaladsl._ import akka.http.impl.engine.parsing.BodyPartParser @@ -75,7 +75,7 @@ trait MultipartUnmarshallers { FastFuture.failed(new RuntimeException("Content-Type with a multipart media type must have a 'boundary' parameter")) case Some(boundary) ⇒ import BodyPartParser._ - val effectiveParserSettings = Option(parserSettings).getOrElse(ParserSettings(ActorMaterializer.downcast(mat).system)) + val effectiveParserSettings = Option(parserSettings).getOrElse(ParserSettings(ActorMaterializerHelper.downcast(mat).system)) val parser = new BodyPartParser(defaultContentType, boundary, log, effectiveParserSettings) FastFuture.successful { entity match { diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala index 63b25ec15b..7f829cc215 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala @@ -3,10 +3,10 @@ */ package akka.stream.impl +import akka.stream._ import akka.stream.scaladsl._ 
import akka.testkit.AkkaSpec -import org.reactivestreams.{ Subscription, Subscriber, Publisher } -import akka.stream._ +import org.reactivestreams.{ Publisher, Subscriber, Subscription } import scala.concurrent.duration._ diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala index a86294da6f..3f8521f030 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala @@ -9,9 +9,9 @@ import akka.stream.ActorAttributes._ import akka.stream.Attributes.LogLevels import akka.stream.Supervision._ import akka.stream.testkit.ScriptedTest -import akka.stream.javadsl -import akka.stream.{ ActorMaterializer, Materializer, Attributes } +import akka.stream._ import akka.testkit.TestProbe + import scala.concurrent.duration._ import scala.concurrent.Await import scala.util.control.NoStackTrace @@ -29,7 +29,7 @@ class FlowLogSpec extends AkkaSpec("akka.loglevel = DEBUG") with ScriptedTest { "A Log" must { - val supervisorPath = ActorMaterializer.downcast(mat).supervisor.path + val supervisorPath = ActorMaterializerHelper.downcast(mat).supervisor.path val LogSrc = s"akka.stream.Log($supervisorPath)" val LogClazz = classOf[Materializer] diff --git a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala index 1e7ede7b60..848de90e74 100644 --- a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala +++ b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala @@ -5,7 +5,7 @@ package akka.stream import java.util.Locale import java.util.concurrent.TimeUnit -import java.util.concurrent.atomic.{ AtomicBoolean } +import java.util.concurrent.atomic.AtomicBoolean import akka.actor.{ ActorContext, ActorRef, ActorRefFactory, ActorSystem, ExtendedActorSystem, Props } import akka.event.LoggingAdapter @@ 
-16,6 +16,7 @@ import com.typesafe.config.Config import scala.concurrent.duration._ import akka.japi.function +import akka.stream.impl.fusing.GraphInterpreterShell import scala.util.control.NoStackTrace @@ -126,6 +127,12 @@ object ActorMaterializer { system } +} + +/** + * INTERNAL API + */ +private[akka] object ActorMaterializerHelper { /** * INTERNAL API */ @@ -163,21 +170,23 @@ abstract class ActorMaterializer extends Materializer { def isShutdown: Boolean /** - * INTERNAL API: this might become public later + * INTERNAL API */ private[akka] def actorOf(context: MaterializationContext, props: Props): ActorRef /** * INTERNAL API */ - private[akka] def system: ActorSystem + def system: ActorSystem /** * INTERNAL API */ private[akka] def logger: LoggingAdapter - /** INTERNAL API */ + /** + * INTERNAL API + */ private[akka] def supervisor: ActorRef } diff --git a/akka-stream/src/main/scala/akka/stream/Materializer.scala b/akka-stream/src/main/scala/akka/stream/Materializer.scala index 5b0a6b3729..4467c21944 100644 --- a/akka-stream/src/main/scala/akka/stream/Materializer.scala +++ b/akka-stream/src/main/scala/akka/stream/Materializer.scala @@ -73,11 +73,10 @@ private[akka] object NoMaterializer extends Materializer { } /** - * INTERNAL API: this might become public later * * Context parameter to the `create` methods of sources and sinks. 
*/ -private[akka] case class MaterializationContext( +case class MaterializationContext( materializer: Materializer, effectiveAttributes: Attributes, stageName: String) diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala index a8077aa412..a3eb77d2f1 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala @@ -3,19 +3,21 @@ */ package akka.stream.impl -import java.util.concurrent.atomic.{ AtomicBoolean } +import java.util.concurrent.atomic.AtomicBoolean import java.{ util ⇒ ju } + import akka.NotUsed import akka.actor._ -import akka.event.Logging +import akka.event.{ Logging, LoggingAdapter } import akka.dispatch.Dispatchers import akka.pattern.ask import akka.stream._ -import akka.stream.impl.StreamLayout.{ Module, AtomicModule } +import akka.stream.impl.StreamLayout.{ AtomicModule, Module } import akka.stream.impl.fusing.{ ActorGraphInterpreter, GraphModule } import akka.stream.impl.io.TLSActor import akka.stream.impl.io.TlsModule import org.reactivestreams._ + import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ Await, ExecutionContextExecutor } import akka.stream.impl.fusing.GraphStageModule @@ -23,6 +25,37 @@ import akka.stream.impl.fusing.GraphInterpreter.GraphAssembly import akka.stream.impl.fusing.Fusing import akka.stream.impl.fusing.GraphInterpreterShell +/** + * ExtendedActorMaterializer used by subtypes which materializer using GraphInterpreterShell + */ +abstract class ExtendedActorMaterializer extends ActorMaterializer { + + override def withNamePrefix(name: String): ExtendedActorMaterializer + + /** + * INTERNAL API + */ + def materialize[Mat]( + _runnableGraph: Graph[ClosedShape, Mat], + subflowFuser: GraphInterpreterShell ⇒ ActorRef): Mat + + /** + * INTERNAL API + */ + def actorOf(context: MaterializationContext, 
props: Props): ActorRef + + /** + * INTERNAL API + */ + override def logger: LoggingAdapter + + /** + * INTERNAL API + */ + override def supervisor: ActorRef + +} + /** * INTERNAL API */ @@ -32,7 +65,7 @@ private[akka] case class ActorMaterializerImpl( dispatchers: Dispatchers, supervisor: ActorRef, haveShutDown: AtomicBoolean, - flowNames: SeqActorName) extends ActorMaterializer { + flowNames: SeqActorName) extends ExtendedActorMaterializer { import akka.stream.impl.Stages._ private val _logger = Logging.getLogger(system, this) override def logger = _logger @@ -79,7 +112,7 @@ private[akka] case class ActorMaterializerImpl( override def materialize[Mat](_runnableGraph: Graph[ClosedShape, Mat]): Mat = materialize(_runnableGraph, null) - private[stream] def materialize[Mat]( + override def materialize[Mat]( _runnableGraph: Graph[ClosedShape, Mat], subflowFuser: GraphInterpreterShell ⇒ ActorRef): Mat = { val runnableGraph = @@ -213,7 +246,7 @@ private[akka] case class ActorMaterializerImpl( } -private[akka] class SubFusingActorMaterializerImpl(val delegate: ActorMaterializerImpl, registerShell: GraphInterpreterShell ⇒ ActorRef) extends Materializer { +private[akka] class SubFusingActorMaterializerImpl(val delegate: ExtendedActorMaterializer, registerShell: GraphInterpreterShell ⇒ ActorRef) extends Materializer { override def executionContext: ExecutionContextExecutor = delegate.executionContext override def materialize[Mat](runnable: Graph[ClosedShape, Mat]): Mat = delegate.materialize(runnable, registerShell) @@ -223,7 +256,7 @@ private[akka] class SubFusingActorMaterializerImpl(val delegate: ActorMaterializ override def schedulePeriodically(initialDelay: FiniteDuration, interval: FiniteDuration, task: Runnable): Cancellable = delegate.schedulePeriodically(initialDelay, interval, task) - def withNamePrefix(name: String): SubFusingActorMaterializerImpl = + override def withNamePrefix(name: String): SubFusingActorMaterializerImpl = new 
SubFusingActorMaterializerImpl(delegate.withNamePrefix(name), registerShell) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/Modules.scala b/akka-stream/src/main/scala/akka/stream/impl/Modules.scala index ae5771f15d..21afc53aa4 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Modules.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Modules.scala @@ -94,7 +94,7 @@ private[akka] final class MaybeSource[Out](val attributes: Attributes, shape: So private[akka] final class ActorPublisherSource[Out](props: Props, val attributes: Attributes, shape: SourceShape[Out]) extends SourceModule[Out, ActorRef](shape) { override def create(context: MaterializationContext) = { - val publisherRef = ActorMaterializer.downcast(context.materializer).actorOf(context, props) + val publisherRef = ActorMaterializerHelper.downcast(context.materializer).actorOf(context, props) (akka.stream.actor.ActorPublisher[Out](publisherRef), publisherRef) } @@ -113,7 +113,7 @@ private[akka] final class ActorRefSource[Out]( override protected def label: String = s"ActorRefSource($bufferSize, $overflowStrategy)" override def create(context: MaterializationContext) = { - val mat = ActorMaterializer.downcast(context.materializer) + val mat = ActorMaterializerHelper.downcast(context.materializer) val ref = mat.actorOf(context, ActorRefSourceActor.props(bufferSize, overflowStrategy, mat.settings)) (akka.stream.actor.ActorPublisher[Out](ref), ref) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala index 0c4295627a..840f6a5d80 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala @@ -99,7 +99,7 @@ private[akka] final class FanoutPublisherSink[In]( extends SinkModule[In, Publisher[In]](shape) { override def create(context: MaterializationContext): (Subscriber[In], Publisher[In]) = { - val actorMaterializer = 
ActorMaterializer.downcast(context.materializer) + val actorMaterializer = ActorMaterializerHelper.downcast(context.materializer) val impl = actorMaterializer.actorOf( context, FanoutProcessorImpl.props(actorMaterializer.effectiveSettings(attributes))) @@ -124,7 +124,7 @@ private[akka] final class FanoutPublisherSink[In]( private[akka] final class SinkholeSink(val attributes: Attributes, shape: SinkShape[Any]) extends SinkModule[Any, Future[Done]](shape) { override def create(context: MaterializationContext) = { - val effectiveSettings = ActorMaterializer.downcast(context.materializer).effectiveSettings(context.effectiveAttributes) + val effectiveSettings = ActorMaterializerHelper.downcast(context.materializer).effectiveSettings(context.effectiveAttributes) val p = Promise[Done]() (new SinkholeSubscriber[Any](p), p.future) } @@ -163,7 +163,7 @@ private[akka] final class CancelSink(val attributes: Attributes, shape: SinkShap private[akka] final class ActorSubscriberSink[In](props: Props, val attributes: Attributes, shape: SinkShape[In]) extends SinkModule[In, ActorRef](shape) { override def create(context: MaterializationContext) = { - val subscriberRef = ActorMaterializer.downcast(context.materializer).actorOf(context, props) + val subscriberRef = ActorMaterializerHelper.downcast(context.materializer).actorOf(context, props) (akka.stream.actor.ActorSubscriber[In](subscriberRef), subscriberRef) } @@ -179,7 +179,7 @@ private[akka] final class ActorRefSink[In](ref: ActorRef, onCompleteMessage: Any shape: SinkShape[In]) extends SinkModule[In, NotUsed](shape) { override def create(context: MaterializationContext) = { - val actorMaterializer = ActorMaterializer.downcast(context.materializer) + val actorMaterializer = ActorMaterializerHelper.downcast(context.materializer) val effectiveSettings = actorMaterializer.effectiveSettings(context.effectiveAttributes) val subscriberRef = actorMaterializer.actorOf( context, diff --git 
a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala index a646eadf02..41128e3ad5 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala @@ -801,7 +801,7 @@ private[impl] class VirtualPublisher[T] extends AtomicReference[AnyRef] with Pub /** * INERNAL API */ -private[stream] object MaterializerSession { +object MaterializerSession { class MaterializationPanic(cause: Throwable) extends RuntimeException("Materialization aborted.", cause) with NoStackTrace final val Debug = false @@ -810,7 +810,7 @@ private[stream] object MaterializerSession { /** * INTERNAL API */ -private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Module, val initialAttributes: Attributes) { +abstract class MaterializerSession(val topLevel: StreamLayout.Module, val initialAttributes: Attributes) { import StreamLayout._ // the contained maps store either Subscriber[Any] or VirtualPublisher, but the type system cannot express that @@ -839,7 +839,7 @@ private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Mo // Enters a copied module and establishes a scope that prevents internals to leak out and interfere with copies // of the same module. 
// We don't store the enclosing CopiedModule itself as state since we don't use it anywhere else than exit and enter - private def enterScope(enclosing: CopiedModule): Unit = { + protected def enterScope(enclosing: CopiedModule): Unit = { if (MaterializerSession.Debug) println(f"entering scope [${System.identityHashCode(enclosing)}%08x]") subscribersStack ::= new ju.HashMap publishersStack ::= new ju.HashMap @@ -851,7 +851,7 @@ private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Mo // them to the copied ports instead of the original ones (since there might be multiple copies of the same module // leading to port identity collisions) // We don't store the enclosing CopiedModule itself as state since we don't use it anywhere else than exit and enter - private def exitScope(enclosing: CopiedModule): Unit = { + protected def exitScope(enclosing: CopiedModule): Unit = { if (MaterializerSession.Debug) println(f"exiting scope [${System.identityHashCode(enclosing)}%08x]") val scopeSubscribers = subscribers val scopePublishers = publishers @@ -969,7 +969,7 @@ private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Mo ret } - final protected def assignPort(in: InPort, subscriberOrVirtual: AnyRef): Unit = { + protected def assignPort(in: InPort, subscriberOrVirtual: AnyRef): Unit = { subscribers.put(in, subscriberOrVirtual) currentLayout.upstreams.get(in) match { @@ -981,7 +981,7 @@ private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Mo } } - final protected def assignPort(out: OutPort, publisher: Publisher[Any]): Unit = { + protected def assignPort(out: OutPort, publisher: Publisher[Any]): Unit = { publishers.put(out, publisher) currentLayout.downstreams.get(out) match { diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala index 3f6c9a0de8..d50078b098 100644 --- 
a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala @@ -5,20 +5,19 @@ package akka.stream.impl.fusing import java.util import java.util.concurrent.TimeoutException + import akka.actor._ import akka.event.Logging import akka.stream._ -import akka.stream.impl._ import akka.stream.impl.ReactiveStreamsCompliance._ -import akka.stream.impl.StreamLayout.{ CompositeModule, CopiedModule, Module, AtomicModule } -import akka.stream.impl.fusing.GraphInterpreter.{ DownstreamBoundaryStageLogic, UpstreamBoundaryStageLogic, GraphAssembly } +import akka.stream.impl.StreamLayout.{ AtomicModule, CompositeModule, CopiedModule, Module } +import akka.stream.impl.{ SubFusingActorMaterializerImpl, _ } +import akka.stream.impl.fusing.GraphInterpreter.{ DownstreamBoundaryStageLogic, GraphAssembly, UpstreamBoundaryStageLogic } import akka.stream.stage.{ GraphStageLogic, InHandler, OutHandler } import org.reactivestreams.{ Subscriber, Subscription } -import scala.concurrent.forkjoin.ThreadLocalRandom -import scala.util.control.NonFatal -import akka.stream.impl.ActorMaterializerImpl -import akka.stream.impl.SubFusingActorMaterializerImpl + import scala.annotation.tailrec +import scala.util.control.NonFatal /** * INTERNAL API @@ -307,14 +306,14 @@ private[stream] object ActorGraphInterpreter { /** * INTERNAL API */ -private[stream] final class GraphInterpreterShell( +final class GraphInterpreterShell( assembly: GraphAssembly, inHandlers: Array[InHandler], outHandlers: Array[OutHandler], logics: Array[GraphStageLogic], shape: Shape, settings: ActorMaterializerSettings, - val mat: ActorMaterializerImpl) { + val mat: ExtendedActorMaterializer) { import ActorGraphInterpreter._ @@ -643,4 +642,4 @@ private[stream] class ActorGraphInterpreter(_initial: GraphInterpreterShell) ext activeInterpreters = Set.empty[GraphInterpreterShell] newShells.foreach(s ⇒ if (tryInit(s)) s.tryAbort(ex)) } 
-} \ No newline at end of file +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala index 7b53a33510..aa5d0bd6f4 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala @@ -267,7 +267,7 @@ object GraphStages { * * This source is not reusable, it is only created internally. */ - private[stream] final class MaterializedValueSource[T](val computation: MaterializedValueNode, val out: Outlet[T]) extends GraphStage[SourceShape[T]] { + final class MaterializedValueSource[T](val computation: MaterializedValueNode, val out: Outlet[T]) extends GraphStage[SourceShape[T]] { def this(computation: MaterializedValueNode) = this(computation, Outlet[T]("matValue")) override def initialAttributes: Attributes = DefaultAttributes.materializedValueSource override val shape = SourceShape(out) diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala index 9e50f57ea2..4781dc1422 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala @@ -922,7 +922,7 @@ private[akka] final case class Log[T](name: String, extract: T ⇒ Any, log = logAdapter match { case Some(l) ⇒ l case _ ⇒ - val mat = try ActorMaterializer.downcast(ctx.materializer) + val mat = try ActorMaterializerHelper.downcast(ctx.materializer) catch { case ex: Exception ⇒ throw new RuntimeException("Log stage can only provide LoggingAdapter when used with ActorMaterializer! 
" + @@ -984,7 +984,7 @@ private[akka] object Log { override def getClazz(t: LifecycleContext): Class[_] = classOf[Materializer] override def genString(t: LifecycleContext): String = { - try s"$DefaultLoggerName(${ActorMaterializer.downcast(t.materializer).supervisor.path})" + try s"$DefaultLoggerName(${ActorMaterializerHelper.downcast(t.materializer).supervisor.path})" catch { case ex: Exception ⇒ LogSource.fromString.genString(DefaultLoggerName) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala index 5e7b3cec69..20a586be75 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala @@ -121,7 +121,7 @@ final class PrefixAndTail[T](n: Int) extends GraphStage[FlowShape[T, (immutable. private val SubscriptionTimer = "SubstreamSubscriptionTimer" override protected def onTimer(timerKey: Any): Unit = { - val materializer = ActorMaterializer.downcast(interpreter.materializer) + val materializer = ActorMaterializerHelper.downcast(interpreter.materializer) val timeoutSettings = materializer.settings.subscriptionTimeoutSettings val timeout = timeoutSettings.timeout @@ -150,7 +150,7 @@ final class PrefixAndTail[T](n: Int) extends GraphStage[FlowShape[T, (immutable. 
} private def openSubstream(): Source[T, NotUsed] = { - val timeout = ActorMaterializer.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout + val timeout = ActorMaterializerHelper.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout tailSource = new SubSourceOutlet[T]("TailSource") tailSource.setHandler(subHandler) setKeepGoing(true) @@ -254,7 +254,7 @@ final class GroupBy[T, K](maxSubstreams: Int, keyFor: T ⇒ K) extends GraphStag private def needToPull: Boolean = !(hasBeenPulled(in) || isClosed(in) || hasNextElement) override def preStart(): Unit = - timeout = ActorMaterializer.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout + timeout = ActorMaterializerHelper.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout override def onPull(): Unit = { substreamWaitingToBePushed match { @@ -424,7 +424,7 @@ final class Split[T](decision: Split.SplitDecision, p: T ⇒ Boolean, substreamC private var substreamCancelled = false override def preStart(): Unit = { - timeout = ActorMaterializer.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout + timeout = ActorMaterializerHelper.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout } setHandler(out, new OutHandler { diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala b/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala index d0f8db5a46..4e14ca8e6e 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala @@ -5,13 +5,14 @@ package akka.stream.impl.io import java.io.OutputStream import java.nio.file.{ Path, StandardOpenOption } -import akka.stream.IOResult + +import akka.stream._ import akka.stream.impl.SinkModule import akka.stream.impl.StreamLayout.Module import akka.stream.impl.Stages.DefaultAttributes.IODispatcher -import akka.stream.{ 
ActorMaterializer, MaterializationContext, Attributes, SinkShape } import akka.stream.ActorAttributes.Dispatcher import akka.util.ByteString + import scala.concurrent.{ Future, Promise } /** @@ -25,7 +26,7 @@ private[akka] final class FileSink(f: Path, options: Set[StandardOpenOption], va override protected def label: String = s"FileSink($f, $options)" override def create(context: MaterializationContext) = { - val materializer = ActorMaterializer.downcast(context.materializer) + val materializer = ActorMaterializerHelper.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) val ioResultPromise = Promise[IOResult]() @@ -51,7 +52,7 @@ private[akka] final class OutputStreamSink(createOutput: () ⇒ OutputStream, va extends SinkModule[ByteString, Future[IOResult]](shape) { override def create(context: MaterializationContext) = { - val materializer = ActorMaterializer.downcast(context.materializer) + val materializer = ActorMaterializerHelper.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) val ioResultPromise = Promise[IOResult]() diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala index 7fec482d18..082287f5f2 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala @@ -25,7 +25,7 @@ private[akka] final class FileSource(f: Path, chunkSize: Int, val attributes: At require(chunkSize > 0, "chunkSize must be greater than 0") override def create(context: MaterializationContext) = { // FIXME rewrite to be based on GraphStage rather than dangerous downcasts - val materializer = ActorMaterializer.downcast(context.materializer) + val materializer = ActorMaterializerHelper.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) val ioResultPromise = 
Promise[IOResult]() @@ -53,7 +53,7 @@ private[akka] final class FileSource(f: Path, chunkSize: Int, val attributes: At private[akka] final class InputStreamSource(createInputStream: () ⇒ InputStream, chunkSize: Int, val attributes: Attributes, shape: SourceShape[ByteString]) extends SourceModule[ByteString, Future[IOResult]](shape) { override def create(context: MaterializationContext) = { - val materializer = ActorMaterializer.downcast(context.materializer) + val materializer = ActorMaterializerHelper.downcast(context.materializer) val ioResultPromise = Promise[IOResult]() val pub = try { diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala index 441444af95..21832b4832 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala @@ -21,6 +21,7 @@ import akka.stream.impl.Stages.DefaultAttributes.IODispatcher import akka.stream.ActorAttributes.Dispatcher import scala.concurrent.ExecutionContext import akka.stream.ActorMaterializer +import akka.stream.ActorMaterializerHelper private[stream] object OutputStreamSourceStage { sealed trait AdapterToStageMessage @@ -112,7 +113,7 @@ final private[stream] class OutputStreamSourceStage(writeTimeout: FiniteDuration } override def preStart(): Unit = { - dispatcher = ActorMaterializer.downcast(materializer).system.dispatchers.lookup(dispatcherId) + dispatcher = ActorMaterializerHelper.downcast(materializer).system.dispatchers.lookup(dispatcherId) super.preStart() } diff --git a/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala b/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala index c6bf767138..cab6d14a26 100644 --- a/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala +++ b/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala @@ -900,7 +900,7 @@ abstract class 
GraphStageLogic private[stream] (val inCount: Int, val outCount: final protected def getStageActor(receive: ((ActorRef, Any)) ⇒ Unit): StageActor = { _stageActor match { case null ⇒ - val actorMaterializer = ActorMaterializer.downcast(interpreter.materializer) + val actorMaterializer = ActorMaterializerHelper.downcast(interpreter.materializer) _stageActor = new StageActor(actorMaterializer, getAsyncCallback, receive) _stageActor case existing ⇒ diff --git a/project/MiMa.scala b/project/MiMa.scala index 9b09a7c6da..a695fb63c2 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -873,6 +873,9 @@ object MiMa extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("akka.stream.scaladsl.Framing#LengthFieldFramingStage.onUpstreamFinish"), ProblemFilters.exclude[DirectMissingMethodProblem]("akka.stream.scaladsl.Framing#LengthFieldFramingStage.onPull"), ProblemFilters.exclude[DirectMissingMethodProblem]("akka.stream.scaladsl.Framing#LengthFieldFramingStage.postStop"), + + // #20414 Allow different ActorMaterializer subtypes + ProblemFilters.exclude[DirectMissingMethodProblem]("akka.stream.ActorMaterializer.downcast"), // #20531 adding refuseUid to Gated FilterAnyProblem("akka.remote.EndpointManager$Gated") From 735060da20f9e9a083c63c64b9180a4b8d86f1b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Piotr=20Krzemi=C5=84ski?= Date: Tue, 7 Jun 2016 20:53:22 +0200 Subject: [PATCH 15/85] +doc updated Sphinx install guide (#20743) * updated path to texlive basic 2016 * added few missing packages --- akka-docs/rst/dev/documentation.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/akka-docs/rst/dev/documentation.rst b/akka-docs/rst/dev/documentation.rst index 4f8cd1df23..936da82f96 100644 --- a/akka-docs/rst/dev/documentation.rst +++ b/akka-docs/rst/dev/documentation.rst @@ -116,7 +116,7 @@ Add texlive bin to $PATH: :: - export TEXLIVE_PATH=/usr/local/texlive/2015basic/bin/universal-darwin + export 
TEXLIVE_PATH=/usr/local/texlive/2016basic/bin/universal-darwin export PATH=$TEXLIVE_PATH:$PATH Add missing tex packages: @@ -131,6 +131,11 @@ Add missing tex packages: sudo tlmgr install helvetic sudo tlmgr install courier sudo tlmgr install multirow + sudo tlmgr install capt-of + sudo tlmgr install needspace + sudo tlmgr install eqparbox + sudo tlmgr install environ + sudo tlmgr install trimspaces If you get the error "unknown locale: UTF-8" when generating the documentation the solution is to define the following environment variables: From 7fdd5983a3d8c034ce6ed2ee9244304e4a663376 Mon Sep 17 00:00:00 2001 From: kwyczesany Date: Tue, 7 Jun 2016 21:02:38 +0200 Subject: [PATCH 16/85] +htp #19756: Add extractData and extractRequestEntity directives. (#20730) * 19756: Add extractData and extractRequestEntity directives. remove unnecessary import * #19756: add documentation to extractDataBytes and extractRequestEntity directives --- .../BasicDirectivesExamplesSpec.scala | 31 +++++++++++++++++++ .../routing-dsl/directives/alphabetically.rst | 2 ++ .../basic-directives/extractDataBytes.rst | 24 ++++++++++++++ .../basic-directives/extractRequestEntity.rst | 25 +++++++++++++++ .../directives/basic-directives/index.rst | 4 +++ .../directives/BasicDirectivesSpec.scala | 27 ++++++++++++++++ .../server/directives/BasicDirectives.scala | 16 +++++++++- .../server/directives/BasicDirectives.scala | 19 ++++++++++++ 8 files changed, 147 insertions(+), 1 deletion(-) create mode 100644 akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractDataBytes.rst create mode 100644 akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestEntity.rst diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala index 368a19328e..0b5ff6247e 100644 --- 
a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala @@ -795,5 +795,36 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { } //# } + "extractRequestEntity-example" in { + //#extractRequestEntity-example + val route = + extractRequestEntity { entity => + complete(s"Request entity content-type is ${entity.contentType}") + } + + // tests: + val httpEntity = HttpEntity(ContentTypes.`text/plain(UTF-8)`, "req") + Post("/abc", httpEntity) ~> route ~> check { + responseAs[String] shouldEqual s"Request entity content-type is text/plain; charset=UTF-8" + } + //# + } + "extractDataBytes-example" in { + //#extractDataBytes-example + val route = + extractDataBytes { data ⇒ + val sum = data.runFold(0) { (acc, i) ⇒ acc + i.utf8String.toInt } + onSuccess(sum) { s ⇒ + complete(HttpResponse(entity = HttpEntity(s.toString))) + } + } + + // tests: + val dataBytes = Source.fromIterator(() ⇒ Iterator.range(1, 10).map(x ⇒ ByteString(x.toString))) + Post("/abc", HttpEntity(ContentTypes.`text/plain(UTF-8)`, data = dataBytes)) ~> route ~> check { + responseAs[String] shouldEqual "45" + } + //# + } } diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst b/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst index 7e718bff58..9093b04a75 100644 --- a/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst @@ -47,6 +47,7 @@ Directive Description via the ``Accept-Encoding`` header (from a user-defined set) :ref:`-entity-` Extracts the request entity unmarshalled to a given type :ref:`-extract-` Extracts a single value using a ``RequestContext ⇒ T`` function +:ref:`-extractDataBytes-` Extracts the entities data bytes as a stream ``Source[ByteString, Any]`` :ref:`-extractClientIP-` Extracts the client's IP from either the 
``X-Forwarded-``, ``Remote-Address`` or ``X-Real-IP`` header :ref:`-extractCredentials-` Extracts the potentially present ``HttpCredentials`` provided with the @@ -58,6 +59,7 @@ Directive Description :ref:`-extractMethod-` Extracts the request method :ref:`-extractRequest-` Extracts the current ``HttpRequest`` instance :ref:`-extractRequestContext-` Extracts the ``RequestContext`` itself +:ref:`-extractRequestEntity-` Extracts the ``RequestEntity`` from the ``RequestContext`` :ref:`-extractScheme-` Extracts the URI scheme from the request :ref:`-extractSettings-` Extracts the ``RoutingSettings`` from the ``RequestContext`` :ref:`-extractUnmatchedPath-` Extracts the yet unmatched path from the ``RequestContext`` diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractDataBytes.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractDataBytes.rst new file mode 100644 index 0000000000..5b962b2de5 --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractDataBytes.rst @@ -0,0 +1,24 @@ +.. _-extractDataBytes-: + +extractDataBytes +================ + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala + :snippet: extractDataBytes + +Description +----------- + +Extracts the entities data bytes as ``Source[ByteString, Any]`` from the :class:`RequestContext`. + +The directive returns a stream containing the request data bytes. + + +Example +------- + +.. 
includecode2:: ../../../../code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala + :snippet: extractDataBytes-example diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestEntity.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestEntity.rst new file mode 100644 index 0000000000..53e142c5ca --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestEntity.rst @@ -0,0 +1,25 @@ +.. _-extractRequestEntity-: + +extractRequestEntity +==================== + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala + :snippet: extractRequestEntity + +Description +----------- + +Extracts the ``RequestEntity`` from the :class:`RequestContext`. + +The directive returns a ``RequestEntity`` without unmarshalling the request. To extract domain entity, +:ref:`-entity-` should be used. + + +Example +------- + +.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala + :snippet: extractRequestEntity-example diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst index 2b5c0bd4cd..709f7d7b29 100644 --- a/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst @@ -17,11 +17,13 @@ on two axes: a) provide a constant value or extract a value from the ``RequestCo a single value or a tuple of values. 
* :ref:`-extract-` + * :ref:`-extractDataBytes-` * :ref:`-extractExecutionContext-` * :ref:`-extractMaterializer-` * :ref:`-extractLog-` * :ref:`-extractRequest-` * :ref:`-extractRequestContext-` + * :ref:`-extractRequestEntity-` * :ref:`-extractSettings-` * :ref:`-extractUnmatchedPath-` * :ref:`-extractUri-` @@ -94,10 +96,12 @@ Alphabetically cancelRejections extract extractExecutionContext + extractDataBytes extractMaterializer extractLog extractRequest extractRequestContext + extractRequestEntity extractSettings extractUnmatchedPath extractUri diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala index 1bee6147a0..c1e619bb63 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala @@ -5,6 +5,10 @@ package akka.http.scaladsl.server package directives +import akka.http.scaladsl.model._ +import akka.stream.scaladsl.Source +import akka.util.ByteString + class BasicDirectivesSpec extends RoutingSpec { "The `mapUnmatchedPath` directive" should { @@ -26,4 +30,27 @@ class BasicDirectivesSpec extends RoutingSpec { } ~> check { responseAs[String] shouldEqual "GET" } } } + + "The `extractDataBytes` directive" should { + "extract stream of ByteString from the RequestContext" in { + val dataBytes = Source.fromIterator(() ⇒ Iterator.range(1, 10).map(x ⇒ ByteString(x.toString))) + Post("/abc", HttpEntity(ContentTypes.`text/plain(UTF-8)`, data = dataBytes)) ~> { + extractDataBytes { data ⇒ + val sum = data.runFold(0) { (acc, i) ⇒ acc + i.utf8String.toInt } + onSuccess(sum) { s ⇒ + complete(HttpResponse(entity = HttpEntity(s.toString))) + } + } + } ~> check { responseAs[String] shouldEqual "45" } + } + } + + "The `extractRequestEntity` directive" should { + "extract entity from the 
RequestContext" in { + val httpEntity = HttpEntity(ContentTypes.`text/plain(UTF-8)`, "req") + Post("/abc", httpEntity) ~> { + extractRequestEntity { complete(_) } + } ~> check { responseEntity shouldEqual httpEntity } + } + } } \ No newline at end of file diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala index 291453393b..886223d662 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala @@ -10,6 +10,8 @@ import akka.http.impl.util.JavaMapping import akka.http.javadsl.settings.ParserSettings import akka.http.javadsl.settings.RoutingSettings import akka.japi.Util +import akka.stream.javadsl.Source +import akka.util.ByteString import scala.concurrent.ExecutionContextExecutor import akka.http.impl.model.JavaUri @@ -184,7 +186,7 @@ abstract class BasicDirectives { * Extracts the current http request entity. */ @CorrespondsTo("extract") - def extractEntity(inner: java.util.function.Function[RequestEntity, Route]): Route = RouteAdapter { + def extractEntity(inner: JFunction[RequestEntity, Route]): Route = RouteAdapter { D.extractRequest { rq ⇒ inner.apply(rq.entity).delegate } @@ -269,4 +271,16 @@ abstract class BasicDirectives { D.extractRequestContext { ctx ⇒ inner.apply(JavaMapping.toJava(ctx)(server.RoutingJavaMapping.RequestContext)).delegate } } + /** + * Extracts the entities `dataBytes` [[akka.stream.javadsl.Source]] from the [[akka.http.javadsl.server.RequestContext]]. + */ + def extractDataBytes(inner: JFunction[Source[ByteString, Any], Route]) = RouteAdapter { + D.extractRequest { ctx ⇒ inner.apply(ctx.entity.dataBytes.asJava).delegate } + } + + /** + * Extracts the [[akka.http.javadsl.model.RequestEntity]] from the [[akka.http.javadsl.server.RequestContext]]. 
+ */ + def extractRequestEntity(inner: JFunction[RequestEntity, Route]): Route = extractEntity(inner) + } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala index 93d4952163..1b5d4fcecc 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala @@ -5,6 +5,9 @@ package akka.http.scaladsl.server package directives +import akka.stream.scaladsl.Source +import akka.util.ByteString + import scala.concurrent.{ Future, ExecutionContextExecutor } import scala.collection.immutable import akka.event.LoggingAdapter @@ -284,6 +287,20 @@ trait BasicDirectives { * @group basic */ def extractRequestContext: Directive1[RequestContext] = BasicDirectives._extractRequestContext + + /** + * Extracts the [[akka.http.scaladsl.model.RequestEntity]] from the [[akka.http.scaladsl.server.RequestContext]]. + * + * @group basic + */ + def extractRequestEntity: Directive1[RequestEntity] = BasicDirectives._extractRequestEntity + + /** + * Extracts the entities `dataBytes` [[akka.stream.scaladsl.Source]] from the [[akka.http.scaladsl.server.RequestContext]]. 
+ * + * @group basic + */ + def extractDataBytes: Directive1[Source[ByteString, Any]] = BasicDirectives._extractDataBytes } object BasicDirectives extends BasicDirectives { @@ -296,4 +313,6 @@ object BasicDirectives extends BasicDirectives { private val _extractSettings: Directive1[RoutingSettings] = extract(_.settings) private val _extractParserSettings: Directive1[ParserSettings] = extract(_.parserSettings) private val _extractRequestContext: Directive1[RequestContext] = extract(conforms) + private val _extractRequestEntity: Directive1[RequestEntity] = extract(_.request.entity) + private val _extractDataBytes: Directive1[Source[ByteString, Any]] = extract(_.request.entity.dataBytes) } From e40a2b21c4475831d8b58dc3a43fca4696c84139 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Robert=20Bud=C5=BAko?= Date: Wed, 8 Jun 2016 15:20:21 +0200 Subject: [PATCH 17/85] +doc #20521 Enrich docs and tests regarding empty strings used in matchers (#20719) --- .../directives/path-directives/path.rst | 4 +++ .../directives/path-directives/pathPrefix.rst | 3 ++ .../directives/path-directives/path.rst | 4 +++ .../directives/path-directives/pathPrefix.rst | 3 ++ .../directives/PathDirectivesSpec.scala | 28 +++++++++++++++++++ 5 files changed, 42 insertions(+) diff --git a/akka-docs/rst/java/http/routing-dsl/directives/path-directives/path.rst b/akka-docs/rst/java/http/routing-dsl/directives/path-directives/path.rst index afbf2475fb..0ee095af1d 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/path-directives/path.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/path-directives/path.rst @@ -24,6 +24,10 @@ a ``path`` directive will always be empty). Depending on the type of its ``PathMatcher`` argument the ``path`` directive extracts zero or more values from the URI. If the match fails the request is rejected with an :ref:`empty rejection set `. +.. 
note:: The empty string (also called empty word or identity) is a **neutral element** of string concatenation operation, + so it will match everything, but remember that ``path`` requires whole remaining path being matched, so (``/``) will succeed + and (``/whatever``) will fail. The :ref:`-pathPrefix-java-` provides more liberal behaviour. + Example ------- diff --git a/akka-docs/rst/java/http/routing-dsl/directives/path-directives/pathPrefix.rst b/akka-docs/rst/java/http/routing-dsl/directives/path-directives/pathPrefix.rst index 4f8b5bae96..d03e885284 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/path-directives/pathPrefix.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/path-directives/pathPrefix.rst @@ -18,6 +18,9 @@ As opposed to its :ref:`-rawPathPrefix-java-` counterpart ``pathPrefix`` automat Depending on the type of its ``PathMatcher`` argument the ``pathPrefix`` directive extracts zero or more values from the URI. If the match fails the request is rejected with an :ref:`empty rejection set `. +.. note:: The empty string (also called empty word or identity) is a **neutral element** of string concatenation operation, + so it will match everything and consume nothing. The :ref:`-path-java-` provides more strict behaviour. + Example ------- diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst index 5507c24199..0beac3c264 100644 --- a/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst @@ -31,6 +31,10 @@ a ``path`` directive will always be empty). Depending on the type of its ``PathMatcher`` argument the ``path`` directive extracts zero or more values from the URI. If the match fails the request is rejected with an :ref:`empty rejection set `. +.. 
note:: The empty string (also called empty word or identity) is a **neutral element** of string concatenation operation, + so it will match everything, but remember that ``path`` requires whole remaining path being matched, so (``/``) will succeed + and (``/whatever``) will fail. The :ref:`-pathPrefix-` provides more liberal behaviour. + Example ------- diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst index b17476dd75..579ab99d55 100644 --- a/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst @@ -25,6 +25,9 @@ As opposed to its :ref:`-rawPathPrefix-` counterpart ``pathPrefix`` automaticall Depending on the type of its ``PathMatcher`` argument the ``pathPrefix`` directive extracts zero or more values from the URI. If the match fails the request is rejected with an :ref:`empty rejection set `. +.. note:: The empty string (also called empty word or identity) is a **neutral element** of string concatenation operation, + so it will match everything and consume nothing. The :ref:`-path-` provides more strict behaviour. 
+ Example ------- diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala index 9a5f199e7a..608f8bf37a 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala @@ -22,6 +22,34 @@ class PathDirectivesSpec extends RoutingSpec with Inside { "reject [/foo/]" in test() } + """pathPrefix("")""" should { + val test = testFor(pathPrefix("") { echoUnmatchedPath }) + + // Should match everything because pathPrefix is used and "" is a neutral element. + "accept [/] and clear the unmatchedPath=" in test("") + "accept [/foo] and clear the unmatchedPath" in test("foo") + "accept [/foo/] and clear the unmatchedPath" in test("foo/") + "accept [/bar/] and clear the unmatchedPath" in test("bar/") + } + + """path("" | "foo")""" should { + val test = testFor(path("" | "foo") { echoUnmatchedPath }) + + // Should not match anything apart of "/", because path requires whole path being matched. + "accept [/] and clear the unmatchedPath=" in test("") + "reject [/foo]" in test() + "reject [/foo/]" in test() + "reject [/bar/]" in test() + } + + """path("") ~ path("foo")""" should { + val test = testFor(path("")(echoUnmatchedPath) ~ path("foo")(echoUnmatchedPath)) + + // Should match both because ~ operator is used for two exclusive routes. 
+ "accept [/] and clear the unmatchedPath=" in test("") + "accept [/foo] and clear the unmatchedPath=" in test("") + } + """path("foo" /)""" should { val test = testFor(path("foo" /) { echoUnmatchedPath }) "reject [/foo]" in test() From 16cde39de8c331f85593bebca8d1cb9239f23b8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johan=20Andr=C3=A9n?= Date: Thu, 9 Jun 2016 14:58:32 +0200 Subject: [PATCH 18/85] Better recovery timeout for persistent actors #20738 --- .../scala/akka/persistence/Eventsourced.scala | 54 ++++++++++++----- .../PersistentActorRecoveryTimeoutSpec.scala | 60 +++++++++++++++++++ 2 files changed, 99 insertions(+), 15 deletions(-) diff --git a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala index 2208a638bc..dd6fd10ad1 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala @@ -31,6 +31,9 @@ private[persistence] object Eventsourced { private final case class StashingHandlerInvocation(evt: Any, handler: Any ⇒ Unit) extends PendingHandlerInvocation /** does not force the actor to stash commands; Originates from either `persistAsync` or `defer` calls */ private final case class AsyncHandlerInvocation(evt: Any, handler: Any ⇒ Unit) extends PendingHandlerInvocation + + /** message used to detect that recovery timed out */ + private final case class RecoveryTick(snapshot: Boolean) } /** @@ -463,9 +466,12 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas */ private def recoveryStarted(replayMax: Long) = new State { - // protect against replay stalling forever because of journal overloaded and such - private val previousRecieveTimeout = context.receiveTimeout - context.setReceiveTimeout(extension.journalConfigFor(journalPluginId).getMillisDuration("recovery-event-timeout")) + // protect against snapshot stalling forever because of journal 
overloaded and such + val timeout = extension.journalConfigFor(journalPluginId).getMillisDuration("recovery-event-timeout") + val timeoutCancellable = { + import context.dispatcher + context.system.scheduler.scheduleOnce(timeout, self, RecoveryTick(snapshot = true)) + } private val recoveryBehavior: Receive = { val _receiveRecover = receiveRecover @@ -486,19 +492,22 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas override def stateReceive(receive: Receive, message: Any) = message match { case LoadSnapshotResult(sso, toSnr) ⇒ + timeoutCancellable.cancel() sso.foreach { case SelectedSnapshot(metadata, snapshot) ⇒ setLastSequenceNr(metadata.sequenceNr) // Since we are recovering we can ignore the receive behavior from the stack Eventsourced.super.aroundReceive(recoveryBehavior, SnapshotOffer(metadata, snapshot)) } - changeState(recovering(recoveryBehavior, previousRecieveTimeout)) + changeState(recovering(recoveryBehavior, timeout)) journal ! ReplayMessages(lastSequenceNr + 1L, toSnr, replayMax, persistenceId, self) - case ReceiveTimeout ⇒ + + case RecoveryTick(true) ⇒ try onRecoveryFailure( - new RecoveryTimedOut(s"Recovery timed out, didn't get snapshot within ${context.receiveTimeout.toSeconds}s"), + new RecoveryTimedOut(s"Recovery timed out, didn't get snapshot within $timeout s"), event = None) finally context.stop(self) + case other ⇒ stashInternally(other) } @@ -514,8 +523,16 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas * * All incoming messages are stashed. 
*/ - private def recovering(recoveryBehavior: Receive, previousReceiveTimeout: Duration) = + private def recovering(recoveryBehavior: Receive, timeout: FiniteDuration) = new State { + + // protect against snapshot stalling forever because of journal overloaded and such + val timeoutCancellable = { + import context.dispatcher + context.system.scheduler.schedule(timeout, timeout, self, RecoveryTick(snapshot = false)) + } + var eventSeenInInterval = false + override def toString: String = "replay started" override def recoveryRunning: Boolean = true @@ -523,14 +540,16 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas override def stateReceive(receive: Receive, message: Any) = message match { case ReplayedMessage(p) ⇒ try { + eventSeenInInterval = true updateLastSequenceNr(p) Eventsourced.super.aroundReceive(recoveryBehavior, p) } catch { case NonFatal(t) ⇒ + timeoutCancellable.cancel() try onRecoveryFailure(t, Some(p.payload)) finally context.stop(self) } case RecoverySuccess(highestSeqNr) ⇒ - resetRecieveTimeout() + timeoutCancellable.cancel() onReplaySuccess() // callback for subclass implementation changeState(processingCommands) sequenceNr = highestSeqNr @@ -538,20 +557,21 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas internalStash.unstashAll() Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryCompleted) case ReplayMessagesFailure(cause) ⇒ - resetRecieveTimeout() + timeoutCancellable.cancel() try onRecoveryFailure(cause, event = None) finally context.stop(self) - case ReceiveTimeout ⇒ + case RecoveryTick(false) if !eventSeenInInterval ⇒ + timeoutCancellable.cancel() try onRecoveryFailure( - new RecoveryTimedOut(s"Recovery timed out, didn't get event within ${context.receiveTimeout.toSeconds}s, highest sequence number seen ${sequenceNr}"), + new RecoveryTimedOut(s"Recovery timed out, didn't get event within $timeout s, highest sequence number seen $sequenceNr"), event = None) finally 
context.stop(self) + case RecoveryTick(false) ⇒ + eventSeenInInterval = false + case RecoveryTick(true) ⇒ + // snapshot tick, ignore case other ⇒ stashInternally(other) } - - private def resetRecieveTimeout(): Unit = { - context.setReceiveTimeout(previousReceiveTimeout) - } } private def flushBatch() { @@ -615,6 +635,10 @@ private[persistence] trait Eventsourced extends Snapshotter with PersistenceStas case WriteMessagesFailed(_) ⇒ writeInProgress = false () // it will be stopped by the first WriteMessageFailure message + + case _: RecoveryTick => + // we may have one of these in the mailbox before the scheduled timeout + // is cancelled when recovery has completed, just consume it so the concrete actor never sees it } def onWriteMessageComplete(err: Boolean): Unit diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala index 5f5c7e964b..c4443831ad 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorRecoveryTimeoutSpec.scala @@ -32,6 +32,28 @@ object PersistentActorRecoveryTimeoutSpec { } } + class TestReceiveTimeoutActor(receiveTimeout: FiniteDuration, probe: ActorRef) extends NamedPersistentActor("recovery-timeout-actor-2") { + + override def preStart(): Unit = { + context.setReceiveTimeout(receiveTimeout) + } + + override def receiveRecover: Receive = { + case RecoveryCompleted ⇒ probe ! context.receiveTimeout + case _ ⇒ // we don't care + } + + override def receiveCommand: Receive = { + case x ⇒ persist(x) { _ ⇒ + sender() ! x + } + } + + override protected def onRecoveryFailure(cause: Throwable, event: Option[Any]): Unit = { + probe ! 
Failure(cause) + } + } + } class PersistentActorRecoveryTimeoutSpec extends AkkaSpec(PersistentActorRecoveryTimeoutSpec.config) with ImplicitSender { @@ -69,6 +91,44 @@ class PersistentActorRecoveryTimeoutSpec extends AkkaSpec(PersistentActorRecover probe.expectMsgType[Failure].cause shouldBe a[RecoveryTimedOut] expectTerminated(replaying) + // avoid having it stuck in the next test from the + // last read request above + SteppingInmemJournal.step(journal) + } + + "should not interfere with receive timeouts" in { + val timeout = 42.days + + val probe = TestProbe() + val persisting = system.actorOf(Props(classOf[PersistentActorRecoveryTimeoutSpec.TestReceiveTimeoutActor], timeout, probe.ref)) + + awaitAssert(SteppingInmemJournal.getRef(journalId), 3.seconds) + val journal = SteppingInmemJournal.getRef(journalId) + + // initial read highest + SteppingInmemJournal.step(journal) + + persisting ! "A" + SteppingInmemJournal.step(journal) + expectMsg("A") + + watch(persisting) + system.stop(persisting) + expectTerminated(persisting) + + // now replay, but don't give the journal any tokens to replay events + // so that we cause the timeout to trigger + val replaying = system.actorOf(Props(classOf[PersistentActorRecoveryTimeoutSpec.TestReceiveTimeoutActor], timeout, probe.ref)) + + // initial read highest + SteppingInmemJournal.step(journal) + + // read journal + SteppingInmemJournal.step(journal) + + // we should get initial receive timeout back from actor when replay completes + probe.expectMsg(timeout) + } } From efb18c95b169a37531bd5590e314cee5325eb81e Mon Sep 17 00:00:00 2001 From: Hawstein Date: Thu, 9 Jun 2016 21:34:25 +0800 Subject: [PATCH 19/85] example snippet for akka http java dsl: FileAndResourceDirectives (#20758) --- ...FileAndResourceDirectivesExamplesTest.java | 124 ++++++++++++++++++ .../getFromBrowseableDirectories.rst | 3 +- .../getFromBrowseableDirectory.rst | 3 +- .../getFromDirectory.rst | 3 +- .../getFromFile.rst | 3 +- .../getFromResource.rst | 3 +- 
.../getFromResourceDirectory.rst | 3 +- .../listDirectoryContents.rst | 3 +- ...ileAndResourceDirectivesExamplesSpec.scala | 5 +- .../scaladsl/server/RequestContextImpl.scala | 8 +- .../akka/http/scaladsl/server/Route.scala | 8 +- .../MultipartUnmarshallers.scala | 4 +- 12 files changed, 151 insertions(+), 19 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java new file mode 100644 index 0000000000..b9b0da0ebc --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java @@ -0,0 +1,124 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. + */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.server.PathMatchers; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.server.directives.DirectoryRenderer; +import akka.http.javadsl.testkit.JUnitRouteTest; +import org.junit.Ignore; +import org.junit.Test; +import scala.NotImplementedError; + +import static akka.http.javadsl.server.PathMatchers.segment; + +public class FileAndResourceDirectivesExamplesTest extends JUnitRouteTest { + + @Ignore("Compile only test") + @Test + public void testGetFromFile() { + //#getFromFile + final Route route = path(PathMatchers.segment("logs").slash(segment()), name -> + getFromFile(name + ".log") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/logs/example")) + .assertEntity("example file contents"); + //#getFromFile + } + + @Ignore("Compile only test") + @Test + public void testGetFromResource() { + //#getFromResource + final Route route = path(PathMatchers.segment("logs").slash(segment()), name -> 
+ getFromResource(name + ".log") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/logs/example")) + .assertEntity("example file contents"); + //#getFromResource + } + + @Ignore("Compile only test") + @Test + public void testListDirectoryContents() { + //#listDirectoryContents + final Route route = route( + path("tmp", () -> listDirectoryContents("/tmp")), + path("custom", () -> { + // implement your custom renderer here + final DirectoryRenderer renderer = renderVanityFooter -> { + throw new NotImplementedError(); + }; + return listDirectoryContents(renderer, "/tmp"); + }) + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/logs/example")) + .assertEntity("example file contents"); + //#listDirectoryContents + } + + @Ignore("Compile only test") + @Test + public void testGetFromBrowseableDirectory() { + //#getFromBrowseableDirectory + final Route route = path("tmp", () -> + getFromBrowseableDirectory("/tmp") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/tmp")) + .assertStatusCode(StatusCodes.OK); + //#getFromBrowseableDirectory + } + + @Ignore("Compile only test") + @Test + public void testGetFromBrowseableDirectories() { + //#getFromBrowseableDirectories + final Route route = path("tmp", () -> + getFromBrowseableDirectories("/main", "/backups") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/tmp")) + .assertStatusCode(StatusCodes.OK); + //#getFromBrowseableDirectories + } + + @Ignore("Compile only test") + @Test + public void testGetFromDirectory() { + //#getFromDirectory + final Route route = pathPrefix("tmp", () -> + getFromDirectory("/tmp") + ); + + // tests: + testRoute(route).run(HttpRequest.GET("/tmp/example")) + .assertEntity("example file contents"); + //#getFromDirectory + } + + @Ignore("Compile only test") + @Test + public void testGetFromResourceDirectory() { + //#getFromResourceDirectory + final Route route = pathPrefix("examples", () -> + getFromResourceDirectory("/examples") + ); + + // tests: + 
testRoute(route).run(HttpRequest.GET("/examples/example-1")) + .assertEntity("example file contents"); + //#getFromResourceDirectory + } +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst index 502e30a32d..0aed8331c3 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst @@ -19,4 +19,5 @@ For more details refer to :ref:`-getFromBrowseableDirectory-java-`. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromBrowseableDirectories diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst index 0523adb48e..72c4ae7d97 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst @@ -19,7 +19,8 @@ For more details refer to :ref:`-getFromBrowseableDirectory-java-`. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromBrowseableDirectory Default file listing page example diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst index 1459b17392..7fe40d9675 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst @@ -27,4 +27,5 @@ Note that it's not required to wrap this directive with ``get`` as this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromDirectory diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst index 81d26733be..5042cc5749 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst @@ -27,4 +27,5 @@ Note that it's not required to wrap this directive with ``get`` as this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromFile diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst index 17754ec360..d7032776df 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst @@ -15,4 +15,5 @@ Note that it's not required to wrap this directive with ``get`` as this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromResource diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst index 32d8369cae..1e56be9cff 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst @@ -15,4 +15,5 @@ Note that it's not required to wrap this directive with ``get`` as this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#getFromResourceDirectory diff --git a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst index b0b0de9455..d8e58f51e3 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst @@ -20,4 +20,5 @@ Note that it's not required to wrap this directive with ``get`` as this directiv Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/FileAndResourceDirectivesExamplesTest.java#listDirectoryContents diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala index f22a8e2bd4..53bee2bb06 100644 --- a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala @@ -18,7 +18,7 @@ class FileAndResourceDirectivesExamplesSpec extends RoutingSpec { val route = path("logs" / Segment) { name => - getFromFile(".log") // uses implicit ContentTypeResolver + getFromFile(s"$name.log") // uses implicit ContentTypeResolver } // tests: @@ -32,7 +32,7 @@ class FileAndResourceDirectivesExamplesSpec extends RoutingSpec { val route = path("logs" / Segment) { name => - getFromResource(".log") // uses implicit ContentTypeResolver + 
getFromResource(s"$name.log") // uses implicit ContentTypeResolver } // tests: @@ -46,6 +46,7 @@ class FileAndResourceDirectivesExamplesSpec extends RoutingSpec { listDirectoryContents("/tmp") } ~ path("custom") { + // implement your custom renderer here val renderer = new DirectoryRenderer { override def marshaller(renderVanityFooter: Boolean): ToEntityMarshaller[DirectoryListing] = ??? } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala index b477b3858d..c1c008647f 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala @@ -4,11 +4,11 @@ package akka.http.scaladsl.server -import scala.concurrent.{ExecutionContextExecutor, Future} -import akka.stream.{ActorMaterializer, ActorMaterializerHelper, Materializer} +import scala.concurrent.{ ExecutionContextExecutor, Future } +import akka.stream.{ ActorMaterializer, ActorMaterializerHelper, Materializer } import akka.event.LoggingAdapter -import akka.http.scaladsl.settings.{ParserSettings, RoutingSettings} -import akka.http.scaladsl.marshalling.{Marshal, ToResponseMarshallable} +import akka.http.scaladsl.settings.{ ParserSettings, RoutingSettings } +import akka.http.scaladsl.marshalling.{ Marshal, ToResponseMarshallable } import akka.http.scaladsl.model._ import akka.http.scaladsl.util.FastFuture import akka.http.scaladsl.util.FastFuture._ diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala index d1de8d5a44..e60a109578 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala @@ -5,12 +5,12 @@ package akka.http.scaladsl.server import akka.NotUsed -import akka.http.scaladsl.settings.{ParserSettings, RoutingSettings} 
-import akka.stream.{ActorMaterializer, ActorMaterializerHelper, Materializer} +import akka.http.scaladsl.settings.{ ParserSettings, RoutingSettings } +import akka.stream.{ ActorMaterializer, ActorMaterializerHelper, Materializer } -import scala.concurrent.{ExecutionContextExecutor, Future} +import scala.concurrent.{ ExecutionContextExecutor, Future } import akka.stream.scaladsl.Flow -import akka.http.scaladsl.model.{HttpRequest, HttpResponse} +import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.http.scaladsl.util.FastFuture._ object Route { diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala index 8bf35e5656..380f50fdd6 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala @@ -9,8 +9,8 @@ import akka.http.scaladsl.settings.ParserSettings import scala.collection.immutable import scala.collection.immutable.VectorBuilder import akka.util.ByteString -import akka.event.{LoggingAdapter, NoLogging} -import akka.stream.{ActorMaterializer, ActorMaterializerHelper} +import akka.event.{ LoggingAdapter, NoLogging } +import akka.stream.{ ActorMaterializer, ActorMaterializerHelper } import akka.stream.impl.fusing.IteratorInterpreter import akka.stream.scaladsl._ import akka.http.impl.engine.parsing.BodyPartParser From b7fdcb06c4292cd66cad91b48f243a79cdd64b7c Mon Sep 17 00:00:00 2001 From: Michal Sitko Date: Thu, 9 Jun 2016 16:02:27 +0200 Subject: [PATCH 20/85] +doc add depenedency note to docs #20732 (#20761) --- akka-docs/rst/java/http/routing-dsl/testkit.rst | 2 +- akka-docs/rst/scala/http/routing-dsl/testkit.rst | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/akka-docs/rst/java/http/routing-dsl/testkit.rst b/akka-docs/rst/java/http/routing-dsl/testkit.rst index 
4eddb2a2f8..ad1bac69cf 100644 --- a/akka-docs/rst/java/http/routing-dsl/testkit.rst +++ b/akka-docs/rst/java/http/routing-dsl/testkit.rst @@ -9,7 +9,7 @@ response properties in a compact way. To use the testkit you need to take these steps: -* add a dependency to the ``akka-http-testkit-experimental`` module +* add a dependency to the ``akka-http-testkit`` module * derive the test class from ``JUnitRouteTest`` * wrap the route under test with ``RouteTest.testRoute`` to create a ``TestRoute`` * run requests against the route using ``TestRoute.run(request)`` which will return diff --git a/akka-docs/rst/scala/http/routing-dsl/testkit.rst b/akka-docs/rst/scala/http/routing-dsl/testkit.rst index af52e695d1..35b90a1f80 100644 --- a/akka-docs/rst/scala/http/routing-dsl/testkit.rst +++ b/akka-docs/rst/scala/http/routing-dsl/testkit.rst @@ -4,6 +4,9 @@ Route TestKit One of Akka HTTP's design goals is good testability of the created services. For services built with the Routing DSL Akka HTTP provides a dedicated testkit that makes efficient testing of route logic easy and convenient. This "route test DSL" is made available with the *akka-http-testkit* module. 
+To use it include the following dependency:: + + "com.typesafe.akka" %% "akka-http-testkit" % "@version@" Usage From 9ffdf81507726c912f08fcbd1f00bc43386a80f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Piotr=20Krzemi=C5=84ski?= Date: Fri, 10 Jun 2016 00:00:27 +0200 Subject: [PATCH 21/85] +htc #20683 discardEntityBytes() combinator for draining entity stream (#20707) --- .../akka/http/javadsl/model/HttpMessage.java | 28 ++++++++ .../akka/http/javadsl/model/HttpRequest.java | 5 ++ .../akka/http/javadsl/model/HttpResponse.java | 7 +- .../http/scaladsl/model/HttpMessage.scala | 38 +++++++++- .../model/EntityDrainingTestCases.java | 71 +++++++++++++++++++ .../scaladsl/model/EntityDrainingSpec.scala | 62 ++++++++++++++++ .../http/scaladsl/model/HttpMessageSpec.scala | 4 ++ project/MiMa.scala | 8 ++- 8 files changed, 217 insertions(+), 6 deletions(-) create mode 100644 akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java create mode 100644 akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java index 71b873ce81..545dbd238f 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java @@ -4,10 +4,15 @@ package akka.http.javadsl.model; +import akka.Done; +import akka.stream.Materializer; import akka.util.ByteString; +import scala.concurrent.Future; + import java.io.File; import java.nio.file.Path; import java.util.Optional; +import java.util.concurrent.CompletionStage; /** * The base type for an Http message (request or response). 
@@ -55,6 +60,29 @@ public interface HttpMessage { */ ResponseEntity entity(); + /** + * Drains entity stream of this message + */ + DiscardedEntity discardEntityBytes(Materializer materializer); + + /** + * Represents the the currently being-drained HTTP Entity which triggers completion of the contained + * Future once the entity has been drained for the given HttpMessage completely. + */ + public interface DiscardedEntity { + /** + * This future completes successfully once the underlying entity stream has been + * successfully drained (and fails otherwise). + */ + Future future(); + + /** + * This future completes successfully once the underlying entity stream has been + * successfully drained (and fails otherwise). + */ + CompletionStage completionStage(); + } + public static interface MessageTransformations { /** * Returns a copy of this message with a new protocol. diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java index 99288cae1f..b22acb29ca 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java @@ -4,7 +4,12 @@ package akka.http.javadsl.model; +import akka.Done; import akka.http.impl.util.JavaAccessors; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; + +import java.util.concurrent.CompletionStage; /** * Represents an Http request. 
diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java index d5ffeb6aca..11d5dd45fb 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java @@ -4,7 +4,12 @@ package akka.http.javadsl.model; +import akka.Done; import akka.http.impl.util.JavaAccessors; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; + +import java.util.concurrent.CompletionStage; /** * Represents an Http response. @@ -16,7 +21,7 @@ public abstract class HttpResponse implements HttpMessage, HttpMessage.MessageTr public abstract StatusCode status(); /** - * Returns the entity of this request. + * Returns the entity of this response. */ public abstract ResponseEntity entity(); diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala index 65347d068d..66808d21bb 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala @@ -8,18 +8,22 @@ import java.io.File import java.nio.file.Path import java.lang.{ Iterable ⇒ JIterable } import java.util.Optional +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters import scala.concurrent.duration.FiniteDuration -import scala.concurrent.{ Future, ExecutionContext } +import scala.concurrent.{ ExecutionContext, Future } import scala.collection.immutable import scala.compat.java8.OptionConverters._ -import scala.reflect.{ classTag, ClassTag } +import scala.reflect.{ ClassTag, classTag } +import akka.Done import akka.parboiled2.CharUtils import akka.stream.Materializer -import akka.util.{ HashCode, ByteString } +import akka.util.{ ByteString, HashCode } import akka.http.impl.util._ import akka.http.javadsl.{ 
model ⇒ jm } import akka.http.scaladsl.util.FastFuture._ +import akka.stream.scaladsl.Sink import headers._ import akka.http.impl.util.JavaMapping.Implicits._ @@ -37,6 +41,10 @@ sealed trait HttpMessage extends jm.HttpMessage { def entity: ResponseEntity def protocol: HttpProtocol + /** Drains entity stream */ + def discardEntityBytes(mat: Materializer): HttpMessage.DiscardedEntity = + new HttpMessage.DiscardedEntity(entity.dataBytes.runWith(Sink.ignore)(mat)) + /** Returns a copy of this message with the list of headers set to the given ones. */ def withHeaders(headers: HttpHeader*): Self = withHeaders(headers.toList) @@ -139,6 +147,30 @@ object HttpMessage { case HttpProtocols.`HTTP/1.1` ⇒ connectionHeader.isDefined && connectionHeader.get.hasClose case HttpProtocols.`HTTP/1.0` ⇒ connectionHeader.isEmpty || !connectionHeader.get.hasKeepAlive } + + /** + * Represents the the currently being-drained HTTP Entity which triggers completion of the contained + * Future once the entity has been drained for the given HttpMessage completely. + */ + final class DiscardedEntity(f: Future[Done]) extends akka.http.javadsl.model.HttpMessage.DiscardedEntity { + /** + * This future completes successfully once the underlying entity stream has been + * successfully drained (and fails otherwise). + */ + def future: Future[Done] = f + + /** + * This future completes successfully once the underlying entity stream has been + * successfully drained (and fails otherwise). 
+ */ + def completionStage: CompletionStage[Done] = FutureConverters.toJava(f) + } + + implicit final class HttpMessageDiscardEntity(val httpMessage: HttpMessage) extends AnyVal { + /** Drains entity stream of this message */ + def discardEntityBytes()(implicit mat: Materializer): HttpMessage.DiscardedEntity = + httpMessage.discardEntityBytes(mat) + } } /** diff --git a/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java b/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java new file mode 100644 index 0000000000..94478f6fd5 --- /dev/null +++ b/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java @@ -0,0 +1,71 @@ +/** + * Copyright (C) 2009-2016 Lightbend Inc. + */ + +package akka.http.javadsl.model; + +import akka.Done; +import akka.actor.ActorSystem; +import akka.japi.function.Procedure; +import akka.stream.ActorMaterializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.util.ByteString; +import org.junit.Test; +import org.scalatest.junit.JUnitSuite; + +import scala.util.Try; + +import java.util.Arrays; +import java.util.concurrent.CompletableFuture; + +import static org.junit.Assert.assertEquals; + +public class EntityDrainingTestCases extends JUnitSuite { + + private ActorSystem sys = ActorSystem.create("test"); + private ActorMaterializer mat = ActorMaterializer.create(sys); + private Iterable testData = Arrays.asList(ByteString.fromString("abc"), ByteString.fromString("def")); + + @Test + public void testHttpRequestDrainEntity() { + + CompletableFuture f = new CompletableFuture<>(); + Source s = Source.from(testData).alsoTo(Sink.onComplete(completeDone(f))); + + RequestEntity reqEntity = HttpEntities.create(ContentTypes.TEXT_PLAIN_UTF8, s); + HttpRequest req = HttpRequest.create().withEntity(reqEntity); + + HttpMessage.DiscardedEntity de = req.discardEntityBytes(mat); + + assertEquals(Done.getInstance(), f.join()); + 
assertEquals(Done.getInstance(), de.completionStage().toCompletableFuture().join()); + } + + @Test + public void testHttpResponseDrainEntity() { + + CompletableFuture f = new CompletableFuture<>(); + Source s = Source.from(testData).alsoTo(Sink.onComplete(completeDone(f))); + + ResponseEntity respEntity = HttpEntities.create(ContentTypes.TEXT_PLAIN_UTF8, s); + HttpResponse resp = HttpResponse.create().withEntity(respEntity); + + HttpMessage.DiscardedEntity de = resp.discardEntityBytes(mat); + + assertEquals(Done.getInstance(), f.join()); + assertEquals(Done.getInstance(), de.completionStage().toCompletableFuture().join()); + } + + private Procedure> completeDone(CompletableFuture p) { + return new Procedure>() { + @Override + public void apply(Try t) throws Exception { + if(t.isSuccess()) + p.complete(Done.getInstance()); + else + p.completeExceptionally(t.failed().get()); + } + }; + } +} diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala new file mode 100644 index 0000000000..47d5cf6af0 --- /dev/null +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala @@ -0,0 +1,62 @@ +/** + * Copyright (C) 2009-2016 Lightbend Inc. 
+ */ + +package akka.http.scaladsl.model + +import java.util.concurrent.CompletableFuture + +import akka.Done +import akka.actor.ActorSystem +import akka.japi.function +import akka.stream.ActorMaterializer +import akka.stream.scaladsl._ +import akka.util.ByteString +import org.scalatest.concurrent.ScalaFutures._ +import org.scalatest.{ Matchers, WordSpec } + +import scala.concurrent.Promise +import scala.util.{ Failure, Success, Try } + +class EntityDrainingSpec extends WordSpec with Matchers { + + implicit val sys = ActorSystem("test") + implicit val mat = ActorMaterializer() + + val testData = Vector.tabulate(200)(i ⇒ ByteString(s"row-$i")) + + "HttpRequest" should { + + "drain entity stream after .discardEntityBytes() call" in { + + val p = Promise[Done]() + val s = Source + .fromIterator[ByteString](() ⇒ testData.iterator) + .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) + + val req = HttpRequest(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) + val de = req.discardEntityBytes() + + p.future.futureValue should ===(Done) + de.future.futureValue should ===(Done) + } + } + + "HttpResponse" should { + + "drain entity stream after .discardEntityBytes() call" in { + + val p = Promise[Done]() + val s = Source + .fromIterator[ByteString](() ⇒ testData.iterator) + .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) + + val resp = HttpResponse(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) + val de = resp.discardEntityBytes() + + p.future.futureValue should ===(Done) + de.future.futureValue should ===(Done) + } + } + +} diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpMessageSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpMessageSpec.scala index 53eab46770..28c2c5a2f6 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpMessageSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpMessageSpec.scala @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2016 Lightbend Inc. 
+ */ + package akka.http.scaladsl.model import headers.Host diff --git a/project/MiMa.scala b/project/MiMa.scala index a695fb63c2..14226d8b02 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -878,8 +878,12 @@ object MiMa extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("akka.stream.ActorMaterializer.downcast"), // #20531 adding refuseUid to Gated - FilterAnyProblem("akka.remote.EndpointManager$Gated") - ) + FilterAnyProblem("akka.remote.EndpointManager$Gated"), + + // #20683 + ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.javadsl.model.HttpMessage.discardEntityBytes"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.scaladsl.model.HttpMessage.discardEntityBytes") + ) ) } } From 95cfbda0303426057d935723b0f7aae16466cf0e Mon Sep 17 00:00:00 2001 From: Bernard Leach Date: Fri, 10 Jun 2016 16:21:53 +1000 Subject: [PATCH 22/85] =htc migrate BodyPartRenderer to GraphStage #20288 --- .../engine/rendering/BodyPartRenderer.scala | 79 +++++++++++-------- .../akka/http/scaladsl/model/Multipart.scala | 2 +- .../http/scaladsl/model/MultipartSpec.scala | 17 +++- project/MiMa.scala | 7 +- 4 files changed, 67 insertions(+), 38 deletions(-) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala index 4cfae5850d..0ee220a276 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala @@ -17,6 +17,7 @@ import akka.stream.scaladsl.Source import akka.stream.stage._ import akka.util.ByteString import HttpEntity._ +import akka.stream.{ Attributes, FlowShape, Inlet, Outlet } import scala.concurrent.forkjoin.ThreadLocalRandom @@ -29,46 +30,60 @@ private[http] object BodyPartRenderer { boundary: String, nioCharset: Charset, partHeadersSizeHint: Int, - log: 
LoggingAdapter): PushPullStage[Multipart.BodyPart, Source[ChunkStreamPart, Any]] = - new PushPullStage[Multipart.BodyPart, Source[ChunkStreamPart, Any]] { + log: LoggingAdapter): GraphStage[FlowShape[Multipart.BodyPart, Source[ChunkStreamPart, Any]]] = + new GraphStage[FlowShape[Multipart.BodyPart, Source[ChunkStreamPart, Any]]] { var firstBoundaryRendered = false - override def onPush(bodyPart: Multipart.BodyPart, ctx: Context[Source[ChunkStreamPart, Any]]): SyncDirective = { - val r = new CustomCharsetByteStringRendering(nioCharset, partHeadersSizeHint) + val in: Inlet[Multipart.BodyPart] = Inlet("BodyPartRenderer.in") + val out: Outlet[Source[ChunkStreamPart, Any]] = Outlet("BodyPartRenderer.out") + override val shape: FlowShape[Multipart.BodyPart, Source[ChunkStreamPart, Any]] = FlowShape(in, out) - def bodyPartChunks(data: Source[ByteString, Any]): Source[ChunkStreamPart, Any] = { - val entityChunks = data.map[ChunkStreamPart](Chunk(_)) - (chunkStream(r.get) ++ entityChunks).mapMaterializedValue((_) ⇒ ()) - } + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = + new GraphStageLogic(shape) with InHandler with OutHandler { + override def onPush(): Unit = { + val r = new CustomCharsetByteStringRendering(nioCharset, partHeadersSizeHint) - def completePartRendering(): Source[ChunkStreamPart, Any] = - bodyPart.entity match { - case x if x.isKnownEmpty ⇒ chunkStream(r.get) - case Strict(_, data) ⇒ chunkStream((r ~~ data).get) - case Default(_, _, data) ⇒ bodyPartChunks(data) - case IndefiniteLength(_, data) ⇒ bodyPartChunks(data) + def bodyPartChunks(data: Source[ByteString, Any]): Source[ChunkStreamPart, Any] = { + val entityChunks = data.map[ChunkStreamPart](Chunk(_)) + (chunkStream(r.get) ++ entityChunks).mapMaterializedValue((_) ⇒ ()) + } + + def completePartRendering(entity: HttpEntity): Source[ChunkStreamPart, Any] = + entity match { + case x if x.isKnownEmpty ⇒ chunkStream(r.get) + case Strict(_, data) ⇒ chunkStream((r ~~ data).get) 
+ case Default(_, _, data) ⇒ bodyPartChunks(data) + case IndefiniteLength(_, data) ⇒ bodyPartChunks(data) + } + + renderBoundary(r, boundary, suppressInitialCrLf = !firstBoundaryRendered) + firstBoundaryRendered = true + + val bodyPart = grab(in) + renderEntityContentType(r, bodyPart.entity) + renderHeaders(r, bodyPart.headers, log) + + push(out, completePartRendering(bodyPart.entity)) } - renderBoundary(r, boundary, suppressInitialCrLf = !firstBoundaryRendered) - firstBoundaryRendered = true - renderEntityContentType(r, bodyPart.entity) - renderHeaders(r, bodyPart.headers, log) - ctx.push(completePartRendering()) - } + override def onPull(): Unit = + if (isClosed(in) && firstBoundaryRendered) + completeRendering() + else if (isClosed(in)) completeStage() + else pull(in) - override def onPull(ctx: Context[Source[ChunkStreamPart, Any]]): SyncDirective = { - val finishing = ctx.isFinishing - if (finishing && firstBoundaryRendered) { - val r = new ByteStringRendering(boundary.length + 4) - renderFinalBoundary(r, boundary) - ctx.pushAndFinish(chunkStream(r.get)) - } else if (finishing) - ctx.finish() - else - ctx.pull() - } + override def onUpstreamFinish(): Unit = + if (isAvailable(out) && firstBoundaryRendered) completeRendering() - override def onUpstreamFinish(ctx: Context[Source[ChunkStreamPart, Any]]): TerminationDirective = ctx.absorbTermination() + private def completeRendering(): Unit = { + val r = new ByteStringRendering(boundary.length + 4) + renderFinalBoundary(r, boundary) + push(out, chunkStream(r.get)) + completeStage() + } + + setHandlers(in, out, this) + } private def chunkStream(byteString: ByteString): Source[ChunkStreamPart, Any] = Source.single(Chunk(byteString)) diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala index 7275ff8dab..63029fbaa2 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala +++ 
b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala @@ -61,7 +61,7 @@ sealed trait Multipart extends jm.Multipart { boundary: String = BodyPartRenderer.randomBoundary())(implicit log: LoggingAdapter = NoLogging): MessageEntity = { val chunks = parts - .transform(() ⇒ BodyPartRenderer.streamed(boundary, charset.nioCharset, partHeadersSizeHint = 128, log)) + .via(BodyPartRenderer.streamed(boundary, charset.nioCharset, partHeadersSizeHint = 128, log)) .flatMapConcat(ConstantFun.scalaIdentityFunction) HttpEntity.Chunked(mediaType withBoundary boundary withCharset charset, chunks) } diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala index a37f21fcc7..0882582ef5 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala @@ -4,12 +4,13 @@ package akka.http.scaladsl.model -import com.typesafe.config.{ Config, ConfigFactory } +import com.typesafe.config.{Config, ConfigFactory} + import scala.concurrent.Await import scala.concurrent.duration._ -import org.scalatest.{ BeforeAndAfterAll, Inside, Matchers, WordSpec } +import org.scalatest.{BeforeAndAfterAll, Inside, Matchers, WordSpec} import akka.stream.ActorMaterializer -import akka.stream.scaladsl.Source +import akka.stream.scaladsl.{Sink, Source} import akka.util.ByteString import akka.actor.ActorSystem import headers._ @@ -34,6 +35,16 @@ class MultipartSpec extends WordSpec with Matchers with Inside with BeforeAndAft MediaTypes.`multipart/mixed`, Multipart.General.BodyPart.Strict(HttpEntity("data"), List(ETag("xzy")))) } + + "support `toEntity`" in { + val streamed = Multipart.General( + MediaTypes.`multipart/mixed`, + Source(Multipart.General.BodyPart(defaultEntity("data"), List(ETag("xzy"))) :: Nil)) + val result = streamed.toEntity(boundary = "boundary") + result.contentType 
shouldBe MediaTypes.`multipart/mixed`.withBoundary("boundary").withCharset(HttpCharsets.`UTF-8`) + val encoding = Await.result(result.dataBytes.runWith(Sink.seq), 1.second) + encoding .map(_.utf8String).mkString shouldBe "--boundary\r\nContent-Type: text/plain; charset=UTF-8\r\nETag: \"xzy\"\r\n\r\ndata\r\n--boundary--" + } } "Multipart.FormData" should { diff --git a/project/MiMa.scala b/project/MiMa.scala index 14226d8b02..0366eba2d9 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -882,8 +882,11 @@ object MiMa extends AutoPlugin { // #20683 ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.javadsl.model.HttpMessage.discardEntityBytes"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.scaladsl.model.HttpMessage.discardEntityBytes") - ) + ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.scaladsl.model.HttpMessage.discardEntityBytes"), + + // #20288 migrate BodyPartRenderer to GraphStage + ProblemFilters.exclude[IncompatibleResultTypeProblem]("akka.http.impl.engine.rendering.BodyPartRenderer.streamed") + ) ) } } From a7451fd888068ea3726f599bb920e53f5aaad70a Mon Sep 17 00:00:00 2001 From: miaoqian Date: Fri, 10 Jun 2016 19:46:59 +0800 Subject: [PATCH 23/85] =str ActorPublisher mustn't to signal onSubscribe on the given subscriber more than once. 
(#20733) --- .../akka/stream/actor/ActorPublisherSpec.scala | 13 ++++++++++++- .../scala/akka/stream/actor/ActorPublisher.scala | 13 ++++++------- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala index 65ef8830f3..f357fcd0ee 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala @@ -8,6 +8,7 @@ import akka.stream.{ ClosedShape, ActorMaterializer, ActorMaterializerSettings, import akka.stream.scaladsl._ import akka.stream.testkit._ import akka.stream.testkit.Utils._ +import akka.stream.impl.ReactiveStreamsCompliance import akka.testkit.TestEvent.Mute import akka.testkit.{ AkkaSpec, EventFilter, ImplicitSender, TestProbe } import scala.annotation.tailrec @@ -317,7 +318,17 @@ class ActorPublisherSpec extends AkkaSpec(ActorPublisherSpec.config) with Implic s.expectSubscription() val s2 = TestSubscriber.manualProbe[String]() ActorPublisher[String](ref).subscribe(s2) - s2.expectSubscriptionAndError().getClass should be(classOf[IllegalStateException]) + s2.expectSubscriptionAndError().getMessage should be(s"ActorPublisher ${ReactiveStreamsCompliance.SupportsOnlyASingleSubscriber}") + } + + "can not subscribe the same subscriber multiple times" in { + val probe = TestProbe() + val ref = system.actorOf(testPublisherProps(probe.ref)) + val s = TestSubscriber.manualProbe[String]() + ActorPublisher[String](ref).subscribe(s) + s.expectSubscription() + ActorPublisher[String](ref).subscribe(s) + s.expectError().getMessage should be(ReactiveStreamsCompliance.CanNotSubscribeTheSameSubscriberMultipleTimes) } "signal onCompete when actor is stopped" in { diff --git a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala index 
7eec474a83..cff0ec6bdb 100644 --- a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala +++ b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala @@ -183,7 +183,7 @@ trait ActorPublisher[T] extends Actor { * otherwise `onNext` will throw `IllegalStateException`. */ def onNext(element: T): Unit = lifecycleState match { - case Active | PreSubscriber | CompleteThenStop ⇒ + case Active | PreSubscriber ⇒ if (demand > 0) { demand -= 1 tryOnNext(subscriber, element) @@ -192,7 +192,7 @@ trait ActorPublisher[T] extends Actor { "onNext is not allowed when the stream has not requested elements, totalDemand was 0") case _: ErrorEmitted ⇒ throw new IllegalStateException("onNext must not be called after onError") - case Completed ⇒ + case Completed | CompleteThenStop ⇒ throw new IllegalStateException("onNext must not be called after onComplete") case Canceled ⇒ // drop } @@ -302,11 +302,10 @@ trait ActorPublisher[T] extends Actor { tryOnSubscribe(sub, CancelledSubscription) tryOnComplete(sub) case Active | Canceled ⇒ - tryOnSubscribe(sub, CancelledSubscription) - tryOnError( - sub, - if (subscriber == sub) ReactiveStreamsCompliance.canNotSubscribeTheSameSubscriberMultipleTimesException - else ReactiveStreamsCompliance.canNotSubscribeTheSameSubscriberMultipleTimesException) + if (subscriber eq sub) + rejectDuplicateSubscriber(sub) + else + rejectAdditionalSubscriber(sub, "ActorPublisher") } case Cancel ⇒ From 071b64809f9d451fdb443cd56da5075a4aca4e50 Mon Sep 17 00:00:00 2001 From: Denis Rosca Date: Fri, 10 Jun 2016 21:57:31 +0300 Subject: [PATCH 24/85] Warning for actors with value class arguments Update documentation to specify that value class arguments are not supported for Prop creation using the recommended classOf[] approach. 
Issue: #20735, #16444 --- akka-docs/rst/scala/actors.rst | 17 +++++++++++++++++ .../scala/code/docs/actor/ActorDocSpec.scala | 15 ++++++++++++++- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/akka-docs/rst/scala/actors.rst b/akka-docs/rst/scala/actors.rst index 8417e10e5c..7f306ef338 100644 --- a/akka-docs/rst/scala/actors.rst +++ b/akka-docs/rst/scala/actors.rst @@ -81,6 +81,11 @@ verified during construction of the :class:`Props` object, resulting in an :class:`IllegalArgumentException` if no or multiple matching constructors are found. +.. note:: + + The recommended approach to create the actor :class:`Props` is not supported + for cases when the actor constructor takes value classes as arguments. + Dangerous Variants ^^^^^^^^^^^^^^^^^^ @@ -162,6 +167,18 @@ another child to the same parent an :class:`InvalidActorNameException` is thrown Actors are automatically started asynchronously when created. +Value classes as constructor arguments +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The recommended way to instantiate actor props uses reflection at runtime +to determine the correct actor constructor to be invoked and due to technical +limitations is not supported when said constructor takes arguments that are +value classes. +In these cases you should either unpack the arguments or create the props by +calling the constructor manually: + +.. 
includecode:: code/docs/actor/ActorDocSpec.scala#actor-with-value-class-argument + Dependency Injection -------------------- diff --git a/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala index 50c7adc846..0048f25058 100644 --- a/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala @@ -50,6 +50,19 @@ class ActorWithArgs(arg: String) extends Actor { def receive = { case _ => () } } +//#actor-with-value-class-argument +class Argument(val value: String) extends AnyVal +class ValueClassActor(arg: Argument) extends Actor { + def receive = {case _ => () } +} + +object ValueClassActor { + def props1(arg: Argument) = Props(classOf[ValueClassActor], arg) // fails at runtime + def props2(arg: Argument) = Props(classOf[ValueClassActor], arg.value) // ok + def props3(arg: Argument) = Props(new ValueClassActor(arg)) // ok +} +//#actor-with-value-class-argument + class DemoActorWrapper extends Actor { //#props-factory object DemoActor { @@ -312,7 +325,7 @@ class ActorDocSpec extends AkkaSpec(""" val props1 = Props[MyActor] val props2 = Props(new ActorWithArgs("arg")) // careful, see below - val props3 = Props(classOf[ActorWithArgs], "arg") + val props3 = Props(classOf[ActorWithArgs], "arg") // no support for value class arguments //#creating-props //#creating-props-deprecated From bf746431e03fbc631a935d063c5818ca1beff7eb Mon Sep 17 00:00:00 2001 From: Jan Ypma Date: Thu, 9 Jun 2016 12:41:36 +0200 Subject: [PATCH 25/85] Allow Java API to create actor publishers with stash --- .../akka/stream/actor/ActorPublisher.scala | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala index ee930b4b24..1ba9acdf90 100644 --- a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala +++ 
b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala @@ -469,3 +469,24 @@ object AbstractActorPublisher { * @see [[akka.stream.actor.ActorPublisher]] */ abstract class AbstractActorPublisher[T] extends AbstractActor with ActorPublisher[T] + +/** + * Java API compatible with lambda expressions. + * This class adds a Stash to {@link AbstractActorPublisher}. + * @see [[akka.stream.actor.ActorPublisher]] and [[akka.stream.actor.AbstractActorWithStash]] + */ +abstract class AbstractActorPublisherWithStash[T] extends AbstractActor with ActorPublisher[T] with Stash + +/** + * Java API compatible with lambda expressions. + * This class adds an unbounded Stash to {@link AbstractActorPublisher}. + * @see [[akka.stream.actor.ActorPublisher]] and [[akka.stream.actor.AbstractActorWithUnboundedStash]] + */ +abstract class AbstractActorPublisherWithUnboundedStash[T] extends AbstractActor with ActorPublisher[T] with UnboundedStash + +/** + * Java API compatible with lambda expressions. + * This class adds an unrestricted Stash to {@link AbstractActorPublisher}. 
+ * @see [[akka.stream.actor.ActorPublisher]] and [[akka.stream.actor.AbstractActorWithUnrestrictedStash]] + */ +abstract class AbstractActorPublisherWithUnrestrictedStash[T] extends AbstractActor with ActorPublisher[T] with UnrestrictedStash From 374a8525531d55a4361253b55eaefc126845382c Mon Sep 17 00:00:00 2001 From: Konrad Malawski Date: Mon, 13 Jun 2016 14:11:09 +0200 Subject: [PATCH 26/85] =htc #20683 added more docs, documented expected double-run behaviour (#20765) --- .../akka/http/javadsl/model/HttpMessage.java | 20 ++++- .../akka/http/impl/util/StreamUtils.scala | 2 +- .../http/scaladsl/model/HttpMessage.scala | 20 ++++- ...stCases.java => EntityDiscardingTest.java} | 6 +- .../scaladsl/model/EntityDiscardingSpec.scala | 81 +++++++++++++++++++ .../scaladsl/model/EntityDrainingSpec.scala | 62 -------------- 6 files changed, 120 insertions(+), 71 deletions(-) rename akka-http-core/src/test/java/akka/http/javadsl/model/{EntityDrainingTestCases.java => EntityDiscardingTest.java} (93%) create mode 100644 akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDiscardingSpec.scala delete mode 100644 akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java index 545dbd238f..aac8d8d3d8 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java @@ -61,7 +61,21 @@ public interface HttpMessage { ResponseEntity entity(); /** - * Drains entity stream of this message + * Discards the entities data bytes by running the {@code dataBytes} Source contained by the {@code entity} + * of this HTTP message. 
+ * + * Note: It is crucial that entities are either discarded, or consumed by running the underlying [[Source]] + * as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection + * (as designed), however possibly leading to an idle-timeout that will close the connection, instead of + * just having ignored the data. + * + * Warning: It is not allowed to discard and/or consume the the {@code entity.dataBytes} more than once + * as the stream is directly attached to the "live" incoming data source from the underlying TCP connection. + * Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose + * of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an + * "stream can cannot be materialized more than once" exception. + * + * In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716. */ DiscardedEntity discardEntityBytes(Materializer materializer); @@ -69,7 +83,7 @@ public interface HttpMessage { * Represents the the currently being-drained HTTP Entity which triggers completion of the contained * Future once the entity has been drained for the given HttpMessage completely. */ - public interface DiscardedEntity { + interface DiscardedEntity { /** * This future completes successfully once the underlying entity stream has been * successfully drained (and fails otherwise). @@ -83,7 +97,7 @@ public interface HttpMessage { CompletionStage completionStage(); } - public static interface MessageTransformations { + interface MessageTransformations { /** * Returns a copy of this message with a new protocol. 
*/ diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala b/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala index b2cea22a33..1881ebab24 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala @@ -59,7 +59,7 @@ private[http] object StreamUtils { override def onPull(): Unit = pull(in) override def onUpstreamFailure(ex: Throwable): Unit = { - promise.failure(ex) + promise.tryFailure(ex) failStage(ex) } diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala index 66808d21bb..d8d87357db 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala @@ -166,8 +166,24 @@ object HttpMessage { def completionStage: CompletionStage[Done] = FutureConverters.toJava(f) } - implicit final class HttpMessageDiscardEntity(val httpMessage: HttpMessage) extends AnyVal { - /** Drains entity stream of this message */ + /** Adds Scala DSL idiomatic methods to [[HttpMessage]], e.g. versions of methods with an implicit [[Materializer]]. */ + implicit final class HttpMessageScalaDSLSugar(val httpMessage: HttpMessage) extends AnyVal { + /** + * Discards the entities data bytes by running the `dataBytes` Source contained by the `entity` of this HTTP message. + * + * Note: It is crucial that entities are either discarded, or consumed by running the underlying [[akka.stream.scaladsl.Source]] + * as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection + * (as designed), however possibly leading to an idle-timeout that will close the connection, instead of + * just having ignored the data. 
+ * + * Warning: It is not allowed to discard and/or consume the the `entity.dataBytes` more than once + * as the stream is directly attached to the "live" incoming data source from the underlying TCP connection. + * Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose + * of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an + * "stream can cannot be materialized more than once" exception. + * + * In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716. + */ def discardEntityBytes()(implicit mat: Materializer): HttpMessage.DiscardedEntity = httpMessage.discardEntityBytes(mat) } diff --git a/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java b/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDiscardingTest.java similarity index 93% rename from akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java rename to akka-http-core/src/test/java/akka/http/javadsl/model/EntityDiscardingTest.java index 94478f6fd5..fbdf6add6a 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDrainingTestCases.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/model/EntityDiscardingTest.java @@ -21,14 +21,14 @@ import java.util.concurrent.CompletableFuture; import static org.junit.Assert.assertEquals; -public class EntityDrainingTestCases extends JUnitSuite { +public class EntityDiscardingTest extends JUnitSuite { private ActorSystem sys = ActorSystem.create("test"); private ActorMaterializer mat = ActorMaterializer.create(sys); private Iterable testData = Arrays.asList(ByteString.fromString("abc"), ByteString.fromString("def")); @Test - public void testHttpRequestDrainEntity() { + public void testHttpRequestDiscardEntity() { CompletableFuture f = new CompletableFuture<>(); Source s = Source.from(testData).alsoTo(Sink.onComplete(completeDone(f))); 
@@ -43,7 +43,7 @@ public class EntityDrainingTestCases extends JUnitSuite { } @Test - public void testHttpResponseDrainEntity() { + public void testHttpResponseDiscardEntity() { CompletableFuture f = new CompletableFuture<>(); Source s = Source.from(testData).alsoTo(Sink.onComplete(completeDone(f))); diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDiscardingSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDiscardingSpec.scala new file mode 100644 index 0000000000..6730d315c3 --- /dev/null +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDiscardingSpec.scala @@ -0,0 +1,81 @@ +/** + * Copyright (C) 2009-2016 Lightbend Inc. + */ + +package akka.http.scaladsl.model + +import akka.Done +import akka.http.scaladsl.model.HttpEntity.Chunked +import akka.http.scaladsl.{ Http, TestUtils } +import akka.stream.ActorMaterializer +import akka.stream.scaladsl._ +import akka.testkit.AkkaSpec +import scala.concurrent.duration._ +import akka.util.ByteString + +import scala.concurrent.{ Await, Promise } + +class EntityDiscardingSpec extends AkkaSpec { + + implicit val mat = ActorMaterializer() + + val testData = Vector.tabulate(200)(i ⇒ ByteString(s"row-$i")) + + "HttpRequest" should { + + "discard entity stream after .discardEntityBytes() call" in { + + val p = Promise[Done]() + val s = Source + .fromIterator[ByteString](() ⇒ testData.iterator) + .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) + + val req = HttpRequest(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) + val de = req.discardEntityBytes() + + p.future.futureValue should ===(Done) + de.future.futureValue should ===(Done) + } + } + + "HttpResponse" should { + + "discard entity stream after .discardEntityBytes() call" in { + + val p = Promise[Done]() + val s = Source + .fromIterator[ByteString](() ⇒ testData.iterator) + .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) + + val resp = HttpResponse(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) + val 
de = resp.discardEntityBytes() + + p.future.futureValue should ===(Done) + de.future.futureValue should ===(Done) + } + + // TODO consider improving this by storing a mutable "already materialized" flag somewhere + // TODO likely this is going to inter-op with the auto-draining as described in #18716 + "should not allow draining a second time" in { + val (_, host, port) = TestUtils.temporaryServerHostnameAndPort() + val bound = Http().bindAndHandleSync( + req ⇒ + HttpResponse(entity = HttpEntity( + ContentTypes.`text/csv(UTF-8)`, Source.fromIterator[ByteString](() ⇒ testData.iterator))), + host, port).futureValue + + try { + + val response = Http().singleRequest(HttpRequest(uri = s"http://$host:$port/")).futureValue + + val de = response.discardEntityBytes() + de.future.futureValue should ===(Done) + + val de2 = response.discardEntityBytes() + val secondRunException = intercept[IllegalStateException] { Await.result(de2.future, 3.seconds) } + secondRunException.getMessage should include("Source cannot be materialized more than once") + } finally bound.unbind().futureValue + } + } + +} diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala deleted file mode 100644 index 47d5cf6af0..0000000000 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/EntityDrainingSpec.scala +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Copyright (C) 2009-2016 Lightbend Inc. 
- */ - -package akka.http.scaladsl.model - -import java.util.concurrent.CompletableFuture - -import akka.Done -import akka.actor.ActorSystem -import akka.japi.function -import akka.stream.ActorMaterializer -import akka.stream.scaladsl._ -import akka.util.ByteString -import org.scalatest.concurrent.ScalaFutures._ -import org.scalatest.{ Matchers, WordSpec } - -import scala.concurrent.Promise -import scala.util.{ Failure, Success, Try } - -class EntityDrainingSpec extends WordSpec with Matchers { - - implicit val sys = ActorSystem("test") - implicit val mat = ActorMaterializer() - - val testData = Vector.tabulate(200)(i ⇒ ByteString(s"row-$i")) - - "HttpRequest" should { - - "drain entity stream after .discardEntityBytes() call" in { - - val p = Promise[Done]() - val s = Source - .fromIterator[ByteString](() ⇒ testData.iterator) - .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) - - val req = HttpRequest(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) - val de = req.discardEntityBytes() - - p.future.futureValue should ===(Done) - de.future.futureValue should ===(Done) - } - } - - "HttpResponse" should { - - "drain entity stream after .discardEntityBytes() call" in { - - val p = Promise[Done]() - val s = Source - .fromIterator[ByteString](() ⇒ testData.iterator) - .alsoTo(Sink.onComplete(t ⇒ p.complete(t))) - - val resp = HttpResponse(entity = HttpEntity(ContentTypes.`text/csv(UTF-8)`, s)) - val de = resp.discardEntityBytes() - - p.future.futureValue should ===(Done) - de.future.futureValue should ===(Done) - } - } - -} From f0e9de4e567ed4ab6a72f5a4b97132980a3821a4 Mon Sep 17 00:00:00 2001 From: "Richard S. 
Imaoka" Date: Mon, 13 Jun 2016 23:13:22 +0900 Subject: [PATCH 27/85] Fix CircuitBreaker Open state's remainingTimeout() method (#20029) --- akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala index 9e0a52c2ce..5f34ac2e5a 100644 --- a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala +++ b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala @@ -464,7 +464,8 @@ class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Finite * @return duration to when the breaker will attempt a reset by transitioning to half-open */ private def remainingDuration(): FiniteDuration = { - val diff = System.nanoTime() - get + val fromOpened = System.nanoTime() - get + val diff = resetTimeout.toNanos - fromOpened if (diff <= 0L) Duration.Zero else diff.nanos } From 5899658416e2b9ea426556dde58441c8b338a7ab Mon Sep 17 00:00:00 2001 From: Konrad Malawski Date: Mon, 13 Jun 2016 18:36:40 +0200 Subject: [PATCH 28/85] =str #20214 remove debug println in TLSActor (#20769) --- akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala b/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala index 3addd75cd6..9ae2505744 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala @@ -477,7 +477,6 @@ private[akka] class TLSActor( // see here: https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html#SNIExamples // resolves: https://github.com/akka/akka/issues/19287 private def applySNI(params: NegotiateNewSession): Unit = { - println("sslConfig.config.loose.disableSNI = " + sslConfig.config.loose.disableSNI) for { sslParams ← 
params.sslParameters (hostname, _) ← hostInfo From f246c560873a17bfca47ece00f50cc50957f1d14 Mon Sep 17 00:00:00 2001 From: Chris Birchall Date: Sat, 18 Jun 2016 11:15:17 +0200 Subject: [PATCH 29/85] +doc Add more imports to the stream quick start guides for Java and Scala (#20797) * Add more imports to the stream quick start guides. This makes it easier for people to execute the code samples while they read through the guide. * Change line endings to be consistent with other files For some reason these 2 files had CR+LF line endings. --- .../code/docs/stream/QuickStartDocTest.java | 24 +- .../rst/java/stream/stream-quickstart.rst | 670 +++++++++--------- .../code/docs/stream/QuickStartDocSpec.scala | 13 +- .../rst/scala/stream/stream-quickstart.rst | 662 ++++++++--------- 4 files changed, 692 insertions(+), 677 deletions(-) diff --git a/akka-docs/rst/java/code/docs/stream/QuickStartDocTest.java b/akka-docs/rst/java/code/docs/stream/QuickStartDocTest.java index c738a9439c..c3e0395742 100644 --- a/akka-docs/rst/java/code/docs/stream/QuickStartDocTest.java +++ b/akka-docs/rst/java/code/docs/stream/QuickStartDocTest.java @@ -3,23 +3,27 @@ */ package docs.stream; +//#stream-imports +import akka.stream.*; +import akka.stream.javadsl.*; +//#stream-imports + +//#other-imports +import akka.Done; +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.util.ByteString; + import java.nio.file.Paths; import java.math.BigInteger; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import org.junit.*; - -import akka.Done; -import akka.NotUsed; -import akka.actor.ActorSystem; -//#imports -import akka.stream.*; -import akka.stream.javadsl.*; -//#imports -import akka.util.ByteString; import scala.concurrent.duration.Duration; +//#other-imports + +import org.junit.*; /** * This class is not meant to be run as a test in the test suite, but it diff --git 
a/akka-docs/rst/java/stream/stream-quickstart.rst b/akka-docs/rst/java/stream/stream-quickstart.rst index fef64e9c55..12d9016502 100644 --- a/akka-docs/rst/java/stream/stream-quickstart.rst +++ b/akka-docs/rst/java/stream/stream-quickstart.rst @@ -1,333 +1,337 @@ -.. _stream-quickstart-java: - -Quick Start Guide -================= - -A stream usually begins at a source, so this is also how we start an Akka -Stream. Before we create one, we import the full complement of streaming tools: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#imports - -Now we will start with a rather simple source, emitting the integers 1 to 100: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#create-source - -The :class:`Source` type is parameterized with two types: the first one is the -type of element that this source emits and the second one may signal that -running the source produces some auxiliary value (e.g. a network source may -provide information about the bound port or the peer’s address). Where no -auxiliary information is produced, the type ``akka.NotUsed`` is used—and a -simple range of integers surely falls into this category. - -Having created this source means that we have a description of how to emit the -first 100 natural numbers, but this source is not yet active. In order to get -those numbers out we have to run it: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#run-source - -This line will complement the source with a consumer function—in this example -we simply print out the numbers to the console—and pass this little stream -setup to an Actor that runs it. This activation is signaled by having “run” be -part of the method name; there are other methods that run Akka Streams, and -they all follow this pattern. - -You may wonder where the Actor gets created that runs the stream, and you are -probably also asking yourself what this ``materializer`` means. 
In order to get -this value we first need to create an Actor system: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#create-materializer - -There are other ways to create a materializer, e.g. from an -:class:`ActorContext` when using streams from within Actors. The -:class:`Materializer` is a factory for stream execution engines, it is the -thing that makes streams run—you don’t need to worry about any of the details -just now apart from that you need one for calling any of the ``run`` methods on -a :class:`Source`. - -The nice thing about Akka Streams is that the :class:`Source` is just a -description of what you want to run, and like an architect’s blueprint it can -be reused, incorporated into a larger design. We may choose to transform the -source of integers and write it to a file instead: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#transform-source - -First we use the ``scan`` combinator to run a computation over the whole -stream: starting with the number 1 (``BigInteger.ONE``) we multiple by each of -the incoming numbers, one after the other; the scan operationemits the initial -value and then every calculation result. This yields the series of factorial -numbers which we stash away as a :class:`Source` for later reuse—it is -important to keep in mind that nothing is actually computed yet, this is just a -description of what we want to have computed once we run the stream. Then we -convert the resulting series of numbers into a stream of :class:`ByteString` -objects describing lines in a text file. This stream is then run by attaching a -file as the receiver of the data. In the terminology of Akka Streams this is -called a :class:`Sink`. :class:`IOResult` is a type that IO operations return -in Akka Streams in order to tell you how many bytes or elements were consumed -and whether the stream terminated normally or exceptionally. 
- -Reusable Pieces ---------------- - -One of the nice parts of Akka Streams—and something that other stream libraries -do not offer—is that not only sources can be reused like blueprints, all other -elements can be as well. We can take the file-writing :class:`Sink`, prepend -the processing steps necessary to get the :class:`ByteString` elements from -incoming strings and package that up as a reusable piece as well. Since the -language for writing these streams always flows from left to right (just like -plain English), we need a starting point that is like a source but with an -“open” input. In Akka Streams this is called a :class:`Flow`: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#transform-sink - -Starting from a flow of strings we convert each to :class:`ByteString` and then -feed to the already known file-writing :class:`Sink`. The resulting blueprint -is a :class:`Sink>`, which means that it -accepts strings as its input and when materialized it will create auxiliary -information of type ``CompletionStage`` (when chaining operations on -a :class:`Source` or :class:`Flow` the type of the auxiliary information—called -the “materialized value”—is given by the leftmost starting point; since we want -to retain what the ``FileIO.toFile`` sink has to offer, we need to say -``Keep.right()``). - -We can use the new and shiny :class:`Sink` we just created by -attaching it to our ``factorials`` source—after a small adaptation to turn the -numbers into strings: - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#use-transformed-sink - -Time-Based Processing ---------------------- - -Before we start looking at a more involved example we explore the streaming -nature of what Akka Streams can do. 
Starting from the ``factorials`` source -we transform the stream by zipping it together with another stream, -represented by a :class:`Source` that emits the number 0 to 100: the first -number emitted by the ``factorials`` source is the factorial of zero, the -second is the factorial of one, and so on. We combine these two by forming -strings like ``"3! = 6"``. - -.. includecode:: ../code/docs/stream/QuickStartDocTest.java#add-streams - -All operations so far have been time-independent and could have been performed -in the same fashion on strict collections of elements. The next line -demonstrates that we are in fact dealing with streams that can flow at a -certain speed: we use the ``throttle`` combinator to slow down the stream to 1 -element per second (the second ``1`` in the argument list is the maximum size -of a burst that we want to allow—passing ``1`` means that the first element -gets through immediately and the second then has to wait for one second and so -on). - -If you run this program you will see one line printed per second. One aspect -that is not immediately visible deserves mention, though: if you try and set -the streams to produce a billion numbers each then you will notice that your -JVM does not crash with an OutOfMemoryError, even though you will also notice -that running the streams happens in the background, asynchronously (this is the -reason for the auxiliary information to be provided as a -:class:`CompletionStage`, in the future). The secret that makes this work is -that Akka Streams implicitly implement pervasive flow control, all combinators -respect back-pressure. This allows the throttle combinator to signal to all its -upstream sources of data that it can only accept elements at a certain -rate—when the incoming rate is higher than one per second the throttle -combinator will assert *back-pressure* upstream. 
- -This is basically all there is to Akka Streams in a nutshell—glossing over the -fact that there are dozens of sources and sinks and many more stream -transformation combinators to choose from, see also :ref:`stages-overview_java`. - -Reactive Tweets -=============== - -A typical use case for stream processing is consuming a live stream of data that we want to extract or aggregate some -other data from. In this example we'll consider consuming a stream of tweets and extracting information concerning Akka from them. - -We will also consider the problem inherent to all non-blocking streaming -solutions: *"What if the subscriber is too slow to consume the live stream of -data?"*. Traditionally the solution is often to buffer the elements, but this -can—and usually will—cause eventual buffer overflows and instability of such -systems. Instead Akka Streams depend on internal backpressure signals that -allow to control what should happen in such scenarios. - -Here's the data model we'll be working with throughout the quickstart examples: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#model - - -.. note:: - If you would like to get an overview of the used vocabulary first instead of diving head-first - into an actual example you can have a look at the :ref:`core-concepts-java` and :ref:`defining-and-running-streams-java` - sections of the docs, and then come back to this quickstart to see it all pieced together into a simple example application. - -Transforming and consuming simple streams ------------------------------------------ -The example application we will be looking at is a simple Twitter feed stream from which we'll want to extract certain information, -like for example finding all twitter handles of users who tweet about ``#akka``. 
- -In order to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, -which will be responsible for materializing and running the streams we are about to create: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#materializer-setup - -The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define -materialization properties, such as default buffer sizes (see also :ref:`async-stream-buffers-java`), the dispatcher to -be used by the pipeline etc. These can be overridden with ``withAttributes`` on :class:`Flow`, :class:`Source`, :class:`Sink` and :class:`Graph`. - -Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source`: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweet-source - -Streams always start flowing from a ``Source`` then can continue through ``Flow`` elements or -more advanced graph elements to finally be consumed by a ``Sink``. - -The first type parameter—:class:`Tweet` in this case—designates the kind of elements produced -by the source while the ``M`` type parameters describe the object that is created during -materialization (:ref:`see below `)—:class:`BoxedUnit` (from the ``scala.runtime`` -package) means that no value is produced, it is the generic equivalent of ``void``. - -The operations should look familiar to anyone who has used the Scala Collections library, -however they operate on streams and not collections of data (which is a very important distinction, as some operations -only make sense in streaming and vice versa): - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-filter-map - -Finally in order to :ref:`materialize ` and run the stream computation we need to attach -the Flow to a ``Sink`` that will get the Flow running. The simplest way to do this is to call -``runWith(sink)`` on a ``Source``. 
For convenience a number of common Sinks are predefined and collected as static methods on -the `Sink class `_. -For now let's simply print each author: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreachsink-println - -or by using the shorthand version (which are defined only for the most popular Sinks such as :class:`Sink.fold` and :class:`Sink.foreach`): - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreach-println - -Materializing and running a stream always requires a :class:`Materializer` to be passed in explicitly, -like this: ``.run(mat)``. - -The complete snippet looks like this: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#first-sample - -Flattening sequences in streams -------------------------------- -In the previous section we were working on 1:1 relationships of elements which is the most common case, but sometimes -we might want to map from one element to a number of elements and receive a "flattened" stream, similarly like ``flatMap`` -works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` -combinator: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#hashtags-mapConcat - -.. note:: - The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. - It is problematic for two reasons: firstly, flattening by concatenation is often undesirable in bounded stream processing - due to the risk of deadlock (with merge being the preferred strategy), and secondly, the monad laws would not hold for - our implementation of flatMap (due to the liveness issues). - - Please note that the ``mapConcat`` requires the supplied function to return a strict collection (``Out f -> java.util.List``), - whereas ``flatMap`` would have to operate on streams all the way through. 
- - -Broadcasting a stream ---------------------- -Now let's say we want to persist all hashtags, as well as all author names from this one live stream. -For example we'd like to write all author handles into one file, and all hashtags into another file on disk. -This means we have to split the source stream into two streams which will handle the writing to these different files. - -Elements that can be used to form such "fan-out" (or "fan-in") structures are referred to as "junctions" in Akka Streams. -One of these that we'll be using in this example is called :class:`Broadcast`, and it simply emits elements from its -input port to all of its output ports. - -Akka Streams intentionally separate the linear stream structures (Flows) from the non-linear, branching ones (Graphs) -in order to offer the most convenient API for both of these cases. Graphs can express arbitrarily complex stream setups -at the expense of not reading as familiarly as collection transformations. - -Graphs are constructed using :class:`GraphDSL` like this: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#graph-dsl-broadcast - -As you can see, we use graph builder ``b`` to construct the graph using ``UniformFanOutShape`` and ``Flow`` s. - -``GraphDSL.create`` returns a :class:`Graph`, in this example a ``Graph`` where -:class:`ClosedShape` means that it is *a fully connected graph* or "closed" - there are no unconnected inputs or outputs. -Since it is closed it is possible to transform the graph into a :class:`RunnableGraph` using ``RunnableGraph.fromGraph``. -The runnable graph can then be ``run()`` to materialize a stream out of it. - -Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. - -A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports -expresses a graph that is a *partial graph*. 
Concepts around composing and nesting graphs in large structures are -explained in detail in :ref:`composition-java`. It is also possible to wrap complex computation graphs -as Flows, Sinks or Sources, which will be explained in detail in :ref:`partial-graph-dsl-java`. - - -Back-pressure in action ------------------------ - -One of the main advantages of Akka Streams is that they *always* propagate back-pressure information from stream Sinks -(Subscribers) to their Sources (Publishers). It is not an optional feature, and is enabled at all times. To learn more -about the back-pressure protocol used by Akka Streams and all other Reactive Streams compatible implementations read -:ref:`back-pressure-explained-java`. - -A typical problem applications (not using Akka Streams) like this often face is that they are unable to process the incoming data fast enough, -either temporarily or by design, and will start buffering incoming data until there's no more space to buffer, resulting -in either ``OutOfMemoryError`` s or other severe degradations of service responsiveness. With Akka Streams buffering can -and must be handled explicitly. For example, if we are only interested in the "*most recent tweets, with a buffer of 10 -elements*" this can be expressed using the ``buffer`` element: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-slow-consumption-dropHead - -The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react -when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), -dropping the entire buffer, signalling failures etc. Be sure to pick and choose the strategy that fits your use case best. - -.. 
_materialized-values-quick-java: - -Materialized values -------------------- -So far we've been only processing data using Flows and consuming it into some kind of external Sink - be it by printing -values or storing them in some external system. However sometimes we may be interested in some value that can be -obtained from the materialized processing pipeline. For example, we want to know how many tweets we have processed. -While this question is not as obvious to give an answer to in case of an infinite stream of tweets (one way to answer -this question in a streaming setting would be to create a stream of counts described as "*up until now*, we've processed N tweets"), -but in general it is possible to deal with finite streams and come up with a nice result such as a total count of elements. - -First, let's write such an element counter using ``Flow.of(Class)`` and ``Sink.fold`` to see how the types look like: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count - -First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in -order to combine those with a ``Sink.fold`` that will sum all ``Integer`` elements of the stream and make its result available as -a ``CompletionStage``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously -prepared Sink using ``toMat``. - -Remember those mysterious ``Mat`` type parameters on ``Source``, ``Flow`` and ``Sink``? -They represent the type of values these processing parts return when materialized. When you chain these together, -you can explicitly combine their materialized values: in our example we used the ``Keep.right`` predefined function, -which tells the implementation to only care about the materialized type of the stage currently appended to the right. 
-The materialized type of ``sumSink`` is ``CompletionStage`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` -has also a type parameter of ``CompletionStage``. - -This step does *not* yet materialize the -processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can -be ``run()``, as indicated by its type: ``RunnableGraph>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` -to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph`` is of type ``T``. -In our case this type is ``CompletionStage`` which, when completed, will contain the total length of our tweets stream. -In case of the stream failing, this future would complete with a Failure. - -A :class:`RunnableGraph` may be reused -and materialized multiple times, because it is just the "blueprint" of the stream. This means that if we materialize a stream, -for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations -will be different, as illustrated by this example: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-runnable-flow-materialized-twice - -Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or -steering these elements which will be discussed in detail in :ref:`stream-materialization-java`. Summing up this section, now we know -what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count-oneline - -.. note:: - ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except - those appended by the ``runWith()`` itself. 
In the above example it translates to using ``Keep.right`` as the combiner - for materialized values. +.. _stream-quickstart-java: + +Quick Start Guide +================= + +A stream usually begins at a source, so this is also how we start an Akka +Stream. Before we create one, we import the full complement of streaming tools: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#stream-imports + +If you want to execute the code samples while you read through the quick start guide, you will also need the following imports: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#other-imports + +Now we will start with a rather simple source, emitting the integers 1 to 100: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#create-source + +The :class:`Source` type is parameterized with two types: the first one is the +type of element that this source emits and the second one may signal that +running the source produces some auxiliary value (e.g. a network source may +provide information about the bound port or the peer’s address). Where no +auxiliary information is produced, the type ``akka.NotUsed`` is used—and a +simple range of integers surely falls into this category. + +Having created this source means that we have a description of how to emit the +first 100 natural numbers, but this source is not yet active. In order to get +those numbers out we have to run it: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#run-source + +This line will complement the source with a consumer function—in this example +we simply print out the numbers to the console—and pass this little stream +setup to an Actor that runs it. This activation is signaled by having “run” be +part of the method name; there are other methods that run Akka Streams, and +they all follow this pattern. + +You may wonder where the Actor gets created that runs the stream, and you are +probably also asking yourself what this ``materializer`` means. 
In order to get +this value we first need to create an Actor system: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#create-materializer + +There are other ways to create a materializer, e.g. from an +:class:`ActorContext` when using streams from within Actors. The +:class:`Materializer` is a factory for stream execution engines, it is the +thing that makes streams run—you don’t need to worry about any of the details +just now apart from that you need one for calling any of the ``run`` methods on +a :class:`Source`. + +The nice thing about Akka Streams is that the :class:`Source` is just a +description of what you want to run, and like an architect’s blueprint it can +be reused, incorporated into a larger design. We may choose to transform the +source of integers and write it to a file instead: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#transform-source + +First we use the ``scan`` combinator to run a computation over the whole +stream: starting with the number 1 (``BigInteger.ONE``) we multiple by each of +the incoming numbers, one after the other; the scan operationemits the initial +value and then every calculation result. This yields the series of factorial +numbers which we stash away as a :class:`Source` for later reuse—it is +important to keep in mind that nothing is actually computed yet, this is just a +description of what we want to have computed once we run the stream. Then we +convert the resulting series of numbers into a stream of :class:`ByteString` +objects describing lines in a text file. This stream is then run by attaching a +file as the receiver of the data. In the terminology of Akka Streams this is +called a :class:`Sink`. :class:`IOResult` is a type that IO operations return +in Akka Streams in order to tell you how many bytes or elements were consumed +and whether the stream terminated normally or exceptionally. 
+ +Reusable Pieces +--------------- + +One of the nice parts of Akka Streams—and something that other stream libraries +do not offer—is that not only sources can be reused like blueprints, all other +elements can be as well. We can take the file-writing :class:`Sink`, prepend +the processing steps necessary to get the :class:`ByteString` elements from +incoming strings and package that up as a reusable piece as well. Since the +language for writing these streams always flows from left to right (just like +plain English), we need a starting point that is like a source but with an +“open” input. In Akka Streams this is called a :class:`Flow`: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#transform-sink + +Starting from a flow of strings we convert each to :class:`ByteString` and then +feed to the already known file-writing :class:`Sink`. The resulting blueprint +is a :class:`Sink<String, CompletionStage<IOResult>>`, which means that it +accepts strings as its input and when materialized it will create auxiliary +information of type ``CompletionStage<IOResult>`` (when chaining operations on +a :class:`Source` or :class:`Flow` the type of the auxiliary information—called +the “materialized value”—is given by the leftmost starting point; since we want +to retain what the ``FileIO.toFile`` sink has to offer, we need to say +``Keep.right()``). + +We can use the new and shiny :class:`Sink` we just created by +attaching it to our ``factorials`` source—after a small adaptation to turn the +numbers into strings: + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#use-transformed-sink + +Time-Based Processing +--------------------- + +Before we start looking at a more involved example we explore the streaming +nature of what Akka Streams can do. 
Starting from the ``factorials`` source +we transform the stream by zipping it together with another stream, +represented by a :class:`Source` that emits the number 0 to 100: the first +number emitted by the ``factorials`` source is the factorial of zero, the +second is the factorial of one, and so on. We combine these two by forming +strings like ``"3! = 6"``. + +.. includecode:: ../code/docs/stream/QuickStartDocTest.java#add-streams + +All operations so far have been time-independent and could have been performed +in the same fashion on strict collections of elements. The next line +demonstrates that we are in fact dealing with streams that can flow at a +certain speed: we use the ``throttle`` combinator to slow down the stream to 1 +element per second (the second ``1`` in the argument list is the maximum size +of a burst that we want to allow—passing ``1`` means that the first element +gets through immediately and the second then has to wait for one second and so +on). + +If you run this program you will see one line printed per second. One aspect +that is not immediately visible deserves mention, though: if you try and set +the streams to produce a billion numbers each then you will notice that your +JVM does not crash with an OutOfMemoryError, even though you will also notice +that running the streams happens in the background, asynchronously (this is the +reason for the auxiliary information to be provided as a +:class:`CompletionStage`, in the future). The secret that makes this work is +that Akka Streams implicitly implement pervasive flow control, all combinators +respect back-pressure. This allows the throttle combinator to signal to all its +upstream sources of data that it can only accept elements at a certain +rate—when the incoming rate is higher than one per second the throttle +combinator will assert *back-pressure* upstream. 
+ +This is basically all there is to Akka Streams in a nutshell—glossing over the +fact that there are dozens of sources and sinks and many more stream +transformation combinators to choose from, see also :ref:`stages-overview_java`. + +Reactive Tweets +=============== + +A typical use case for stream processing is consuming a live stream of data that we want to extract or aggregate some +other data from. In this example we'll consider consuming a stream of tweets and extracting information concerning Akka from them. + +We will also consider the problem inherent to all non-blocking streaming +solutions: *"What if the subscriber is too slow to consume the live stream of +data?"*. Traditionally the solution is often to buffer the elements, but this +can—and usually will—cause eventual buffer overflows and instability of such +systems. Instead Akka Streams depend on internal backpressure signals that +allow to control what should happen in such scenarios. + +Here's the data model we'll be working with throughout the quickstart examples: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#model + + +.. note:: + If you would like to get an overview of the used vocabulary first instead of diving head-first + into an actual example you can have a look at the :ref:`core-concepts-java` and :ref:`defining-and-running-streams-java` + sections of the docs, and then come back to this quickstart to see it all pieced together into a simple example application. + +Transforming and consuming simple streams +----------------------------------------- +The example application we will be looking at is a simple Twitter feed stream from which we'll want to extract certain information, +like for example finding all twitter handles of users who tweet about ``#akka``. 
+ +First we need to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, +which will be responsible for materializing and running the streams we are about to create: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#materializer-setup + +The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define +materialization properties, such as default buffer sizes (see also :ref:`async-stream-buffers-java`), the dispatcher to +be used by the pipeline etc. These can be overridden with ``withAttributes`` on :class:`Flow`, :class:`Source`, :class:`Sink` and :class:`Graph`. + +Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source<Tweet, BoxedUnit>`: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweet-source + +Streams always start flowing from a ``Source`` then can continue through ``Flow`` elements or +more advanced graph elements to finally be consumed by a ``Sink``. + +The first type parameter—:class:`Tweet` in this case—designates the kind of elements produced +by the source while the ``M`` type parameters describe the object that is created during +materialization (:ref:`see below <materialized-values-quick-java>`)—:class:`BoxedUnit` (from the ``scala.runtime`` +package) means that no value is produced, it is the generic equivalent of ``void``. + +The operations should look familiar to anyone who has used the Scala Collections library, +however they operate on streams and not collections of data (which is a very important distinction, as some operations +only make sense in streaming and vice versa): + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-filter-map + +Finally in order to :ref:`materialize <stream-materialization-java>` and run the stream computation we need to attach +the Flow to a ``Sink`` that will get the Flow running. The simplest way to do this is to call +``runWith(sink)`` on a ``Source``. 
For convenience a number of common Sinks are predefined and collected as static methods on +the `Sink class `_. +For now let's simply print each author: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreachsink-println + +or by using the shorthand version (which are defined only for the most popular Sinks such as :class:`Sink.fold` and :class:`Sink.foreach`): + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreach-println + +Materializing and running a stream always requires a :class:`Materializer` to be passed in explicitly, +like this: ``.run(mat)``. + +The complete snippet looks like this: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#first-sample + +Flattening sequences in streams +------------------------------- +In the previous section we were working on 1:1 relationships of elements which is the most common case, but sometimes +we might want to map from one element to a number of elements and receive a "flattened" stream, similarly like ``flatMap`` +works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` +combinator: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#hashtags-mapConcat + +.. note:: + The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. + It is problematic for two reasons: firstly, flattening by concatenation is often undesirable in bounded stream processing + due to the risk of deadlock (with merge being the preferred strategy), and secondly, the monad laws would not hold for + our implementation of flatMap (due to the liveness issues). + + Please note that the ``mapConcat`` requires the supplied function to return a strict collection (``Out f -> java.util.List``), + whereas ``flatMap`` would have to operate on streams all the way through. 
+ + +Broadcasting a stream +--------------------- +Now let's say we want to persist all hashtags, as well as all author names from this one live stream. +For example we'd like to write all author handles into one file, and all hashtags into another file on disk. +This means we have to split the source stream into two streams which will handle the writing to these different files. + +Elements that can be used to form such "fan-out" (or "fan-in") structures are referred to as "junctions" in Akka Streams. +One of these that we'll be using in this example is called :class:`Broadcast`, and it simply emits elements from its +input port to all of its output ports. + +Akka Streams intentionally separate the linear stream structures (Flows) from the non-linear, branching ones (Graphs) +in order to offer the most convenient API for both of these cases. Graphs can express arbitrarily complex stream setups +at the expense of not reading as familiarly as collection transformations. + +Graphs are constructed using :class:`GraphDSL` like this: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#graph-dsl-broadcast + +As you can see, we use graph builder ``b`` to construct the graph using ``UniformFanOutShape`` and ``Flow`` s. + +``GraphDSL.create`` returns a :class:`Graph`, in this example a ``Graph`` where +:class:`ClosedShape` means that it is *a fully connected graph* or "closed" - there are no unconnected inputs or outputs. +Since it is closed it is possible to transform the graph into a :class:`RunnableGraph` using ``RunnableGraph.fromGraph``. +The runnable graph can then be ``run()`` to materialize a stream out of it. + +Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. + +A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports +expresses a graph that is a *partial graph*. 
Concepts around composing and nesting graphs in large structures are +explained in detail in :ref:`composition-java`. It is also possible to wrap complex computation graphs +as Flows, Sinks or Sources, which will be explained in detail in :ref:`partial-graph-dsl-java`. + + +Back-pressure in action +----------------------- + +One of the main advantages of Akka Streams is that they *always* propagate back-pressure information from stream Sinks +(Subscribers) to their Sources (Publishers). It is not an optional feature, and is enabled at all times. To learn more +about the back-pressure protocol used by Akka Streams and all other Reactive Streams compatible implementations read +:ref:`back-pressure-explained-java`. + +A typical problem applications (not using Akka Streams) like this often face is that they are unable to process the incoming data fast enough, +either temporarily or by design, and will start buffering incoming data until there's no more space to buffer, resulting +in either ``OutOfMemoryError`` s or other severe degradations of service responsiveness. With Akka Streams buffering can +and must be handled explicitly. For example, if we are only interested in the "*most recent tweets, with a buffer of 10 +elements*" this can be expressed using the ``buffer`` element: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-slow-consumption-dropHead + +The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react +when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), +dropping the entire buffer, signalling failures etc. Be sure to pick and choose the strategy that fits your use case best. + +.. 
_materialized-values-quick-java: + +Materialized values +------------------- +So far we've been only processing data using Flows and consuming it into some kind of external Sink - be it by printing +values or storing them in some external system. However sometimes we may be interested in some value that can be +obtained from the materialized processing pipeline. For example, we want to know how many tweets we have processed. +This question is not as obvious to give an answer to in the case of an infinite stream of tweets (one way to answer +this question in a streaming setting would be to create a stream of counts described as "*up until now*, we've processed N tweets"), +but in general it is possible to deal with finite streams and come up with a nice result such as a total count of elements. + +First, let's write such an element counter using ``Flow.of(Class)`` and ``Sink.fold`` to see what the types look like: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count + +First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in +order to combine those with a ``Sink.fold`` that will sum all ``Integer`` elements of the stream and make its result available as +a ``CompletionStage<Integer>``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously +prepared Sink using ``toMat``. + +Remember those mysterious ``Mat`` type parameters on ``Source``, ``Flow`` and ``Sink``? +They represent the type of values these processing parts return when materialized. When you chain these together, +you can explicitly combine their materialized values: in our example we used the ``Keep.right`` predefined function, +which tells the implementation to only care about the materialized type of the stage currently appended to the right. 
+ +The materialized type of ``sumSink`` is ``CompletionStage<Integer>`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` +has also a type parameter of ``CompletionStage<Integer>``. + +This step does *not* yet materialize the +processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can +be ``run()``, as indicated by its type: ``RunnableGraph<CompletionStage<Integer>>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` +to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph<T>`` is of type ``T``. +In our case this type is ``CompletionStage<Integer>`` which, when completed, will contain the total length of our tweets stream. +In case of the stream failing, this future would complete with a Failure. + +A :class:`RunnableGraph` may be reused +and materialized multiple times, because it is just the "blueprint" of the stream. This means that if we materialize a stream, +for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations +will be different, as illustrated by this example: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-runnable-flow-materialized-twice + +Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or +steering these elements which will be discussed in detail in :ref:`stream-materialization-java`. Summing up this section, now we know +what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count-oneline + +.. note:: + ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except + those appended by the ``runWith()`` itself. 
In the above example it translates to using ``Keep.right`` as the combiner + for materialized values. diff --git a/akka-docs/rst/scala/code/docs/stream/QuickStartDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/QuickStartDocSpec.scala index bf25549841..3f8c68de04 100644 --- a/akka-docs/rst/scala/code/docs/stream/QuickStartDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/QuickStartDocSpec.scala @@ -3,19 +3,22 @@ */ package docs.stream -//#imports +//#stream-imports import akka.stream._ import akka.stream.scaladsl._ -//#imports +//#stream-imports + +//#other-imports import akka.{ NotUsed, Done } import akka.actor.ActorSystem import akka.util.ByteString - -import org.scalatest._ -import org.scalatest.concurrent._ import scala.concurrent._ import scala.concurrent.duration._ import java.nio.file.Paths +//#other-imports + +import org.scalatest._ +import org.scalatest.concurrent._ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFutures { implicit val patience = PatienceConfig(5.seconds) diff --git a/akka-docs/rst/scala/stream/stream-quickstart.rst b/akka-docs/rst/scala/stream/stream-quickstart.rst index aa2171c539..cdc37c1da2 100644 --- a/akka-docs/rst/scala/stream/stream-quickstart.rst +++ b/akka-docs/rst/scala/stream/stream-quickstart.rst @@ -1,329 +1,333 @@ -.. _stream-quickstart-scala: - -Quick Start Guide -================= - -A stream usually begins at a source, so this is also how we start an Akka -Stream. Before we create one, we import the full complement of streaming tools: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#imports - -Now we will start with a rather simple source, emitting the integers 1 to 100: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#create-source - -The :class:`Source` type is parameterized with two types: the first one is the -type of element that this source emits and the second one may signal that -running the source produces some auxiliary value (e.g. 
a network source may -provide information about the bound port or the peer’s address). Where no -auxiliary information is produced, the type ``akka.NotUsed`` is used—and a -simple range of integers surely falls into this category. - -Having created this source means that we have a description of how to emit the -first 100 natural numbers, but this source is not yet active. In order to get -those numbers out we have to run it: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#run-source - -This line will complement the source with a consumer function—in this example -we simply print out the numbers to the console—and pass this little stream -setup to an Actor that runs it. This activation is signaled by having “run” be -part of the method name; there are other methods that run Akka Streams, and -they all follow this pattern. - -You may wonder where the Actor gets created that runs the stream, and you are -probably also asking yourself what this ``materializer`` means. In order to get -this value we first need to create an Actor system: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#create-materializer - -There are other ways to create a materializer, e.g. from an -:class:`ActorContext` when using streams from within Actors. The -:class:`Materializer` is a factory for stream execution engines, it is the -thing that makes streams run—you don’t need to worry about any of the details -just now apart from that you need one for calling any of the ``run`` methods on -a :class:`Source`. The materializer is picked up implicitly if it is omitted -from the ``run`` method call arguments, which we will do in the following. - -The nice thing about Akka Streams is that the :class:`Source` is just a -description of what you want to run, and like an architect’s blueprint it can -be reused, incorporated into a larger design. We may choose to transform the -source of integers and write it to a file instead: - -.. 
includecode:: ../code/docs/stream/QuickStartDocSpec.scala#transform-source - -First we use the ``scan`` combinator to run a computation over the whole -stream: starting with the number 1 (``BigInt(1)``) we multiple by each of -the incoming numbers, one after the other; the scan operation emits the initial -value and then every calculation result. This yields the series of factorial -numbers which we stash away as a :class:`Source` for later reuse—it is -important to keep in mind that nothing is actually computed yet, this is just a -description of what we want to have computed once we run the stream. Then we -convert the resulting series of numbers into a stream of :class:`ByteString` -objects describing lines in a text file. This stream is then run by attaching a -file as the receiver of the data. In the terminology of Akka Streams this is -called a :class:`Sink`. :class:`IOResult` is a type that IO operations return in -Akka Streams in order to tell you how many bytes or elements were consumed and -whether the stream terminated normally or exceptionally. - -Reusable Pieces ---------------- - -One of the nice parts of Akka Streams—and something that other stream libraries -do not offer—is that not only sources can be reused like blueprints, all other -elements can be as well. We can take the file-writing :class:`Sink`, prepend -the processing steps necessary to get the :class:`ByteString` elements from -incoming strings and package that up as a reusable piece as well. Since the -language for writing these streams always flows from left to right (just like -plain English), we need a starting point that is like a source but with an -“open” input. In Akka Streams this is called a :class:`Flow`: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#transform-sink - -Starting from a flow of strings we convert each to :class:`ByteString` and then -feed to the already known file-writing :class:`Sink`. 
The resulting blueprint -is a :class:`Sink[String, Future[IOResult]]`, which means that it -accepts strings as its input and when materialized it will create auxiliary -information of type ``Future[IOResult]`` (when chaining operations on -a :class:`Source` or :class:`Flow` the type of the auxiliary information—called -the “materialized value”—is given by the leftmost starting point; since we want -to retain what the ``FileIO.toFile`` sink has to offer, we need to say -``Keep.right``). - -We can use the new and shiny :class:`Sink` we just created by -attaching it to our ``factorials`` source—after a small adaptation to turn the -numbers into strings: - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#use-transformed-sink - -Time-Based Processing ---------------------- - -Before we start looking at a more involved example we explore the streaming -nature of what Akka Streams can do. Starting from the ``factorials`` source -we transform the stream by zipping it together with another stream, -represented by a :class:`Source` that emits the number 0 to 100: the first -number emitted by the ``factorials`` source is the factorial of zero, the -second is the factorial of one, and so on. We combine these two by forming -strings like ``"3! = 6"``. - -.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#add-streams - -All operations so far have been time-independent and could have been performed -in the same fashion on strict collections of elements. The next line -demonstrates that we are in fact dealing with streams that can flow at a -certain speed: we use the ``throttle`` combinator to slow down the stream to 1 -element per second (the second ``1`` in the argument list is the maximum size -of a burst that we want to allow—passing ``1`` means that the first element -gets through immediately and the second then has to wait for one second and so -on). - -If you run this program you will see one line printed per second. 
One aspect -that is not immediately visible deserves mention, though: if you try and set -the streams to produce a billion numbers each then you will notice that your -JVM does not crash with an OutOfMemoryError, even though you will also notice -that running the streams happens in the background, asynchronously (this is the -reason for the auxiliary information to be provided as a :class:`Future`). The -secret that makes this work is that Akka Streams implicitly implement pervasive -flow control, all combinators respect back-pressure. This allows the throttle -combinator to signal to all its upstream sources of data that it can only -accept elements at a certain rate—when the incoming rate is higher than one per -second the throttle combinator will assert *back-pressure* upstream. - -This is basically all there is to Akka Streams in a nutshell—glossing over the -fact that there are dozens of sources and sinks and many more stream -transformation combinators to choose from, see also :ref:`stages-overview_scala`. - -Reactive Tweets -=============== - -A typical use case for stream processing is consuming a live stream of data that we want to extract or aggregate some -other data from. In this example we'll consider consuming a stream of tweets and extracting information concerning Akka from them. - -We will also consider the problem inherent to all non-blocking streaming -solutions: *"What if the subscriber is too slow to consume the live stream of -data?"*. Traditionally the solution is often to buffer the elements, but this -can—and usually will—cause eventual buffer overflows and instability of such -systems. Instead Akka Streams depend on internal backpressure signals that -allow to control what should happen in such scenarios. - -Here's the data model we'll be working with throughout the quickstart examples: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#model - -.. 
note:: - If you would like to get an overview of the used vocabulary first instead of diving head-first - into an actual example you can have a look at the :ref:`core-concepts-scala` and :ref:`defining-and-running-streams-scala` - sections of the docs, and then come back to this quickstart to see it all pieced together into a simple example application. - -Transforming and consuming simple streams ------------------------------------------ -The example application we will be looking at is a simple Twitter feed stream from which we'll want to extract certain information, -like for example finding all twitter handles of users who tweet about ``#akka``. - -In order to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, -which will be responsible for materializing and running the streams we are about to create: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#materializer-setup - -The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define -materialization properties, such as default buffer sizes (see also :ref:`async-stream-buffers-scala`), the dispatcher to -be used by the pipeline etc. These can be overridden with ``withAttributes`` on :class:`Flow`, :class:`Source`, :class:`Sink` and :class:`Graph`. - -Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source[Out, M]`: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweet-source - -Streams always start flowing from a :class:`Source[Out,M1]` then can continue through :class:`Flow[In,Out,M2]` elements or -more advanced graph elements to finally be consumed by a :class:`Sink[In,M3]` (ignore the type parameters ``M1``, ``M2`` -and ``M3`` for now, they are not relevant to the types of the elements produced/consumed by these classes – they are -"materialized types", which we'll talk about :ref:`below `). 
- -The operations should look familiar to anyone who has used the Scala Collections library, -however they operate on streams and not collections of data (which is a very important distinction, as some operations -only make sense in streaming and vice versa): - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-filter-map - -Finally in order to :ref:`materialize ` and run the stream computation we need to attach -the Flow to a :class:`Sink` that will get the Flow running. The simplest way to do this is to call -``runWith(sink)`` on a ``Source``. For convenience a number of common Sinks are predefined and collected as methods on -the :class:`Sink` `companion object `_. -For now let's simply print each author: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreachsink-println - -or by using the shorthand version (which are defined only for the most popular Sinks such as ``Sink.fold`` and ``Sink.foreach``): - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreach-println - -Materializing and running a stream always requires a :class:`Materializer` to be in implicit scope (or passed in explicitly, -like this: ``.run(materializer)``). - -The complete snippet looks like this: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#first-sample - -Flattening sequences in streams -------------------------------- -In the previous section we were working on 1:1 relationships of elements which is the most common case, but sometimes -we might want to map from one element to a number of elements and receive a "flattened" stream, similarly like ``flatMap`` -works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` -combinator: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#hashtags-mapConcat - -.. 
note:: - The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. - It is problematic for two reasons: first, flattening by concatenation is often undesirable in bounded stream processing - due to the risk of deadlock (with merge being the preferred strategy), and second, the monad laws would not hold for - our implementation of flatMap (due to the liveness issues). - - Please note that the ``mapConcat`` requires the supplied function to return a strict collection (``f:Out=>immutable.Seq[T]``), - whereas ``flatMap`` would have to operate on streams all the way through. - -Broadcasting a stream ---------------------- -Now let's say we want to persist all hashtags, as well as all author names from this one live stream. -For example we'd like to write all author handles into one file, and all hashtags into another file on disk. -This means we have to split the source stream into two streams which will handle the writing to these different files. - -Elements that can be used to form such "fan-out" (or "fan-in") structures are referred to as "junctions" in Akka Streams. -One of these that we'll be using in this example is called :class:`Broadcast`, and it simply emits elements from its -input port to all of its output ports. - -Akka Streams intentionally separate the linear stream structures (Flows) from the non-linear, branching ones (Graphs) -in order to offer the most convenient API for both of these cases. Graphs can express arbitrarily complex stream setups -at the expense of not reading as familiarly as collection transformations. - -Graphs are constructed using :class:`GraphDSL` like this: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#graph-dsl-broadcast - -As you can see, inside the :class:`GraphDSL` we use an implicit graph builder ``b`` to mutably construct the graph -using the ``~>`` "edge operator" (also read as "connect" or "via" or "to"). 
The operator is provided implicitly -by importing ``GraphDSL.Implicits._``. - -``GraphDSL.create`` returns a :class:`Graph`, in this example a :class:`Graph[ClosedShape, Unit]` where -:class:`ClosedShape` means that it is *a fully connected graph* or "closed" - there are no unconnected inputs or outputs. -Since it is closed it is possible to transform the graph into a :class:`RunnableGraph` using ``RunnableGraph.fromGraph``. -The runnable graph can then be ``run()`` to materialize a stream out of it. - -Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. - -A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports -expresses a graph that is a *partial graph*. Concepts around composing and nesting graphs in large structures are -explained in detail in :ref:`composition-scala`. It is also possible to wrap complex computation graphs -as Flows, Sinks or Sources, which will be explained in detail in -:ref:`constructing-sources-sinks-flows-from-partial-graphs-scala`. - -Back-pressure in action ------------------------ -One of the main advantages of Akka Streams is that they *always* propagate back-pressure information from stream Sinks -(Subscribers) to their Sources (Publishers). It is not an optional feature, and is enabled at all times. To learn more -about the back-pressure protocol used by Akka Streams and all other Reactive Streams compatible implementations read -:ref:`back-pressure-explained-scala`. - -A typical problem applications (not using Akka Streams) like this often face is that they are unable to process the incoming data fast enough, -either temporarily or by design, and will start buffering incoming data until there's no more space to buffer, resulting -in either ``OutOfMemoryError`` s or other severe degradations of service responsiveness. With Akka Streams buffering can -and must be handled explicitly. 
For example, if we are only interested in the "*most recent tweets, with a buffer of 10 -elements*" this can be expressed using the ``buffer`` element: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-slow-consumption-dropHead - -The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react -when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), -dropping the entire buffer, signalling errors etc. Be sure to pick and choose the strategy that fits your use case best. - -.. _materialized-values-quick-scala: - -Materialized values -------------------- -So far we've been only processing data using Flows and consuming it into some kind of external Sink - be it by printing -values or storing them in some external system. However sometimes we may be interested in some value that can be -obtained from the materialized processing pipeline. For example, we want to know how many tweets we have processed. -While this question is not as obvious to give an answer to in case of an infinite stream of tweets (one way to answer -this question in a streaming setting would be to create a stream of counts described as "*up until now*, we've processed N tweets"), -but in general it is possible to deal with finite streams and come up with a nice result such as a total count of elements. - -First, let's write such an element counter using ``Sink.fold`` and see how the types look like: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count - -First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in -order to combine those with a ``Sink.fold`` that will sum all ``Int`` elements of the stream and make its result available as -a ``Future[Int]``. Next we connect the ``tweets`` stream to ``count`` with ``via``. 
Finally we connect the Flow to the previously -prepared Sink using ``toMat``. - -Remember those mysterious ``Mat`` type parameters on ``Source[+Out, +Mat]``, ``Flow[-In, +Out, +Mat]`` and ``Sink[-In, +Mat]``? -They represent the type of values these processing parts return when materialized. When you chain these together, -you can explicitly combine their materialized values. In our example we used the ``Keep.right`` predefined function, -which tells the implementation to only care about the materialized type of the stage currently appended to the right. -The materialized type of ``sumSink`` is ``Future[Int]`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` -has also a type parameter of ``Future[Int]``. - -This step does *not* yet materialize the -processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can -be ``run()``, as indicated by its type: ``RunnableGraph[Future[Int]]``. Next we call ``run()`` which uses the implicit :class:`ActorMaterializer` -to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph[T]`` is of type ``T``. -In our case this type is ``Future[Int]`` which, when completed, will contain the total length of our ``tweets`` stream. -In case of the stream failing, this future would complete with a Failure. - -A :class:`RunnableGraph` may be reused -and materialized multiple times, because it is just the "blueprint" of the stream. This means that if we materialize a stream, -for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations -will be different, as illustrated by this example: - -.. 
includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-runnable-flow-materialized-twice - -Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or -steering these elements which will be discussed in detail in :ref:`stream-materialization-scala`. Summing up this section, now we know -what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: - -.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count-oneline - -.. note:: - ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except - those appended by the ``runWith()`` itself. In the above example it translates to using ``Keep.right`` as the combiner - for materialized values. +.. _stream-quickstart-scala: + +Quick Start Guide +================= + +A stream usually begins at a source, so this is also how we start an Akka +Stream. Before we create one, we import the full complement of streaming tools: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#stream-imports + +If you want to execute the code samples while you read through the quick start guide, you will also need the following imports: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#other-imports + +Now we will start with a rather simple source, emitting the integers 1 to 100: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#create-source + +The :class:`Source` type is parameterized with two types: the first one is the +type of element that this source emits and the second one may signal that +running the source produces some auxiliary value (e.g. a network source may +provide information about the bound port or the peer’s address). Where no +auxiliary information is produced, the type ``akka.NotUsed`` is used—and a +simple range of integers surely falls into this category. 
+ +Having created this source means that we have a description of how to emit the +first 100 natural numbers, but this source is not yet active. In order to get +those numbers out we have to run it: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#run-source + +This line will complement the source with a consumer function—in this example +we simply print out the numbers to the console—and pass this little stream +setup to an Actor that runs it. This activation is signaled by having “run” be +part of the method name; there are other methods that run Akka Streams, and +they all follow this pattern. + +You may wonder where the Actor gets created that runs the stream, and you are +probably also asking yourself what this ``materializer`` means. In order to get +this value we first need to create an Actor system: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#create-materializer + +There are other ways to create a materializer, e.g. from an +:class:`ActorContext` when using streams from within Actors. The +:class:`Materializer` is a factory for stream execution engines, it is the +thing that makes streams run—you don’t need to worry about any of the details +just now apart from that you need one for calling any of the ``run`` methods on +a :class:`Source`. The materializer is picked up implicitly if it is omitted +from the ``run`` method call arguments, which we will do in the following. + +The nice thing about Akka Streams is that the :class:`Source` is just a +description of what you want to run, and like an architect’s blueprint it can +be reused, incorporated into a larger design. We may choose to transform the +source of integers and write it to a file instead: + +.. 
includecode:: ../code/docs/stream/QuickStartDocSpec.scala#transform-source + +First we use the ``scan`` combinator to run a computation over the whole +stream: starting with the number 1 (``BigInt(1)``) we multiply by each of +the incoming numbers, one after the other; the scan operation emits the initial +value and then every calculation result. This yields the series of factorial +numbers which we stash away as a :class:`Source` for later reuse—it is +important to keep in mind that nothing is actually computed yet, this is just a +description of what we want to have computed once we run the stream. Then we +convert the resulting series of numbers into a stream of :class:`ByteString` +objects describing lines in a text file. This stream is then run by attaching a +file as the receiver of the data. In the terminology of Akka Streams this is +called a :class:`Sink`. :class:`IOResult` is a type that IO operations return in +Akka Streams in order to tell you how many bytes or elements were consumed and +whether the stream terminated normally or exceptionally. + +Reusable Pieces +--------------- + +One of the nice parts of Akka Streams—and something that other stream libraries +do not offer—is that not only sources can be reused like blueprints, all other +elements can be as well. We can take the file-writing :class:`Sink`, prepend +the processing steps necessary to get the :class:`ByteString` elements from +incoming strings and package that up as a reusable piece as well. Since the +language for writing these streams always flows from left to right (just like +plain English), we need a starting point that is like a source but with an +“open” input. In Akka Streams this is called a :class:`Flow`: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#transform-sink + +Starting from a flow of strings we convert each to :class:`ByteString` and then +feed to the already known file-writing :class:`Sink`. 
The resulting blueprint +is a :class:`Sink[String, Future[IOResult]]`, which means that it +accepts strings as its input and when materialized it will create auxiliary +information of type ``Future[IOResult]`` (when chaining operations on +a :class:`Source` or :class:`Flow` the type of the auxiliary information—called +the “materialized value”—is given by the leftmost starting point; since we want +to retain what the ``FileIO.toFile`` sink has to offer, we need to say +``Keep.right``). + +We can use the new and shiny :class:`Sink` we just created by +attaching it to our ``factorials`` source—after a small adaptation to turn the +numbers into strings: + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#use-transformed-sink + +Time-Based Processing +--------------------- + +Before we start looking at a more involved example we explore the streaming +nature of what Akka Streams can do. Starting from the ``factorials`` source +we transform the stream by zipping it together with another stream, +represented by a :class:`Source` that emits the numbers 0 to 100: the first +number emitted by the ``factorials`` source is the factorial of zero, the +second is the factorial of one, and so on. We combine these two by forming +strings like ``"3! = 6"``. + +.. includecode:: ../code/docs/stream/QuickStartDocSpec.scala#add-streams + +All operations so far have been time-independent and could have been performed +in the same fashion on strict collections of elements. The next line +demonstrates that we are in fact dealing with streams that can flow at a +certain speed: we use the ``throttle`` combinator to slow down the stream to 1 +element per second (the second ``1`` in the argument list is the maximum size +of a burst that we want to allow—passing ``1`` means that the first element +gets through immediately and the second then has to wait for one second and so +on). + +If you run this program you will see one line printed per second. 
One aspect +that is not immediately visible deserves mention, though: if you try and set +the streams to produce a billion numbers each then you will notice that your +JVM does not crash with an OutOfMemoryError, even though you will also notice +that running the streams happens in the background, asynchronously (this is the +reason for the auxiliary information to be provided as a :class:`Future`). The +secret that makes this work is that Akka Streams implicitly implement pervasive +flow control, all combinators respect back-pressure. This allows the throttle +combinator to signal to all its upstream sources of data that it can only +accept elements at a certain rate—when the incoming rate is higher than one per +second the throttle combinator will assert *back-pressure* upstream. + +This is basically all there is to Akka Streams in a nutshell—glossing over the +fact that there are dozens of sources and sinks and many more stream +transformation combinators to choose from, see also :ref:`stages-overview_scala`. + +Reactive Tweets +=============== + +A typical use case for stream processing is consuming a live stream of data that we want to extract or aggregate some +other data from. In this example we'll consider consuming a stream of tweets and extracting information concerning Akka from them. + +We will also consider the problem inherent to all non-blocking streaming +solutions: *"What if the subscriber is too slow to consume the live stream of +data?"*. Traditionally the solution is often to buffer the elements, but this +can—and usually will—cause eventual buffer overflows and instability of such +systems. Instead Akka Streams depend on internal backpressure signals that +allow us to control what should happen in such scenarios. + +Here's the data model we'll be working with throughout the quickstart examples: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#model + +.. 
note:: + If you would like to get an overview of the used vocabulary first instead of diving head-first + into an actual example you can have a look at the :ref:`core-concepts-scala` and :ref:`defining-and-running-streams-scala` + sections of the docs, and then come back to this quickstart to see it all pieced together into a simple example application. + +Transforming and consuming simple streams +----------------------------------------- +The example application we will be looking at is a simple Twitter feed stream from which we'll want to extract certain information, +like for example finding all twitter handles of users who tweet about ``#akka``. + +In order to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, +which will be responsible for materializing and running the streams we are about to create: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#materializer-setup + +The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define +materialization properties, such as default buffer sizes (see also :ref:`async-stream-buffers-scala`), the dispatcher to +be used by the pipeline etc. These can be overridden with ``withAttributes`` on :class:`Flow`, :class:`Source`, :class:`Sink` and :class:`Graph`. + +Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source[Out, M]`: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweet-source + +Streams always start flowing from a :class:`Source[Out,M1]` then can continue through :class:`Flow[In,Out,M2]` elements or +more advanced graph elements to finally be consumed by a :class:`Sink[In,M3]` (ignore the type parameters ``M1``, ``M2`` +and ``M3`` for now, they are not relevant to the types of the elements produced/consumed by these classes – they are +"materialized types", which we'll talk about :ref:`below `). 
+ +The operations should look familiar to anyone who has used the Scala Collections library, +however they operate on streams and not collections of data (which is a very important distinction, as some operations +only make sense in streaming and vice versa): + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-filter-map + +Finally in order to :ref:`materialize ` and run the stream computation we need to attach +the Flow to a :class:`Sink` that will get the Flow running. The simplest way to do this is to call +``runWith(sink)`` on a ``Source``. For convenience a number of common Sinks are predefined and collected as methods on +the :class:`Sink` `companion object `_. +For now let's simply print each author: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreachsink-println + +or by using the shorthand version (which are defined only for the most popular Sinks such as ``Sink.fold`` and ``Sink.foreach``): + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreach-println + +Materializing and running a stream always requires a :class:`Materializer` to be in implicit scope (or passed in explicitly, +like this: ``.run(materializer)``). + +The complete snippet looks like this: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#first-sample + +Flattening sequences in streams +------------------------------- +In the previous section we were working on 1:1 relationships of elements which is the most common case, but sometimes +we might want to map from one element to a number of elements and receive a "flattened" stream, similarly like ``flatMap`` +works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` +combinator: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#hashtags-mapConcat + +.. 
note:: + The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. + It is problematic for two reasons: first, flattening by concatenation is often undesirable in bounded stream processing + due to the risk of deadlock (with merge being the preferred strategy), and second, the monad laws would not hold for + our implementation of flatMap (due to the liveness issues). + + Please note that the ``mapConcat`` requires the supplied function to return a strict collection (``f:Out=>immutable.Seq[T]``), + whereas ``flatMap`` would have to operate on streams all the way through. + +Broadcasting a stream +--------------------- +Now let's say we want to persist all hashtags, as well as all author names from this one live stream. +For example we'd like to write all author handles into one file, and all hashtags into another file on disk. +This means we have to split the source stream into two streams which will handle the writing to these different files. + +Elements that can be used to form such "fan-out" (or "fan-in") structures are referred to as "junctions" in Akka Streams. +One of these that we'll be using in this example is called :class:`Broadcast`, and it simply emits elements from its +input port to all of its output ports. + +Akka Streams intentionally separate the linear stream structures (Flows) from the non-linear, branching ones (Graphs) +in order to offer the most convenient API for both of these cases. Graphs can express arbitrarily complex stream setups +at the expense of not reading as familiarly as collection transformations. + +Graphs are constructed using :class:`GraphDSL` like this: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#graph-dsl-broadcast + +As you can see, inside the :class:`GraphDSL` we use an implicit graph builder ``b`` to mutably construct the graph +using the ``~>`` "edge operator" (also read as "connect" or "via" or "to"). 
The operator is provided implicitly +by importing ``GraphDSL.Implicits._``. + +``GraphDSL.create`` returns a :class:`Graph`, in this example a :class:`Graph[ClosedShape, Unit]` where +:class:`ClosedShape` means that it is *a fully connected graph* or "closed" - there are no unconnected inputs or outputs. +Since it is closed it is possible to transform the graph into a :class:`RunnableGraph` using ``RunnableGraph.fromGraph``. +The runnable graph can then be ``run()`` to materialize a stream out of it. + +Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. + +A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports +expresses a graph that is a *partial graph*. Concepts around composing and nesting graphs in large structures are +explained in detail in :ref:`composition-scala`. It is also possible to wrap complex computation graphs +as Flows, Sinks or Sources, which will be explained in detail in +:ref:`constructing-sources-sinks-flows-from-partial-graphs-scala`. + +Back-pressure in action +----------------------- +One of the main advantages of Akka Streams is that they *always* propagate back-pressure information from stream Sinks +(Subscribers) to their Sources (Publishers). It is not an optional feature, and is enabled at all times. To learn more +about the back-pressure protocol used by Akka Streams and all other Reactive Streams compatible implementations read +:ref:`back-pressure-explained-scala`. + +A typical problem applications (not using Akka Streams) like this often face is that they are unable to process the incoming data fast enough, +either temporarily or by design, and will start buffering incoming data until there's no more space to buffer, resulting +in either ``OutOfMemoryError`` s or other severe degradations of service responsiveness. With Akka Streams buffering can +and must be handled explicitly. 
For example, if we are only interested in the "*most recent tweets, with a buffer of 10 +elements*" this can be expressed using the ``buffer`` element: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-slow-consumption-dropHead + +The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react +when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), +dropping the entire buffer, signalling errors etc. Be sure to pick and choose the strategy that fits your use case best. + +.. _materialized-values-quick-scala: + +Materialized values +------------------- +So far we've been only processing data using Flows and consuming it into some kind of external Sink - be it by printing +values or storing them in some external system. However sometimes we may be interested in some value that can be +obtained from the materialized processing pipeline. For example, we want to know how many tweets we have processed. +While this question is not as obvious to give an answer to in case of an infinite stream of tweets (one way to answer +this question in a streaming setting would be to create a stream of counts described as "*up until now*, we've processed N tweets"), +in general it is possible to deal with finite streams and come up with a nice result such as a total count of elements. + +First, let's write such an element counter using ``Sink.fold`` and see what the types look like: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count + +First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in +order to combine those with a ``Sink.fold`` that will sum all ``Int`` elements of the stream and make its result available as +a ``Future[Int]``. Next we connect the ``tweets`` stream to ``count`` with ``via``. 
Finally we connect the Flow to the previously +prepared Sink using ``toMat``. + +Remember those mysterious ``Mat`` type parameters on ``Source[+Out, +Mat]``, ``Flow[-In, +Out, +Mat]`` and ``Sink[-In, +Mat]``? +They represent the type of values these processing parts return when materialized. When you chain these together, +you can explicitly combine their materialized values. In our example we used the ``Keep.right`` predefined function, +which tells the implementation to only care about the materialized type of the stage currently appended to the right. +The materialized type of ``sumSink`` is ``Future[Int]`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` +has also a type parameter of ``Future[Int]``. + +This step does *not* yet materialize the +processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can +be ``run()``, as indicated by its type: ``RunnableGraph[Future[Int]]``. Next we call ``run()`` which uses the implicit :class:`ActorMaterializer` +to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph[T]`` is of type ``T``. +In our case this type is ``Future[Int]`` which, when completed, will contain the total length of our ``tweets`` stream. +In case of the stream failing, this future would complete with a Failure. + +A :class:`RunnableGraph` may be reused +and materialized multiple times, because it is just the "blueprint" of the stream. This means that if we materialize a stream, +for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations +will be different, as illustrated by this example: + +.. 
includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-runnable-flow-materialized-twice + +Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or +steering these elements which will be discussed in detail in :ref:`stream-materialization-scala`. Summing up this section, now we know +what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: + +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count-oneline + +.. note:: + ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except + those appended by the ``runWith()`` itself. In the above example it translates to using ``Keep.right`` as the combiner + for materialized values. From cc22ed45607500ec0703a1ec214b76c34a9feef6 Mon Sep 17 00:00:00 2001 From: Hawstein Date: Sun, 19 Jun 2016 08:06:19 +0800 Subject: [PATCH 30/85] +doc example snippet for akka http java dsl: SecurityDirectives (#20717) --- .../SecurityDirectivesExamplesTest.java | 364 ++++++++++++++++++ .../security-directives/authenticateBasic.rst | 3 +- .../authenticateBasicAsync.rst | 3 +- .../authenticateBasicPF.rst | 3 +- .../authenticateBasicPFAsync.rst | 3 +- .../authenticateOrRejectWithChallenge.rst | 3 +- .../security-directives/authorize.rst | 3 +- .../security-directives/authorizeAsync.rst | 3 +- .../extractCredentials.rst | 3 +- .../akka/http/javadsl/model/HttpMessage.java | 6 + .../http/scaladsl/model/HttpMessage.scala | 2 + .../directives/SecurityDirectives.scala | 52 ++- project/MiMa.scala | 5 + 13 files changed, 442 insertions(+), 11 deletions(-) create mode 100644 akka-docs/rst/java/code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java diff --git a/akka-docs/rst/java/code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java 
b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java new file mode 100644 index 0000000000..68d7386bbe --- /dev/null +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java @@ -0,0 +1,364 @@ +/* + * Copyright (C) 2016-2016 Lightbend Inc. + */ +package docs.http.javadsl.server.directives; + +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.StatusCodes; +import akka.http.javadsl.model.headers.BasicHttpCredentials; +import akka.http.javadsl.model.headers.HttpChallenge; +import akka.http.javadsl.model.headers.HttpCredentials; +import akka.http.javadsl.server.Route; +import akka.http.javadsl.testkit.JUnitRouteTest; +import akka.japi.JavaPartialFunction; +import org.junit.Test; +import scala.PartialFunction; +import scala.util.Either; +import scala.util.Left; +import scala.util.Right; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.Optional; + +public class SecurityDirectivesExamplesTest extends JUnitRouteTest { + + @Test + public void testAuthenticateBasic() { + //#authenticateBasic + final Function, Optional> myUserPassAuthenticator = + credentials -> + credentials.filter(c -> c.verify("p4ssw0rd")).map(ProvidedCredentials::identifier); + + final Route route = path("secured", () -> + authenticateBasic("secure site", myUserPassAuthenticator, userName -> + complete("The user is '" + userName + "'") + ) + ).seal(system(), materializer()); + + // tests: + testRoute(route).run(HttpRequest.GET("/secured")) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The resource requires authentication, which was not supplied with the request") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + + final HttpCredentials validCredentials = + 
BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validCredentials)) + .assertEntity("The user is 'John'"); + + final HttpCredentials invalidCredentials = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(invalidCredentials)) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The supplied authentication is invalid") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + //#authenticateBasic + } + + + @Test + public void testAuthenticateBasicPF() { + //#authenticateBasicPF + final PartialFunction, String> myUserPassAuthenticator = + new JavaPartialFunction, String>() { + @Override + public String apply(Optional opt, boolean isCheck) throws Exception { + if (opt.filter(c -> (c != null) && c.verify("p4ssw0rd")).isPresent()) { + if (isCheck) return null; + else return opt.get().identifier(); + } else if (opt.filter(c -> (c != null) && c.verify("p4ssw0rd-special")).isPresent()) { + if (isCheck) return null; + else return opt.get().identifier() + "-admin"; + } else { + throw noMatch(); + } + } + }; + + final Route route = path("secured", () -> + authenticateBasicPF("secure site", myUserPassAuthenticator, userName -> + complete("The user is '" + userName + "'") + ) + ).seal(system(), materializer()); + + // tests: + testRoute(route).run(HttpRequest.GET("/secured")) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The resource requires authentication, which was not supplied with the request") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + + final HttpCredentials validCredentials = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validCredentials)) + .assertEntity("The user is 'John'"); + + final HttpCredentials validAdminCredentials = + 
BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd-special"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validAdminCredentials)) + .assertEntity("The user is 'John-admin'"); + + final HttpCredentials invalidCredentials = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(invalidCredentials)) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The supplied authentication is invalid") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + //#authenticateBasicPF + } + + @Test + public void testAuthenticateBasicPFAsync() { + //#authenticateBasicPFAsync + class User { + private final String id; + public User(String id) { + this.id = id; + } + public String getId() { + return id; + } + } + + final PartialFunction, CompletionStage> myUserPassAuthenticator = + new JavaPartialFunction,CompletionStage>() { + @Override + public CompletionStage apply(Optional opt, boolean isCheck) throws Exception { + if (opt.filter(c -> (c != null) && c.verify("p4ssw0rd")).isPresent()) { + if (isCheck) return CompletableFuture.completedFuture(null); + else return CompletableFuture.completedFuture(new User(opt.get().identifier())); + } else { + throw noMatch(); + } + } + }; + + final Route route = path("secured", () -> + authenticateBasicPFAsync("secure site", myUserPassAuthenticator, user -> + complete("The user is '" + user.getId() + "'")) + ).seal(system(), materializer()); + + // tests: + testRoute(route).run(HttpRequest.GET("/secured")) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The resource requires authentication, which was not supplied with the request") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + + final HttpCredentials validCredentials = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + 
testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validCredentials)) + .assertEntity("The user is 'John'"); + + final HttpCredentials invalidCredentials = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(invalidCredentials)) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The supplied authentication is invalid") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + //#authenticateBasicPFAsync + } + + @Test + public void testAuthenticateBasicAsync() { + //#authenticateBasicAsync + final Function, CompletionStage>> myUserPassAuthenticator = opt -> { + if (opt.filter(c -> (c != null) && c.verify("p4ssw0rd")).isPresent()) { + return CompletableFuture.completedFuture(Optional.of(opt.get().identifier())); + } else { + return CompletableFuture.completedFuture(Optional.empty()); + } + }; + + final Route route = path("secured", () -> + authenticateBasicAsync("secure site", myUserPassAuthenticator, userName -> + complete("The user is '" + userName + "'") + ) + ).seal(system(), materializer()); + + // tests: + testRoute(route).run(HttpRequest.GET("/secured")) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The resource requires authentication, which was not supplied with the request") + .assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + + final HttpCredentials validCredentials = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validCredentials)) + .assertEntity("The user is 'John'"); + + final HttpCredentials invalidCredentials = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(invalidCredentials)) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The supplied authentication is invalid") + 
.assertHeaderExists("WWW-Authenticate", "Basic realm=\"secure site\""); + //#authenticateBasicAsync + } + + @Test + public void testAuthenticateOrRejectWithChallenge() { + //#authenticateOrRejectWithChallenge + final HttpChallenge challenge = HttpChallenge.create("MyAuth", "MyRealm"); + + // your custom authentication logic: + final Function auth = credentials -> true; + + final Function, CompletionStage>> myUserPassAuthenticator = + opt -> { + if (opt.isPresent() && auth.apply(opt.get())) { + return CompletableFuture.completedFuture(Right.apply("some-user-name-from-creds")); + } else { + return CompletableFuture.completedFuture(Left.apply(challenge)); + } + }; + + final Route route = path("secured", () -> + authenticateOrRejectWithChallenge(myUserPassAuthenticator, userName -> + complete("Authenticated!") + ) + ).seal(system(), materializer()); + + // tests: + testRoute(route).run(HttpRequest.GET("/secured")) + .assertStatusCode(StatusCodes.UNAUTHORIZED) + .assertEntity("The resource requires authentication, which was not supplied with the request") + .assertHeaderExists("WWW-Authenticate", "MyAuth realm=\"MyRealm\""); + + final HttpCredentials validCredentials = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/secured").addCredentials(validCredentials)) + .assertStatusCode(StatusCodes.OK) + .assertEntity("Authenticated!"); + //#authenticateOrRejectWithChallenge + } + + @Test + public void testAuthorize() { + //#authorize + class User { + private final String name; + public User(String name) { + this.name = name; + } + public String getName() { + return name; + } + } + + // authenticate the user: + final Function, Optional> myUserPassAuthenticator = + opt -> { + if (opt.isPresent()) { + return Optional.of(new User(opt.get().identifier())); + } else { + return Optional.empty(); + } + }; + + // check if user is authorized to perform admin actions: + final Set admins = new HashSet<>(); + 
admins.add("Peter"); + final Function hasAdminPermissions = user -> admins.contains(user.getName()); + + final Route route = authenticateBasic("secure site", myUserPassAuthenticator, user -> + path("peters-lair", () -> + authorize(() -> hasAdminPermissions.apply(user), () -> + complete("'" + user.getName() +"' visited Peter's lair") + ) + ) + ).seal(system(), materializer()); + + // tests: + final HttpCredentials johnsCred = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/peters-lair").addCredentials(johnsCred)) + .assertStatusCode(StatusCodes.FORBIDDEN) + .assertEntity("The supplied authentication is not authorized to access this resource"); + + final HttpCredentials petersCred = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/peters-lair").addCredentials(petersCred)) + .assertEntity("'Peter' visited Peter's lair"); + //#authorize + } + + @Test + public void testAuthorizeAsync() { + //#authorizeAsync + class User { + private final String name; + public User(String name) { + this.name = name; + } + public String getName() { + return name; + } + } + + // authenticate the user: + final Function, Optional> myUserPassAuthenticator = + opt -> { + if (opt.isPresent()) { + return Optional.of(new User(opt.get().identifier())); + } else { + return Optional.empty(); + } + }; + + // check if user is authorized to perform admin actions, + // this could potentially be a long operation so it would return a Future + final Set admins = new HashSet<>(); + admins.add("Peter"); + final Set synchronizedAdmins = Collections.synchronizedSet(admins); + + final Function> hasAdminPermissions = + user -> CompletableFuture.completedFuture(synchronizedAdmins.contains(user.getName())); + + final Route route = authenticateBasic("secure site", myUserPassAuthenticator, user -> + path("peters-lair", () -> + authorizeAsync(() -> hasAdminPermissions.apply(user), () -> + 
complete("'" + user.getName() +"' visited Peter's lair") + ) + ) + ).seal(system(), materializer()); + + // tests: + final HttpCredentials johnsCred = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/peters-lair").addCredentials(johnsCred)) + .assertStatusCode(StatusCodes.FORBIDDEN) + .assertEntity("The supplied authentication is not authorized to access this resource"); + + final HttpCredentials petersCred = + BasicHttpCredentials.createBasicHttpCredentials("Peter", "pan"); + testRoute(route).run(HttpRequest.GET("/peters-lair").addCredentials(petersCred)) + .assertEntity("'Peter' visited Peter's lair"); + //#authorizeAsync + } + + @Test + public void testExtractCredentials() { + //#extractCredentials + final Route route = extractCredentials(optCreds -> { + if (optCreds.isPresent()) { + return complete("Credentials: " + optCreds.get()); + } else { + return complete("No credentials"); + } + }); + + // tests: + final HttpCredentials johnsCred = + BasicHttpCredentials.createBasicHttpCredentials("John", "p4ssw0rd"); + testRoute(route).run(HttpRequest.GET("/").addCredentials(johnsCred)) + .assertEntity("Credentials: Basic Sm9objpwNHNzdzByZA=="); + + testRoute(route).run(HttpRequest.GET("/")) + .assertEntity("No credentials"); + //#extractCredentials + } +} diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasic.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasic.rst index fb3999f259..16fd9479c8 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasic.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasic.rst @@ -27,4 +27,5 @@ See :ref:`credentials-and-timing-attacks-java` for details about verifying the s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! 
Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authenticateBasic diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst index 4cd3f54777..2267737a5a 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst @@ -25,4 +25,5 @@ See :ref:`credentials-and-timing-attacks-java` for details about verifying the s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authenticateBasicAsync diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst index f5731af93f..9617e2a3c1 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst @@ -25,4 +25,5 @@ See :ref:`credentials-and-timing-attacks-java` for details about verifying the s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authenticateBasicPF diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst index ff0e95174e..e0c5e5118d 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst @@ -22,4 +22,5 @@ See :ref:`credentials-and-timing-attacks-java` for details about verifying the s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authenticateBasicPFAsync diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst index 76509bdb2d..4b96af6747 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst @@ -16,4 +16,5 @@ More details about challenge-response authentication are available in the `RFC 2 Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authenticateOrRejectWithChallenge diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorize.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorize.rst index caa435d414..6a9306ba8a 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorize.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorize.rst @@ -24,4 +24,5 @@ See also :ref:`-authorize-java-` for the asynchronous version of this directive. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authorize diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorizeAsync.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorizeAsync.rst index c1920a79d8..32fa84a65a 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorizeAsync.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/authorizeAsync.rst @@ -25,4 +25,5 @@ See also :ref:`-authorize-java-` for the synchronous version of this directive. Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. 
includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#authorizeAsync diff --git a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/extractCredentials.rst b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/extractCredentials.rst index d8c61a5d64..d24acf4484 100644 --- a/akka-docs/rst/java/http/routing-dsl/directives/security-directives/extractCredentials.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/security-directives/extractCredentials.rst @@ -13,4 +13,5 @@ See :ref:`credentials-and-timing-attacks-java` for details about verifying the s Example ------- -TODO: Example snippets for JavaDSL are subject to community contributions! Help us complete the docs, read more about it here: `write example snippets for Akka HTTP Java DSL #20466 `_. + +.. includecode:: ../../../../code/docs/http/javadsl/server/directives/SecurityDirectivesExamplesTest.java#extractCredentials diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java index aac8d8d3d8..5f1f1ae812 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java @@ -6,6 +6,7 @@ package akka.http.javadsl.model; import akka.Done; import akka.stream.Materializer; +import akka.http.javadsl.model.headers.HttpCredentials; import akka.util.ByteString; import scala.concurrent.Future; @@ -113,6 +114,11 @@ public interface HttpMessage { */ Self addHeaders(Iterable headers); + /** + * Returns a copy of this message with the given http credential header added to the list of headers. + */ + Self addCredentials(HttpCredentials credentials); + /** * Returns a copy of this message with all headers of the given name (case-insensitively) removed. 
*/ diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala index d8d87357db..9cf985f448 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala @@ -109,6 +109,8 @@ sealed trait HttpMessage extends jm.HttpMessage { def addHeader(header: jm.HttpHeader): Self = mapHeaders(_ :+ header.asInstanceOf[HttpHeader]) + def addCredentials(credentials: jm.headers.HttpCredentials): Self = addHeader(jm.headers.Authorization.create(credentials)) + /** Removes the header with the given name (case-insensitive) */ def removeHeader(headerName: String): Self = { val lowerHeaderName = headerName.toRootLowerCase diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/SecurityDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/SecurityDirectives.scala index f25d08aa42..2670c60a55 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/SecurityDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/SecurityDirectives.scala @@ -12,9 +12,11 @@ import scala.compat.java8.FutureConverters._ import scala.compat.java8.OptionConverters._ import akka.http.javadsl.model.headers.HttpChallenge import akka.http.javadsl.model.headers.HttpCredentials -import akka.http.javadsl.server.{ RequestContext, Route } +import akka.http.javadsl.server.{ Route, RequestContext } import akka.http.scaladsl -import akka.http.scaladsl.server.{ AuthorizationFailedRejection, Directives ⇒ D } +import akka.http.scaladsl.server.{ Directives ⇒ D } + +import scala.concurrent.{ ExecutionContextExecutor, Future } object SecurityDirectives { /** @@ -68,6 +70,50 @@ abstract class SecurityDirectives extends SchemeDirectives { } } + /** + * Wraps the inner route with Http Basic authentication support. 
+ * The given authenticator determines whether the credentials in the request are valid + * and, if so, which user object to supply to the inner route. + * + * Authentication is required in this variant, i.e. the request is rejected if [authenticator] returns Optional.empty. + */ + def authenticateBasicPF[T](realm: String, authenticator: PartialFunction[Optional[ProvidedCredentials], T], + inner: JFunction[T, Route]): Route = RouteAdapter { + def pf: PartialFunction[scaladsl.server.directives.Credentials, Option[T]] = { + case c ⇒ Option(authenticator.applyOrElse(toJava(c), (_: Any) ⇒ null.asInstanceOf[T])) + } + + D.authenticateBasic(realm, pf) { t ⇒ + inner.apply(t).delegate + } + } + + /** + * Wraps the inner route with Http Basic authentication support. + * The given authenticator determines whether the credentials in the request are valid + * and, if so, which user object to supply to the inner route. + * + * Authentication is required in this variant, i.e. the request is rejected if [authenticator] returns Optional.empty. + */ + def authenticateBasicPFAsync[T](realm: String, authenticator: PartialFunction[Optional[ProvidedCredentials], CompletionStage[T]], + inner: JFunction[T, Route]): Route = RouteAdapter { + def pf(implicit ec: ExecutionContextExecutor): PartialFunction[scaladsl.server.directives.Credentials, Future[Option[T]]] = { + case credentials ⇒ + val jCredentials = toJava(credentials) + if (authenticator isDefinedAt jCredentials) { + authenticator(jCredentials).toScala.map(Some(_)) + } else { + Future.successful(None) + } + } + + D.extractExecutionContext { implicit ec ⇒ + D.authenticateBasicAsync(realm, pf) { t ⇒ + inner.apply(t).delegate + } + } + } + /** * Wraps the inner route with Http Basic authentication support using a given `Authenticator[T]`. 
* The given authenticator determines whether the credentials in the request are valid @@ -261,4 +307,4 @@ abstract class SecurityDirectives extends SchemeDirectives { */ def challengeFor(realm: String): HttpChallenge = HttpChallenge.create("Basic", realm) -} \ No newline at end of file +} diff --git a/project/MiMa.scala b/project/MiMa.scala index 0366eba2d9..1b2029ccee 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -886,6 +886,11 @@ object MiMa extends AutoPlugin { // #20288 migrate BodyPartRenderer to GraphStage ProblemFilters.exclude[IncompatibleResultTypeProblem]("akka.http.impl.engine.rendering.BodyPartRenderer.streamed") + ), + "2.4.8" -> Seq( + // #20717 example snippet for akka http java dsl: SecurityDirectives + ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.javadsl.model.HttpMessage#MessageTransformations.addCredentials"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.http.scaladsl.model.HttpMessage.addCredentials") ) ) } From 4d9268214d3efa29913bf2ee5f54f6a427f764f0 Mon Sep 17 00:00:00 2001 From: Felix Satyaputra Date: Sun, 19 Jun 2016 17:55:10 +0200 Subject: [PATCH 31/85] +rem Support serialization of scala Some and None (#20801) --- .../java/akka/remote/ContainerFormats.java | 543 +++++++++++++++++- .../src/main/protobuf/ContainerFormats.proto | 4 + akka-remote/src/main/resources/reference.conf | 5 +- .../serialization/MiscMessageSerializer.scala | 25 +- .../MiscMessageSerializerSpec.scala | 12 +- 5 files changed, 578 insertions(+), 11 deletions(-) diff --git a/akka-remote/src/main/java/akka/remote/ContainerFormats.java b/akka-remote/src/main/java/akka/remote/ContainerFormats.java index cdd93b7d85..1e63dd0f4b 100644 --- a/akka-remote/src/main/java/akka/remote/ContainerFormats.java +++ b/akka-remote/src/main/java/akka/remote/ContainerFormats.java @@ -3464,6 +3464,525 @@ public final class ContainerFormats { // @@protoc_insertion_point(class_scope:ActorRef) } + public interface OptionOrBuilder + extends 
akka.protobuf.MessageOrBuilder { + + // optional .Payload value = 1; + /** + * optional .Payload value = 1; + */ + boolean hasValue(); + /** + * optional .Payload value = 1; + */ + akka.remote.ContainerFormats.Payload getValue(); + /** + * optional .Payload value = 1; + */ + akka.remote.ContainerFormats.PayloadOrBuilder getValueOrBuilder(); + } + /** + * Protobuf type {@code Option} + */ + public static final class Option extends + akka.protobuf.GeneratedMessage + implements OptionOrBuilder { + // Use Option.newBuilder() to construct. + private Option(akka.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Option(boolean noInit) { this.unknownFields = akka.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Option defaultInstance; + public static Option getDefaultInstance() { + return defaultInstance; + } + + public Option getDefaultInstanceForType() { + return defaultInstance; + } + + private final akka.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final akka.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Option( + akka.protobuf.CodedInputStream input, + akka.protobuf.ExtensionRegistryLite extensionRegistry) + throws akka.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + akka.protobuf.UnknownFieldSet.Builder unknownFields = + akka.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + akka.remote.ContainerFormats.Payload.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = value_.toBuilder(); + } + value_ = input.readMessage(akka.remote.ContainerFormats.Payload.PARSER, 
extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + } + } + } catch (akka.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new akka.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final akka.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.remote.ContainerFormats.internal_static_Option_descriptor; + } + + protected akka.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.remote.ContainerFormats.internal_static_Option_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.remote.ContainerFormats.Option.class, akka.remote.ContainerFormats.Option.Builder.class); + } + + public static akka.protobuf.Parser