diff --git a/.gitignore b/.gitignore index 750f097ba6..2716cbae25 100755 --- a/.gitignore +++ b/.gitignore @@ -41,6 +41,7 @@ _dump _mb activemq-data akka-contrib/rst_preprocessed/ +akka-docs-dev/rst_preprocessed/ akka-docs/_build/ akka-docs/exts/ akka-docs/rst_preprocessed/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 595ff6cfc3..6bdf078ee8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,9 +18,9 @@ These guidelines mainly apply to Typesafe’s “mature” projects - not necess Depending on which version (or sometimes module) you want to work on, you should target a specific branch as explained below: -* `master` – development branch of Akka 2.4.x +* `master` – active development branch of Akka 2.4.x * `release-2.3` – maintanance branch of Akka 2.3.x -* `release 2.3-dev` – development branch of Akka Streams and HTTP (only) +* similarily `release-2.#` branches contain legacy versions of Akka ## General Workflow @@ -52,6 +52,31 @@ This is the process for committing code into master. There are of course excepti 9. Once everything is said and done, associate the ticket with the “earliest” release milestone (i.e. if back-ported so that it will be in release x.y.z, find the relevant milestone for that release) and close it. +## The `validatePullRequest` task + +The Akka build includes a special task called `validatePullRequest` which investigates the changes made as well as dirty +(uncommitted changes) in your local working directory and figures out which projects are impacted by those changes, +then running tests only on those projects. + +For example changing something in `akka-http-core` would cause tests to be run in all projects which depend on it +(e.g. `akka-http-core-tests`, `akka-http-marshallers-*`, `akka-docs` etc.). + +To use the task simply type, and the output should include entries like shown below: + +``` +> validatePullRequest +[info] Diffing [HEAD] to determine changed modules in PR... +[info] Detected uncomitted changes in directories (including in dependency analysis): [akka-protobuf,project] +[info] Detected changes in directories: [akka-docs, project, akka-http-tests, akka-protobuf, akka-http-testkit, akka-http, akka-http-core, akka-stream] +``` + +By default changes are diffed with the `master` branch when working locally, if you want to validate against a different +target PR branch you can do so by setting the PR_TARGET_BRANCH environment variable for SBT: + +``` +PR_TARGET_BRANCH=origin/example sbt validatePullRequest +``` + ## Pull Request Requirements For a Pull Request to be considered at all it has to meet these requirements: diff --git a/akka-actor-tests/build.sbt b/akka-actor-tests/build.sbt index 9fe53bec61..0653f125b5 100644 --- a/akka-actor-tests/build.sbt +++ b/akka-actor-tests/build.sbt @@ -1,11 +1,6 @@ import akka.{ AkkaBuild, Dependencies, Formatting } AkkaBuild.defaultSettings - -Formatting.formatSettings - -publishArtifact in Compile := false - -Dependencies.actorTests - AkkaBuild.dontPublishSettings +Formatting.formatSettings +Dependencies.actorTests diff --git a/akka-actor-tests/src/test/java/akka/actor/ActorCreationTest.java b/akka-actor-tests/src/test/java/akka/actor/ActorCreationTest.java index 7ddb3d5fa2..692f615986 100644 --- a/akka-actor-tests/src/test/java/akka/actor/ActorCreationTest.java +++ b/akka-actor-tests/src/test/java/akka/actor/ActorCreationTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor; diff --git a/akka-actor-tests/src/test/java/akka/actor/InboxJavaAPITest.java b/akka-actor-tests/src/test/java/akka/actor/InboxJavaAPITest.java index a204bd40e8..45c0d4e3f0 100644 --- a/akka-actor-tests/src/test/java/akka/actor/InboxJavaAPITest.java +++ b/akka-actor-tests/src/test/java/akka/actor/InboxJavaAPITest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor; diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java index acfe70f356..9635df2c34 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor; diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java index 33da9613ef..34b12a1c9e 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor; diff --git a/akka-actor-tests/src/test/java/akka/actor/NonPublicClass.java b/akka-actor-tests/src/test/java/akka/actor/NonPublicClass.java index 26351f827d..b9d3bcb1b3 100644 --- a/akka-actor-tests/src/test/java/akka/actor/NonPublicClass.java +++ b/akka-actor-tests/src/test/java/akka/actor/NonPublicClass.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor; diff --git a/akka-actor-tests/src/test/java/akka/japi/MatchBuilderTest.java b/akka-actor-tests/src/test/java/akka/japi/MatchBuilderTest.java index 13803b6858..d2ffbd9471 100644 --- a/akka-actor-tests/src/test/java/akka/japi/MatchBuilderTest.java +++ b/akka-actor-tests/src/test/java/akka/japi/MatchBuilderTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi; diff --git a/akka-actor-tests/src/test/java/akka/japi/pf/PFBuilderTest.java b/akka-actor-tests/src/test/java/akka/japi/pf/PFBuilderTest.java index 37981d7fdc..c8d36444e5 100644 --- a/akka-actor-tests/src/test/java/akka/japi/pf/PFBuilderTest.java +++ b/akka-actor-tests/src/test/java/akka/japi/pf/PFBuilderTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor-tests/src/test/java/akka/pattern/PatternsTest.java b/akka-actor-tests/src/test/java/akka/pattern/PatternsTest.java index 9800b17eae..05d2fb15a1 100644 --- a/akka-actor-tests/src/test/java/akka/pattern/PatternsTest.java +++ b/akka-actor-tests/src/test/java/akka/pattern/PatternsTest.java @@ -15,7 +15,7 @@ import static akka.pattern.Patterns.pipe; import static org.junit.Assert.assertEquals; /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ public class PatternsTest { diff --git a/akka-actor-tests/src/test/java/akka/util/ByteStringTest.java b/akka-actor-tests/src/test/java/akka/util/ByteStringTest.java index 649c61651b..5584ab88ce 100644 --- a/akka-actor-tests/src/test/java/akka/util/ByteStringTest.java +++ b/akka-actor-tests/src/test/java/akka/util/ByteStringTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util; diff --git a/akka-actor-tests/src/test/java/akka/util/JavaDuration.java b/akka-actor-tests/src/test/java/akka/util/JavaDuration.java index 2dc4eac8ef..ba38b068af 100644 --- a/akka-actor-tests/src/test/java/akka/util/JavaDuration.java +++ b/akka-actor-tests/src/test/java/akka/util/JavaDuration.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util; diff --git a/akka-actor-tests/src/test/java/akka/util/LineNumberSpecCodeForJava.java b/akka-actor-tests/src/test/java/akka/util/LineNumberSpecCodeForJava.java index 9a3a96ca5c..df67ad602e 100644 --- a/akka-actor-tests/src/test/java/akka/util/LineNumberSpecCodeForJava.java +++ b/akka-actor-tests/src/test/java/akka/util/LineNumberSpecCodeForJava.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.util; diff --git a/akka-actor-tests/src/test/scala/akka/Messages.scala b/akka-actor-tests/src/test/scala/akka/Messages.scala index d635ec134c..5c61cad20d 100644 --- a/akka-actor-tests/src/test/scala/akka/Messages.scala +++ b/akka-actor-tests/src/test/scala/akka/Messages.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorConfigurationVerificationSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorConfigurationVerificationSpec.scala index c3f7d879e0..10db87e4de 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorConfigurationVerificationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorConfigurationVerificationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala index 15f78b9752..475d7e08f2 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala index 4a17967f9e..09518327b1 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala index 5ddc19a791..37a529f2a3 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala index cb843808b1..e5a4a3ef8e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorMailboxSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorMailboxSpec.scala index 424041bb66..9a8e65618f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorMailboxSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorMailboxSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorPathSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorPathSpec.scala index ae2fc3e66b..51e17bf122 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorPathSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorPathSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala index 174b877e97..082da50100 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala index 3c6e6c2353..2ebe1900da 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala index 6b1afb17af..07c971320a 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala index 2a12e670a1..00f32697dd 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorWithBoundedStashSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorWithBoundedStashSpec.scala index 9c4502a0e2..513c234eea 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorWithBoundedStashSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorWithBoundedStashSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorWithStashSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorWithStashSpec.scala index 576ec70dca..642d7fdbff 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorWithStashSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorWithStashSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala index 5f4583bef8..ded1e38fd4 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala index c2b890f4c2..dd77b625bf 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala index f9b5700696..2a7e417fe8 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala index 3de2186dba..9512562328 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala index 78b58a449c..6f7082a8a0 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala index baa60e49e0..a5d9de6140 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala index 2950e5230d..5ed2cf0f5e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/FunctionRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FunctionRefSpec.scala new file mode 100644 index 0000000000..f3c608d8d3 --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/actor/FunctionRefSpec.scala @@ -0,0 +1,100 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.actor + +import akka.testkit.AkkaSpec +import akka.testkit.ImplicitSender +import scala.concurrent.duration._ +import akka.testkit.EventFilter + +object FunctionRefSpec { + + case class GetForwarder(replyTo: ActorRef) + case class DropForwarder(ref: FunctionRef) + case class Forwarded(msg: Any, sender: ActorRef) + + class Super extends Actor { + def receive = { + case GetForwarder(replyTo) ⇒ + val cell = context.asInstanceOf[ActorCell] + val ref = cell.addFunctionRef((sender, msg) ⇒ replyTo ! Forwarded(msg, sender)) + replyTo ! ref + case DropForwarder(ref) ⇒ + val cell = context.asInstanceOf[ActorCell] + cell.removeFunctionRef(ref) + } + } + + class SupSuper extends Actor { + val s = context.actorOf(Props[Super], "super") + def receive = { + case msg ⇒ s ! msg + } + } + +} + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class FunctionRefSpec extends AkkaSpec with ImplicitSender { + import FunctionRefSpec._ + + def commonTests(s: ActorRef) = { + s ! GetForwarder(testActor) + val forwarder = expectMsgType[FunctionRef] + + "forward messages" in { + forwarder ! "hello" + expectMsg(Forwarded("hello", testActor)) + } + + "be watchable" in { + s ! GetForwarder(testActor) + val f = expectMsgType[FunctionRef] + watch(f) + s ! DropForwarder(f) + expectTerminated(f) + } + + "be able to watch" in { + s ! GetForwarder(testActor) + val f = expectMsgType[FunctionRef] + forwarder.watch(f) + s ! DropForwarder(f) + expectMsg(Forwarded(Terminated(f)(true, false), null)) + } + + "terminate when their parent terminates" in { + watch(forwarder) + s ! 
PoisonPill + expectTerminated(forwarder) + } + } + + "A FunctionRef" when { + + "created by a toplevel actor" must { + val s = system.actorOf(Props[Super], "super") + commonTests(s) + } + + "created by a non-toplevel actor" must { + val s = system.actorOf(Props[SupSuper], "supsuper") + commonTests(s) + } + + "not registered" must { + "not be found" in { + val provider = system.asInstanceOf[ExtendedActorSystem].provider + val ref = new FunctionRef(testActor.path / "blabla", provider, system.eventStream, (x, y) ⇒ ()) + EventFilter[ClassCastException](occurrences = 1) intercept { + // needs to be something that fails when the deserialized form is not a FunctionRef + // this relies upon serialize-messages during tests + testActor ! DropForwarder(ref) + expectNoMsg(1.second) + } + } + } + + } +} diff --git a/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala index 9001d09dc9..0c8bca7cd8 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/JavaAPISpec.scala b/akka-actor-tests/src/test/scala/akka/actor/JavaAPISpec.scala index d223dd76f7..8b58e9112d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/JavaAPISpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/JavaAPISpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala index f06bf0707a..4b986fcaa2 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/PropsCreationSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/PropsCreationSpec.scala index de85c11b58..18658da5c3 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/PropsCreationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/PropsCreationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala index 65bff78409..2051eda858 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/RelativeActorPathSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/RelativeActorPathSpec.scala index ff25e3661c..19590f46c8 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/RelativeActorPathSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/RelativeActorPathSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala index 67c5b68d8e..ab4fe6b151 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala index 12db769b61..b3ac78dfca 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala b/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala index cea6358503..e6216aee6e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala index b4284979b3..75c328e137 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala index 7b8e406532..739ba6824d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala index 8effe8645c..269cc340b0 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala index c676b6b4d6..3c5f3bb57a 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala b/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala index b3597ae352..3ff3f240e0 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index 82ffd8be07..7571894bdf 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/UidClashTest.scala b/akka-actor-tests/src/test/scala/akka/actor/UidClashTest.scala index 86f372bb0e..15b5ac16a7 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/UidClashTest.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/UidClashTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala index 11724a46ee..5a3da6afb3 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dispatch diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala index 8e5d482863..151ce01eb9 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dispatch diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index b4a4c4d248..b477ae703a 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.config diff --git a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala index daa72e8b35..87bec4be25 100644 --- a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala +++ b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.dataflow diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala index c4feb599bd..28970d06ec 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala index 309b136ae9..39a6ae0483 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/sysmsg/SystemMessageListSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/sysmsg/SystemMessageListSpec.scala index 90f0a7af6c..235ca7587a 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/sysmsg/SystemMessageListSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/sysmsg/SystemMessageListSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch.sysmsg diff --git a/akka-actor-tests/src/test/scala/akka/event/AddressTerminatedTopicBenchSpec.scala b/akka-actor-tests/src/test/scala/akka/event/AddressTerminatedTopicBenchSpec.scala index ebc10262f1..c8b5b5a021 100644 --- a/akka-actor-tests/src/test/scala/akka/event/AddressTerminatedTopicBenchSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/AddressTerminatedTopicBenchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala b/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala index c3aca4f652..17244a51fd 100644 --- a/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala b/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala index 0e491b9e92..ed3a32438a 100644 --- a/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor-tests/src/test/scala/akka/event/LoggerSpec.scala b/akka-actor-tests/src/test/scala/akka/event/LoggerSpec.scala index 4feef3e01f..70a68e30a6 100644 --- a/akka-actor-tests/src/test/scala/akka/event/LoggerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/LoggerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.event @@ -170,7 +170,7 @@ class LoggerSpec extends WordSpec with Matchers { out.size should be > (0) } - "drain logger queue on system shutdown" in { + "drain logger queue on system.terminate" in { val out = new java.io.ByteArrayOutputStream() Console.withOut(out) { val sys = ActorSystem("defaultLogger", slowConfig) diff --git a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala index fcfedea2a3..cf6d9d4875 100644 --- a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event @@ -66,7 +66,8 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterAll { }) })) a ! "hallo" - expectMsg(1 second, Logging.Debug("funky", classOf[DummyClassForStringSources], "received unhandled message hallo")) + expectMsg(1 second, Logging.Debug("funky", classOf[DummyClassForStringSources], + "received unhandled message hallo from " + system.deadLetters)) expectMsgType[UnhandledMessage](1 second) } } @@ -91,7 +92,8 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterAll { val name = actor.path.toString actor ! "buh" within(1 second) { - expectMsg(Logging.Debug(actor.path.toString, actor.underlyingActor.getClass, "received handled message buh")) + expectMsg(Logging.Debug(actor.path.toString, actor.underlyingActor.getClass, + "received handled message buh from " + self)) expectMsg("x") } @@ -116,7 +118,8 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterAll { }) actor ! "buh" within(1 second) { - expectMsg(Logging.Debug(actor.path.toString, actor.underlyingActor.getClass, "received handled message buh")) + expectMsg(Logging.Debug(actor.path.toString, actor.underlyingActor.getClass, + "received handled message buh from " + self)) expectMsg("x") } } diff --git a/akka-actor-tests/src/test/scala/akka/io/CapacityLimitSpec.scala b/akka-actor-tests/src/test/scala/akka/io/CapacityLimitSpec.scala index 1609b928b6..12648e477c 100644 --- a/akka-actor-tests/src/test/scala/akka/io/CapacityLimitSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/CapacityLimitSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala b/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala index dabf890aeb..84fdd46225 100644 --- a/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.io @@ -402,7 +402,7 @@ class TcpConnectionSpec extends AkkaSpec(""" connectionHandler.expectMsgType[Received].data.decodeString("ASCII") should ===(vs) } - finally system.shutdown() + finally system.terminate() } "close the connection and reply with `Closed` upon reception of a `Close` command" in diff --git a/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpec.scala b/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpec.scala index 00a11dd749..e598c6f23e 100644 --- a/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala b/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala index 684c8fc3cb..fa43aee2ce 100644 --- a/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala +++ b/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala b/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala index 71bbc6af57..8e5e18a305 100644 --- a/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/io/UdpConnectedIntegrationSpec.scala b/akka-actor-tests/src/test/scala/akka/io/UdpConnectedIntegrationSpec.scala index ef00d60992..8cb5ee38e2 100644 --- a/akka-actor-tests/src/test/scala/akka/io/UdpConnectedIntegrationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/UdpConnectedIntegrationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala b/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala index 2c63aec7f8..12b5146579 100644 --- a/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala index 7d60805dc8..97aeae88ab 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/BackoffOnRestartSupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/BackoffOnRestartSupervisorSpec.scala index 389986b6e0..86c4c9ed3b 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/BackoffOnRestartSupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/BackoffOnRestartSupervisorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/BackoffSupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/BackoffSupervisorSpec.scala index fe8f2604c9..98746301a9 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/BackoffSupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/BackoffSupervisorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.pattern @@ -133,8 +133,11 @@ class BackoffSupervisorSpec extends AkkaSpec with ImplicitSender { watch(c1) c1 ! "boom" expectTerminated(c1) - supervisor ! BackoffSupervisor.GetRestartCount - expectMsg(BackoffSupervisor.RestartCount(1)) + + awaitAssert { + supervisor ! BackoffSupervisor.GetRestartCount + expectMsg(BackoffSupervisor.RestartCount(1)) + } awaitAssert { supervisor ! BackoffSupervisor.GetCurrentChild diff --git a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerMTSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerMTSpec.scala index 0af8e15a83..9fb5ca4a33 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerMTSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerMTSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala index f2c7fe1d1d..eef05169a2 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerStressSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerStressSpec.scala index e56f872c18..629d64d617 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerStressSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerStressSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/PatternSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/PatternSpec.scala index 20bb670314..429bc727c7 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/PatternSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/PatternSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/PipeToSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/PipeToSpec.scala index 1987b0dba2..193acf61be 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/PipeToSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/PipeToSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/PromiseRefSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/PromiseRefSpec.scala index 2c6638267b..9bbe489ac3 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/PromiseRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/PromiseRefSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor-tests/src/test/scala/akka/pattern/extended/ExplicitAskSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/extended/ExplicitAskSpec.scala index cbfc083c40..1a54ddffb6 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/extended/ExplicitAskSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/extended/ExplicitAskSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern.extended diff --git a/akka-actor-tests/src/test/scala/akka/routing/BalancingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/BalancingSpec.scala index 40a99c5d46..745ae28d13 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/BalancingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/BalancingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/BroadcastSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/BroadcastSpec.scala index 1de5a3b0dd..f444b0ded2 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/BroadcastSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/BroadcastSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala index 3c8c1296c3..37370e0611 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala index f7789450ee..bc86143fef 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala index 731a1116e9..e990fd2033 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/RandomSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RandomSpec.scala index 9cae9b7354..795941ffe2 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RandomSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RandomSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala index c46b7ddb63..0b69570661 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoundRobinSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoundRobinSpec.scala index 279919e3c2..d2d6aff06f 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoundRobinSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoundRobinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/RouteeCreationSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RouteeCreationSpec.scala index 4b43c9eaa5..33b50e4373 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RouteeCreationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RouteeCreationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index 393d26e4b4..e2d8b9f521 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/ScatterGatherFirstCompletedSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ScatterGatherFirstCompletedSpec.scala index 10bcb69d1c..506981a035 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ScatterGatherFirstCompletedSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ScatterGatherFirstCompletedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/SmallestMailboxSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/SmallestMailboxSpec.scala index 75ab090c96..ea320864d8 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/SmallestMailboxSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/SmallestMailboxSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/routing/TailChoppingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/TailChoppingSpec.scala index d0a9ed84cf..eecb322118 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/TailChoppingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/TailChoppingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala index d288e0e60d..405284fa27 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.serialization diff --git a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala index 685395a2fb..b6872b88c6 100644 --- a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala b/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala index 714566e0d0..f76127417c 100644 --- a/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala index f15baf3f51..4363f4f567 100644 --- a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala b/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala index 1f11859ea8..1b64db63b3 100644 --- a/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/JavaDurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/JavaDurationSpec.scala index 30bf1baf5d..58fb1953b2 100644 --- a/akka-actor-tests/src/test/scala/akka/util/JavaDurationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/JavaDurationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/LineNumberSpec.scala b/akka-actor-tests/src/test/scala/akka/util/LineNumberSpec.scala index 2d292ac16f..4fd5103d66 100644 --- a/akka-actor-tests/src/test/scala/akka/util/LineNumberSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/LineNumberSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/LineNumberSpecCodeForScala.scala b/akka-actor-tests/src/test/scala/akka/util/LineNumberSpecCodeForScala.scala index 54271b3965..c57d580d11 100644 --- a/akka-actor-tests/src/test/scala/akka/util/LineNumberSpecCodeForScala.scala +++ b/akka-actor-tests/src/test/scala/akka/util/LineNumberSpecCodeForScala.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/PrettyDurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/PrettyDurationSpec.scala index 66433e19cd..4726b57697 100644 --- a/akka-actor-tests/src/test/scala/akka/util/PrettyDurationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/PrettyDurationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/ReflectSpec.scala b/akka-actor-tests/src/test/scala/akka/util/ReflectSpec.scala index 59adb76b41..dd12885870 100644 --- a/akka-actor-tests/src/test/scala/akka/util/ReflectSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/ReflectSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/SwitchSpec.scala b/akka-actor-tests/src/test/scala/akka/util/SwitchSpec.scala index 90cfb5b0f7..882eedcee7 100644 --- a/akka-actor-tests/src/test/scala/akka/util/SwitchSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/SwitchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor-tests/src/test/scala/akka/util/TypedMultiMapSpec.scala b/akka-actor-tests/src/test/scala/akka/util/TypedMultiMapSpec.scala index 3739d00080..d35bd4ef78 100644 --- a/akka-actor-tests/src/test/scala/akka/util/TypedMultiMapSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/TypedMultiMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/java/akka/actor/AbstractActorRef.java b/akka-actor/src/main/java/akka/actor/AbstractActorRef.java index 35fd9da7d7..efc2985f36 100644 --- a/akka-actor/src/main/java/akka/actor/AbstractActorRef.java +++ b/akka-actor/src/main/java/akka/actor/AbstractActorRef.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor; diff --git a/akka-actor/src/main/java/akka/actor/AbstractScheduler.java b/akka-actor/src/main/java/akka/actor/AbstractScheduler.java index 0f47d24a85..1f45e6adee 100644 --- a/akka-actor/src/main/java/akka/actor/AbstractScheduler.java +++ b/akka-actor/src/main/java/akka/actor/AbstractScheduler.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor; diff --git a/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java b/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java index c852ddde9c..be42935dc9 100644 --- a/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java +++ b/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon; @@ -11,12 +11,14 @@ final class AbstractActorCell { final static long mailboxOffset; final static long childrenOffset; final static long nextNameOffset; + final static long functionRefsOffset; static { try { mailboxOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Dispatch$$_mailboxDoNotCallMeDirectly")); childrenOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Children$$_childrenRefsDoNotCallMeDirectly")); nextNameOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Children$$_nextNameDoNotCallMeDirectly")); + functionRefsOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Children$$_functionRefsDoNotCallMeDirectly")); } catch(Throwable t){ throw new ExceptionInInitializerError(t); } diff --git a/akka-actor/src/main/java/akka/dispatch/AbstractBoundedNodeQueue.java b/akka-actor/src/main/java/akka/dispatch/AbstractBoundedNodeQueue.java index 305495fc06..2d2fdc276a 100644 --- a/akka-actor/src/main/java/akka/dispatch/AbstractBoundedNodeQueue.java +++ b/akka-actor/src/main/java/akka/dispatch/AbstractBoundedNodeQueue.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch; diff --git a/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java b/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java index 79c59efac6..1e5b299e7a 100644 --- a/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java +++ b/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch; diff --git a/akka-actor/src/main/java/akka/dispatch/AbstractMessageDispatcher.java b/akka-actor/src/main/java/akka/dispatch/AbstractMessageDispatcher.java index 5c4e552bd5..fc0214800a 100644 --- a/akka-actor/src/main/java/akka/dispatch/AbstractMessageDispatcher.java +++ b/akka-actor/src/main/java/akka/dispatch/AbstractMessageDispatcher.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch; diff --git a/akka-actor/src/main/java/akka/dispatch/AbstractNodeQueue.java b/akka-actor/src/main/java/akka/dispatch/AbstractNodeQueue.java index 0c887faf28..aaf606ea5f 100644 --- a/akka-actor/src/main/java/akka/dispatch/AbstractNodeQueue.java +++ b/akka-actor/src/main/java/akka/dispatch/AbstractNodeQueue.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch; diff --git a/akka-actor/src/main/java/akka/japi/JAPI.java b/akka-actor/src/main/java/akka/japi/JAPI.java index 4c040220f3..886a0ea3d7 100644 --- a/akka-actor/src/main/java/akka/japi/JAPI.java +++ b/akka-actor/src/main/java/akka/japi/JAPI.java @@ -4,6 +4,7 @@ import scala.collection.Seq; public class JAPI { + @SafeVarargs public static Seq seq(T... 
ts) { return Util.immutableSeq(ts); } diff --git a/akka-actor/src/main/java/akka/japi/pf/AbstractMatch.java b/akka-actor/src/main/java/akka/japi/pf/AbstractMatch.java index 1b3edc5afc..96302dc3c9 100644 --- a/akka-actor/src/main/java/akka/japi/pf/AbstractMatch.java +++ b/akka-actor/src/main/java/akka/japi/pf/AbstractMatch.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/AbstractPFBuilder.java b/akka-actor/src/main/java/akka/japi/pf/AbstractPFBuilder.java index cd52a0f508..7ab566a7c3 100644 --- a/akka-actor/src/main/java/akka/japi/pf/AbstractPFBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/AbstractPFBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/DeciderBuilder.java b/akka-actor/src/main/java/akka/japi/pf/DeciderBuilder.java index afc03a5e60..173c020b79 100644 --- a/akka-actor/src/main/java/akka/japi/pf/DeciderBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/DeciderBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/FI.java b/akka-actor/src/main/java/akka/japi/pf/FI.java index e6a606d536..8e3afd89a0 100644 --- a/akka-actor/src/main/java/akka/japi/pf/FI.java +++ b/akka-actor/src/main/java/akka/japi/pf/FI.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/FSMStateFunctionBuilder.java b/akka-actor/src/main/java/akka/japi/pf/FSMStateFunctionBuilder.java index 0661489c61..c89363e2d4 100644 --- a/akka-actor/src/main/java/akka/japi/pf/FSMStateFunctionBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/FSMStateFunctionBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/FSMStopBuilder.java b/akka-actor/src/main/java/akka/japi/pf/FSMStopBuilder.java index 7b00cf8258..ed7d12f45d 100644 --- a/akka-actor/src/main/java/akka/japi/pf/FSMStopBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/FSMStopBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/FSMTransitionHandlerBuilder.java b/akka-actor/src/main/java/akka/japi/pf/FSMTransitionHandlerBuilder.java index 376ab78dcc..eb271d13d9 100644 --- a/akka-actor/src/main/java/akka/japi/pf/FSMTransitionHandlerBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/FSMTransitionHandlerBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/Match.java b/akka-actor/src/main/java/akka/japi/pf/Match.java index 14446428d5..81ff5f1330 100644 --- a/akka-actor/src/main/java/akka/japi/pf/Match.java +++ b/akka-actor/src/main/java/akka/japi/pf/Match.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/PFBuilder.java b/akka-actor/src/main/java/akka/japi/pf/PFBuilder.java index 927849b305..8cf4615bfc 100644 --- a/akka-actor/src/main/java/akka/japi/pf/PFBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/PFBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/ReceiveBuilder.java b/akka-actor/src/main/java/akka/japi/pf/ReceiveBuilder.java index 590bc3d35d..74a549c73c 100644 --- a/akka-actor/src/main/java/akka/japi/pf/ReceiveBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/ReceiveBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/UnitMatch.java b/akka-actor/src/main/java/akka/japi/pf/UnitMatch.java index bb8ec77634..82ae651c6c 100644 --- a/akka-actor/src/main/java/akka/japi/pf/UnitMatch.java +++ b/akka-actor/src/main/java/akka/japi/pf/UnitMatch.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/japi/pf/UnitPFBuilder.java b/akka-actor/src/main/java/akka/japi/pf/UnitPFBuilder.java index 4fa2946fe0..12ff185e90 100644 --- a/akka-actor/src/main/java/akka/japi/pf/UnitPFBuilder.java +++ b/akka-actor/src/main/java/akka/japi/pf/UnitPFBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf; diff --git a/akka-actor/src/main/java/akka/pattern/AbstractCircuitBreaker.java b/akka-actor/src/main/java/akka/pattern/AbstractCircuitBreaker.java index 7018552259..6af5958188 100644 --- a/akka-actor/src/main/java/akka/pattern/AbstractCircuitBreaker.java +++ b/akka-actor/src/main/java/akka/pattern/AbstractCircuitBreaker.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern; diff --git a/akka-actor/src/main/java/akka/pattern/AbstractPromiseActorRef.java b/akka-actor/src/main/java/akka/pattern/AbstractPromiseActorRef.java index 9418297f1e..8266dc1e58 100644 --- a/akka-actor/src/main/java/akka/pattern/AbstractPromiseActorRef.java +++ b/akka-actor/src/main/java/akka/pattern/AbstractPromiseActorRef.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern; diff --git a/akka-actor/src/main/scala/akka/AkkaException.scala b/akka-actor/src/main/scala/akka/AkkaException.scala index f2929166c1..bbaf170aaa 100644 --- a/akka-actor/src/main/scala/akka/AkkaException.scala +++ b/akka-actor/src/main/scala/akka/AkkaException.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-actor/src/main/scala/akka/Done.scala b/akka-actor/src/main/scala/akka/Done.scala new file mode 100644 index 0000000000..f372eeb2c1 --- /dev/null +++ b/akka-actor/src/main/scala/akka/Done.scala @@ -0,0 +1,18 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka + +/** + * Typically used together with `Future` to signal completion + * but there is no actual value completed. More clearly signals intent + * than `Unit` and is available both from Scala and Java (which `Unit` is not). 
+ */ +sealed abstract class Done + +case object Done extends Done { + /** + * Java API: the singleton instance + */ + def getInstance(): Done = this +} diff --git a/akka-actor/src/main/scala/akka/Main.scala b/akka-actor/src/main/scala/akka/Main.scala index d6f799417a..7164e9688f 100644 --- a/akka-actor/src/main/scala/akka/Main.scala +++ b/akka-actor/src/main/scala/akka/Main.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-actor/src/main/scala/akka/NotUsed.scala b/akka-actor/src/main/scala/akka/NotUsed.scala new file mode 100644 index 0000000000..f896ba2d9a --- /dev/null +++ b/akka-actor/src/main/scala/akka/NotUsed.scala @@ -0,0 +1,19 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka + +/** + * This type is used in generic type signatures wherever the actual value is of no importance. + * It is a combination of Scala’s `Unit` and Java’s `Void`, which both have different issues when + * used from the other language. An example use-case is the materialized value of an Akka Stream for cases + * where no result shall be returned from materialization. + */ +sealed abstract class NotUsed + +case object NotUsed extends NotUsed { + /** + * Java API: the singleton instance + */ + def getInstance(): NotUsed = this +} diff --git a/akka-actor/src/main/scala/akka/actor/AbstractActor.scala b/akka-actor/src/main/scala/akka/actor/AbstractActor.scala index 9eeff702f7..a679e7e734 100644 --- a/akka-actor/src/main/scala/akka/actor/AbstractActor.scala +++ b/akka-actor/src/main/scala/akka/actor/AbstractActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/AbstractFSM.scala b/akka-actor/src/main/scala/akka/actor/AbstractFSM.scala index 59e0c40c7e..79be00c9a8 100644 --- a/akka-actor/src/main/scala/akka/actor/AbstractFSM.scala +++ b/akka-actor/src/main/scala/akka/actor/AbstractFSM.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/AbstractProps.scala b/akka-actor/src/main/scala/akka/actor/AbstractProps.scala index 78f820bcf6..fa8aff3cb2 100644 --- a/akka-actor/src/main/scala/akka/actor/AbstractProps.scala +++ b/akka-actor/src/main/scala/akka/actor/AbstractProps.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala index 7f4d07546c..2a4a999c22 100644 --- a/akka-actor/src/main/scala/akka/actor/Actor.scala +++ b/akka-actor/src/main/scala/akka/actor/Actor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index ba2c1922cb..a60cb3975c 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
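The new `Done` and `NotUsed` singletons introduced above are plain case objects with a Java-friendly `getInstance()`. A minimal usage sketch (names here are illustrative, not part of the PR):

```scala
import scala.concurrent.{ Future, Promise }
import akka.{ Done, NotUsed }

// Future[Done] signals "finished, no meaningful value" more clearly than Future[Unit]
val shutdownSignal = Promise[Done]()
def whenTerminated: Future[Done] = shutdownSignal.future

// ... later, once cleanup has completed:
shutdownSignal.trySuccess(Done)

// NotUsed fills a generic type parameter whose value is irrelevant,
// e.g. a materialized value that carries no information
val nothingUseful: NotUsed = NotUsed
```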
*/ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorDSL.scala b/akka-actor/src/main/scala/akka/actor/ActorDSL.scala index d673edd6a9..a8adc879ae 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorDSL.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorDSL.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorPath.scala b/akka-actor/src/main/scala/akka/actor/ActorPath.scala index 16a825d9b6..4a9bca0709 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorPath.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorPath.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor import scala.annotation.{ switch, tailrec } diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index ccd439cdb8..43266a3d1f 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor @@ -12,7 +12,9 @@ import akka.serialization.{ Serialization, JavaSerializer } import akka.event.EventStream import scala.annotation.tailrec import java.util.concurrent.ConcurrentHashMap -import akka.event.LoggingAdapter +import akka.event.{ Logging, LoggingAdapter } +import java.util.concurrent.atomic.AtomicReference +import scala.util.control.NonFatal object ActorRef { @@ -687,3 +689,139 @@ private[akka] class VirtualPathContainer( while (iter.hasNext) f(iter.next) } } + +/** + * INTERNAL API + * + * This kind of ActorRef passes all received messages to the given function for + * performing a non-blocking side-effect. The intended use is to transform the + * message before sending to the real target actor. Such references can be created + * by calling `ActorCell.addFunctionRef` and must be deregistered when no longer + * needed by calling `ActorCell.removeFunctionRef`. FunctionRefs do not count + * towards the live children of an actor, they do not receive the Terminate command + * and do not prevent the parent from terminating. FunctionRef is properly + * registered for remote lookup and ActorSelection. + * + * When using the watch() feature you must ensure that upon reception of the + * Terminated message the watched actorRef is unwatch()ed. 
+ */ +private[akka] final class FunctionRef(override val path: ActorPath, + override val provider: ActorRefProvider, + val eventStream: EventStream, + f: (ActorRef, Any) ⇒ Unit) extends MinimalActorRef { + + override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = { + f(sender, message) + } + + override def sendSystemMessage(message: SystemMessage): Unit = { + message match { + case w: Watch ⇒ addWatcher(w.watchee, w.watcher) + case u: Unwatch ⇒ remWatcher(u.watchee, u.watcher) + case DeathWatchNotification(actorRef, _, _) ⇒ + this.!(Terminated(actorRef)(existenceConfirmed = true, addressTerminated = false)) + case _ ⇒ //ignore all other messages + } + } + + private[this] var watching = ActorCell.emptyActorRefSet + private[this] val _watchedBy = new AtomicReference[Set[ActorRef]](ActorCell.emptyActorRefSet) + + override def isTerminated = _watchedBy.get() == null + + //noinspection EmptyCheck + protected def sendTerminated(): Unit = { + val watchedBy = _watchedBy.getAndSet(null) + if (watchedBy != null) { + if (watchedBy.nonEmpty) { + watchedBy foreach sendTerminated(ifLocal = false) + watchedBy foreach sendTerminated(ifLocal = true) + } + if (watching.nonEmpty) { + watching foreach unwatchWatched + watching = Set.empty + } + } + } + + private def sendTerminated(ifLocal: Boolean)(watcher: ActorRef): Unit = + if (watcher.asInstanceOf[ActorRefScope].isLocal == ifLocal) + watcher.asInstanceOf[InternalActorRef].sendSystemMessage(DeathWatchNotification(this, existenceConfirmed = true, addressTerminated = false)) + + private def unwatchWatched(watched: ActorRef): Unit = + watched.asInstanceOf[InternalActorRef].sendSystemMessage(Unwatch(watched, this)) + + override def stop(): Unit = sendTerminated() + + @tailrec private def addWatcher(watchee: ActorRef, watcher: ActorRef): Unit = + _watchedBy.get() match { + case null ⇒ + sendTerminated(ifLocal = true)(watcher) + sendTerminated(ifLocal = false)(watcher) + + case watchedBy ⇒ + val watcheeSelf = watchee == this + val watcherSelf = watcher == this + + if (watcheeSelf && !watcherSelf) { + if (!watchedBy.contains(watcher)) + if (!_watchedBy.compareAndSet(watchedBy, watchedBy + watcher)) + addWatcher(watchee, watcher) // try again + } else if (!watcheeSelf && watcherSelf) { + publish(Logging.Warning(path.toString, classOf[FunctionRef], s"externally triggered watch from $watcher to $watchee is illegal on FunctionRef")) + } else { + publish(Logging.Error(path.toString, classOf[FunctionRef], s"BUG: illegal Watch($watchee,$watcher) for $this")) + } + } + + @tailrec private def remWatcher(watchee: ActorRef, watcher: ActorRef): Unit = { + _watchedBy.get() match { + case null ⇒ // do nothing... + case watchedBy ⇒ + val watcheeSelf = watchee == this + val watcherSelf = watcher == this + + if (watcheeSelf && !watcherSelf) { + if (watchedBy.contains(watcher)) + if (!_watchedBy.compareAndSet(watchedBy, watchedBy - watcher)) + remWatcher(watchee, watcher) // try again + } else if (!watcheeSelf && watcherSelf) { + publish(Logging.Warning(path.toString, classOf[FunctionRef], s"externally triggered unwatch from $watcher to $watchee is illegal on FunctionRef")) + } else { + publish(Logging.Error(path.toString, classOf[FunctionRef], s"BUG: illegal Unwatch($watchee,$watcher) for $this")) + } + } + } + + private def publish(e: Logging.LogEvent): Unit = try eventStream.publish(e) catch { case NonFatal(_) ⇒ } + + /** + * Have this FunctionRef watch the given Actor. 
This method must not be + * called concurrently from different threads, it should only be called by + * its parent Actor. + * + * Upon receiving the Terminated message, unwatch() must be called from a + * safe context (i.e. normally from the parent Actor). + */ + def watch(actorRef: ActorRef): Unit = { + watching += actorRef + actorRef.asInstanceOf[InternalActorRef].sendSystemMessage(Watch(actorRef.asInstanceOf[InternalActorRef], this)) + } + + /** + * Have this FunctionRef unwatch the given Actor. This method must not be + * called concurrently from different threads, it should only be called by + * its parent Actor. + */ + def unwatch(actorRef: ActorRef): Unit = { + watching -= actorRef + actorRef.asInstanceOf[InternalActorRef].sendSystemMessage(Unwatch(actorRef.asInstanceOf[InternalActorRef], this)) + } + + /** + * Query whether this FunctionRef is currently watching the given Actor. This + * method must not be called concurrently from different threads, it should + * only be called by its parent Actor. + */ + def isWatching(actorRef: ActorRef): Boolean = watching.contains(actorRef) +} diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 0bcb41b726..3f0969b4d9 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorSelection.scala b/akka-actor/src/main/scala/akka/actor/ActorSelection.scala index 67051a53c1..5a6ef079e6 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSelection.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSelection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index b088dd9bff..16355cf79b 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor @@ -491,7 +491,7 @@ abstract class ExtendedActorSystem extends ActorSystem { /** * Create an actor in the "/system" namespace. This actor will be shut down - * during system shutdown only after all user actors have terminated. + * during system.terminate only after all user actors have terminated. */ def systemActorOf(props: Props, name: String): ActorRef diff --git a/akka-actor/src/main/scala/akka/actor/Address.scala b/akka-actor/src/main/scala/akka/actor/Address.scala index 955f127522..480b4f2135 100644 --- a/akka-actor/src/main/scala/akka/actor/Address.scala +++ b/akka-actor/src/main/scala/akka/actor/Address.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor import java.net.URI diff --git a/akka-actor/src/main/scala/akka/actor/Deployer.scala b/akka-actor/src/main/scala/akka/actor/Deployer.scala index fde0247ecb..a96e233f30 100644 --- a/akka-actor/src/main/scala/akka/actor/Deployer.scala +++ b/akka-actor/src/main/scala/akka/actor/Deployer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
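`FunctionRef` itself is an internal API (created via `ActorCell.addFunctionRef`), so the following is only a conceptual stand-in for the pattern it enables — running a side-effecting function, typically a message transformation, before the real target sees the message. All names are illustrative:

```scala
import akka.actor.{ Actor, ActorRef, Props }

// Conceptual sketch only: an ordinary adapter actor that rewrites and forwards
// every message, which is the kind of behaviour a FunctionRef provides without
// counting as a live child of its parent.
class ForwardingAdapter(target: ActorRef, transform: Any => Any) extends Actor {
  def receive = {
    case msg => target forward transform(msg)
  }
}

// usage (system, worker and wrap are assumed to exist):
// val adapter = system.actorOf(Props(new ForwardingAdapter(worker, wrap)))
```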
*/ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala b/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala index c5e95877c5..cfb22bed94 100644 --- a/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala +++ b/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Extension.scala b/akka-actor/src/main/scala/akka/actor/Extension.scala index 2308421e99..089918adea 100644 --- a/akka-actor/src/main/scala/akka/actor/Extension.scala +++ b/akka-actor/src/main/scala/akka/actor/Extension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala index 6ca8358ebf..a7a768b9cd 100644 --- a/akka-actor/src/main/scala/akka/actor/FSM.scala +++ b/akka-actor/src/main/scala/akka/actor/FSM.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala index 40d4f51db6..0bab8b2976 100644 --- a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala +++ b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/IndirectActorProducer.scala b/akka-actor/src/main/scala/akka/actor/IndirectActorProducer.scala index 95209d4613..25397feed3 100644 --- a/akka-actor/src/main/scala/akka/actor/IndirectActorProducer.scala +++ b/akka-actor/src/main/scala/akka/actor/IndirectActorProducer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala b/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala index 6ee5a5df20..227aa98555 100644 --- a/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Props.scala b/akka-actor/src/main/scala/akka/actor/Props.scala index 2a96bd0066..54744b3619 100644 --- a/akka-actor/src/main/scala/akka/actor/Props.scala +++ b/akka-actor/src/main/scala/akka/actor/Props.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala b/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala index cbffae2434..ddf08769ce 100644 --- a/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala +++ b/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala b/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala index 7b24c015e2..95045a597d 100644 --- a/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor @@ -150,7 +150,10 @@ private[akka] class RepointableActorRef( lookup.getChildByName(childName) match { case Some(crs: ChildRestartStats) if uid == ActorCell.undefinedUid || uid == crs.uid ⇒ crs.child.asInstanceOf[InternalActorRef].getChild(name) - case _ ⇒ Nobody + case _ ⇒ lookup match { + case ac: ActorCell ⇒ ac.getFunctionRefOrNobody(childName, uid) + case _ ⇒ Nobody + } } } } else this diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala index 426d094b6b..1b43724a8a 100644 --- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Stash.scala b/akka-actor/src/main/scala/akka/actor/Stash.scala index 1abbb9f6fb..09fc5c379a 100644 --- a/akka-actor/src/main/scala/akka/actor/Stash.scala +++ b/akka-actor/src/main/scala/akka/actor/Stash.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index b19cdefcdb..3a1424b86e 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala index 27f2a53852..f14eceb3a5 100644 --- a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/UntypedActorWithStash.scala b/akka-actor/src/main/scala/akka/actor/UntypedActorWithStash.scala index ae5450aee8..af0f4feec5 100644 --- a/akka-actor/src/main/scala/akka/actor/UntypedActorWithStash.scala +++ b/akka-actor/src/main/scala/akka/actor/UntypedActorWithStash.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/dsl/Creators.scala b/akka-actor/src/main/scala/akka/actor/dsl/Creators.scala index 2f3cbffbe6..e0ce2e03e3 100644 --- a/akka-actor/src/main/scala/akka/actor/dsl/Creators.scala +++ b/akka-actor/src/main/scala/akka/actor/dsl/Creators.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.actor.dsl diff --git a/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala b/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala index f497787f2f..1d05a53fdf 100644 --- a/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala +++ b/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dsl diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala b/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala index f14313de84..36ad87a1da 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon @@ -12,6 +12,10 @@ import akka.serialization.SerializationExtension import akka.util.{ Unsafe, Helpers } import akka.serialization.SerializerWithStringManifest +private[akka] object Children { + val GetNobody = () ⇒ Nobody +} + private[akka] trait Children { this: ActorCell ⇒ import ChildrenContainer._ @@ -41,14 +45,63 @@ private[akka] trait Children { this: ActorCell ⇒ private[akka] def attachChild(props: Props, name: String, systemService: Boolean): ActorRef = makeChild(this, props, checkName(name), async = true, systemService = systemService) - @volatile private var _nextNameDoNotCallMeDirectly = 0L - final protected def randomName(): String = { - @tailrec def inc(): Long = { - val current = Unsafe.instance.getLongVolatile(this, AbstractActorCell.nextNameOffset) - if (Unsafe.instance.compareAndSwapLong(this, AbstractActorCell.nextNameOffset, current, current + 1)) current - else inc() + @volatile private var _functionRefsDoNotCallMeDirectly = Map.empty[String, FunctionRef] + private def functionRefs: Map[String, FunctionRef] = + Unsafe.instance.getObjectVolatile(this, AbstractActorCell.functionRefsOffset).asInstanceOf[Map[String, FunctionRef]] + + private[akka] def getFunctionRefOrNobody(name: String, uid: Int = ActorCell.undefinedUid): InternalActorRef = + functionRefs.getOrElse(name, Children.GetNobody()) match { + case f: FunctionRef ⇒ + if (uid == ActorCell.undefinedUid || f.path.uid == uid) f else Nobody + case other ⇒ + other } - Helpers.base64(inc()) + + private[akka] def addFunctionRef(f: (ActorRef, Any) ⇒ Unit): FunctionRef = { + val childPath = new ChildActorPath(self.path, randomName(new java.lang.StringBuilder("$$")), ActorCell.newUid()) + val ref = new FunctionRef(childPath, provider, system.eventStream, f) + + @tailrec def rec(): Unit = { + val old = functionRefs + val added = old.updated(childPath.name, ref) + if (!Unsafe.instance.compareAndSwapObject(this, AbstractActorCell.functionRefsOffset, old, added)) rec() + } + rec() + + ref + } + + private[akka] def removeFunctionRef(ref: FunctionRef): Boolean = { + require(ref.path.parent eq self.path, "trying to remove FunctionRef from wrong ActorCell") + val name = ref.path.name + @tailrec def rec(): Boolean = { + val old = functionRefs + if (!old.contains(name)) false + else { + val removed = old - name + if (!Unsafe.instance.compareAndSwapObject(this, AbstractActorCell.functionRefsOffset, old, removed)) rec() + else { + ref.stop() + true + } + } + } + rec() + } + + protected def stopFunctionRefs(): Unit = { + val refs = Unsafe.instance.getAndSetObject(this, AbstractActorCell.functionRefsOffset, Map.empty).asInstanceOf[Map[String, FunctionRef]] + 
refs.valuesIterator.foreach(_.stop()) + } + + @volatile private var _nextNameDoNotCallMeDirectly = 0L + final protected def randomName(sb: java.lang.StringBuilder): String = { + val num = Unsafe.instance.getAndAddLong(this, AbstractActorCell.nextNameOffset, 1) + Helpers.base64(num, sb) + } + final protected def randomName(): String = { + val num = Unsafe.instance.getAndAddLong(this, AbstractActorCell.nextNameOffset, 1) + Helpers.base64(num) } final def stop(actor: ActorRef): Unit = { @@ -140,14 +193,14 @@ private[akka] trait Children { this: ActorCell ⇒ // optimization for the non-uid case getChildByName(name) match { case Some(crs: ChildRestartStats) ⇒ crs.child.asInstanceOf[InternalActorRef] - case _ ⇒ Nobody + case _ ⇒ getFunctionRefOrNobody(name) } } else { val (childName, uid) = ActorCell.splitNameAndUid(name) getChildByName(childName) match { case Some(crs: ChildRestartStats) if uid == ActorCell.undefinedUid || uid == crs.uid ⇒ crs.child.asInstanceOf[InternalActorRef] - case _ ⇒ Nobody + case _ ⇒ getFunctionRefOrNobody(childName, uid) } } diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala b/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala index 04433ef6b2..9060c887e3 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala index 41892202b3..002267ac7a 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala b/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala index f404b43306..84742122f5 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala b/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala index 1bc158a01c..73ba9f2db1 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
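The `functionRefs` bookkeeping above is a lock-free update of an immutable `Map` using `Unsafe` compare-and-swap with retry. The same pattern, sketched with a plain `AtomicReference` rather than the internal `Unsafe` machinery (illustrative only):

```scala
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

// Lock-free registry: copy-on-write of an immutable Map guarded by CAS.
final class Registry[A] {
  private val refs = new AtomicReference(Map.empty[String, A])

  @tailrec def add(name: String, value: A): Unit = {
    val old = refs.get()
    // retry if another thread updated the map concurrently
    if (!refs.compareAndSet(old, old.updated(name, value))) add(name, value)
  }

  @tailrec def remove(name: String): Boolean = {
    val old = refs.get()
    if (!old.contains(name)) false
    else if (refs.compareAndSet(old, old - name)) true
    else remove(name)
  }

  def lookup(name: String): Option[A] = refs.get().get(name)
}
```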
*/ package akka.actor.dungeon @@ -211,6 +211,7 @@ private[akka] trait FaultHandling { this: ActorCell ⇒ catch handleNonFatalOrInterruptedException { e ⇒ publish(Error(e, self.path.toString, clazz(a), e.getMessage)) } finally try dispatcher.detach(this) finally try parent.sendSystemMessage(DeathWatchNotification(self, existenceConfirmed = true, addressTerminated = false)) + finally try stopFunctionRefs() finally try tellWatchersWeDied() finally try unwatchWatchedActors(a) // stay here as we expect an emergency stop from handleInvokeFailure finally { diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala b/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala index 2a4610a582..c2167aea5b 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.actor.dungeon diff --git a/akka-actor/src/main/scala/akka/actor/package.scala b/akka-actor/src/main/scala/akka/actor/package.scala index f5ce10e108..fb1b39ac26 100644 --- a/akka-actor/src/main/scala/akka/actor/package.scala +++ b/akka-actor/src/main/scala/akka/actor/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala index 9a5a0d4bb4..a83816180e 100644 --- a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch @@ -92,17 +92,17 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator @volatile private[this] var _inhabitantsDoNotCallMeDirectly: Long = _ // DO NOT TOUCH! @volatile private[this] var _shutdownScheduleDoNotCallMeDirectly: Int = _ // DO NOT TOUCH! - @tailrec private final def addInhabitants(add: Long): Long = { - val c = inhabitants - val r = c + add - if (r < 0) { + private final def addInhabitants(add: Long): Long = { + val old = Unsafe.instance.getAndAddLong(this, inhabitantsOffset, add) + val ret = old + add + if (ret < 0) { // We haven't succeeded in decreasing the inhabitants yet but the simple fact that we're trying to // go below zero means that there is an imbalance and we might as well throw the exception val e = new IllegalStateException("ACTOR SYSTEM CORRUPTED!!! A dispatcher can't have less than 0 inhabitants!") reportFailure(e) throw e } - if (Unsafe.instance.compareAndSwapLong(this, inhabitantsOffset, c, r)) r else addInhabitants(add) + ret } final def inhabitants: Long = Unsafe.instance.getLongVolatile(this, inhabitantsOffset) diff --git a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala index bf7eb935e2..aa5e51db59 100644 --- a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
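The `addInhabitants` change above replaces a compare-and-swap retry loop with a single atomic fetch-and-add. The same idea expressed with `AtomicLong` instead of `Unsafe` (a sketch, not the dispatcher code):

```scala
import java.util.concurrent.atomic.AtomicLong

final class InhabitantCounter {
  private val inhabitants = new AtomicLong(0L)

  def addInhabitants(add: Long): Long = {
    // one atomic add instead of a CAS retry loop
    val ret = inhabitants.addAndGet(add)
    if (ret < 0)
      throw new IllegalStateException("inhabitant count must never drop below zero")
    ret
  }

  def current: Long = inhabitants.get()
}
```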
*/ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/BatchingExecutor.scala b/akka-actor/src/main/scala/akka/dispatch/BatchingExecutor.scala index 4e3189f81b..fe41b09cfa 100644 --- a/akka-actor/src/main/scala/akka/dispatch/BatchingExecutor.scala +++ b/akka-actor/src/main/scala/akka/dispatch/BatchingExecutor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/CachingConfig.scala b/akka-actor/src/main/scala/akka/dispatch/CachingConfig.scala index ab5579dc7a..7f196b9800 100644 --- a/akka-actor/src/main/scala/akka/dispatch/CachingConfig.scala +++ b/akka-actor/src/main/scala/akka/dispatch/CachingConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala index c13783bc2d..7389b5f7ef 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala index cfe484c8ad..e7338a5c3b 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala index f753967487..4229192dd1 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch @@ -11,6 +11,8 @@ import java.lang.{ Iterable ⇒ JIterable } import java.util.{ LinkedList ⇒ JLinkedList } import java.util.concurrent.{ Executor, ExecutorService, ExecutionException, Callable, TimeoutException } import scala.util.{ Try, Success, Failure } +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture /** * ExecutionContexts is the Java API for ExecutionContexts @@ -111,6 +113,15 @@ object Futures { */ def successful[T](result: T): Future[T] = Future.successful(result) + /** + * Creates an already completed CompletionStage with the specified exception + */ + def failedCompletionStage[T](ex: Throwable): CompletionStage[T] = { + val f = CompletableFuture.completedFuture[T](null.asInstanceOf[T]) + f.obtrudeException(ex) + f + } + /** * Returns a Future that will hold the optional result of the first Future with a result that matches the predicate */ diff --git a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala index ff085f3c0d..404dbf9cc9 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
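`Futures.failedCompletionStage` gives callers an already-failed `CompletionStage`, analogous to `Future.failed` on the Scala side. A minimal usage sketch:

```scala
import java.util.concurrent.CompletionStage
import akka.dispatch.Futures

val failed: CompletionStage[String] =
  Futures.failedCompletionStage(new IllegalArgumentException("boom"))

// the stage is already completed, and completed exceptionally
assert(failed.toCompletableFuture.isCompletedExceptionally)
```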
*/ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala b/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala index 59c70ee65c..5c6daef53d 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala index f485eedcc7..e4bb1b5c08 100644 --- a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala index ffa456e894..4f920c8dcb 100644 --- a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala +++ b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/sysmsg/SystemMessage.scala b/akka-actor/src/main/scala/akka/dispatch/sysmsg/SystemMessage.scala index 697dcb3edd..78004f5084 100644 --- a/akka-actor/src/main/scala/akka/dispatch/sysmsg/SystemMessage.scala +++ b/akka-actor/src/main/scala/akka/dispatch/sysmsg/SystemMessage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.dispatch.sysmsg diff --git a/akka-actor/src/main/scala/akka/event/AddressTerminatedTopic.scala b/akka-actor/src/main/scala/akka/event/AddressTerminatedTopic.scala index c5e74220c8..836ef6fd70 100644 --- a/akka-actor/src/main/scala/akka/event/AddressTerminatedTopic.scala +++ b/akka-actor/src/main/scala/akka/event/AddressTerminatedTopic.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/DeadLetterListener.scala b/akka-actor/src/main/scala/akka/event/DeadLetterListener.scala index f9f2bad288..642098af55 100644 --- a/akka-actor/src/main/scala/akka/event/DeadLetterListener.scala +++ b/akka-actor/src/main/scala/akka/event/DeadLetterListener.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/EventBus.scala b/akka-actor/src/main/scala/akka/event/EventBus.scala index b7d615e38c..904eb9827e 100644 --- a/akka-actor/src/main/scala/akka/event/EventBus.scala +++ b/akka-actor/src/main/scala/akka/event/EventBus.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/EventBusUnsubscribers.scala b/akka-actor/src/main/scala/akka/event/EventBusUnsubscribers.scala index 7bc3ee25c5..8c1ba137e6 100644 --- a/akka-actor/src/main/scala/akka/event/EventBusUnsubscribers.scala +++ b/akka-actor/src/main/scala/akka/event/EventBusUnsubscribers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/EventStream.scala b/akka-actor/src/main/scala/akka/event/EventStream.scala index 3a7b1069e9..339b8b8e37 100644 --- a/akka-actor/src/main/scala/akka/event/EventStream.scala +++ b/akka-actor/src/main/scala/akka/event/EventStream.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/LoggerMailbox.scala b/akka-actor/src/main/scala/akka/event/LoggerMailbox.scala index 1397b93ec1..7d5c89dea2 100644 --- a/akka-actor/src/main/scala/akka/event/LoggerMailbox.scala +++ b/akka-actor/src/main/scala/akka/event/LoggerMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala index 0711eaa96d..86089fccf2 100644 --- a/akka-actor/src/main/scala/akka/event/Logging.scala +++ b/akka-actor/src/main/scala/akka/event/Logging.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event diff --git a/akka-actor/src/main/scala/akka/event/LoggingReceive.scala b/akka-actor/src/main/scala/akka/event/LoggingReceive.scala index d9acda9690..df66cd490a 100644 --- a/akka-actor/src/main/scala/akka/event/LoggingReceive.scala +++ b/akka-actor/src/main/scala/akka/event/LoggingReceive.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event @@ -54,6 +54,7 @@ class LoggingReceive(source: Option[AnyRef], r: Receive, label: Option[String])( if (context.system.eventStream.logLevel >= Logging.DebugLevel) { val (str, clazz) = LogSource.fromAnyRef(source getOrElse context.asInstanceOf[ActorCell].actor) context.system.eventStream.publish(Debug(str, clazz, "received " + (if (handled) "handled" else "unhandled") + " message " + o + + " from " + context.sender() + (label match { case Some(l) ⇒ " in state " + l case _ ⇒ "" diff --git a/akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala b/akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala index ab26631b65..398a8f4ac3 100644 --- a/akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala +++ b/akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event.japi diff --git a/akka-actor/src/main/scala/akka/io/DirectByteBufferPool.scala b/akka-actor/src/main/scala/akka/io/DirectByteBufferPool.scala index 581d43881c..a9336606ea 100644 --- a/akka-actor/src/main/scala/akka/io/DirectByteBufferPool.scala +++ b/akka-actor/src/main/scala/akka/io/DirectByteBufferPool.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/IO.scala b/akka-actor/src/main/scala/akka/io/IO.scala index 81ec6f24c7..75fd74098d 100644 --- a/akka-actor/src/main/scala/akka/io/IO.scala +++ b/akka-actor/src/main/scala/akka/io/IO.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
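The `LoggingReceive` tweak above adds the sender to the emitted debug line. For reference, wrapping a behavior in `LoggingReceive` looks like this (the logging only fires with `akka.actor.debug.receive = on`):

```scala
import akka.actor.Actor
import akka.event.LoggingReceive

class Echo extends Actor {
  // each received message is logged as handled/unhandled, now including its sender
  def receive = LoggingReceive {
    case msg => sender() ! msg
  }
}
```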
*/ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/Inet.scala b/akka-actor/src/main/scala/akka/io/Inet.scala index 105e00c519..e8a0d41e01 100644 --- a/akka-actor/src/main/scala/akka/io/Inet.scala +++ b/akka-actor/src/main/scala/akka/io/Inet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/SelectionHandler.scala b/akka-actor/src/main/scala/akka/io/SelectionHandler.scala index 4caeb593c7..44c7159186 100644 --- a/akka-actor/src/main/scala/akka/io/SelectionHandler.scala +++ b/akka-actor/src/main/scala/akka/io/SelectionHandler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/Tcp.scala b/akka-actor/src/main/scala/akka/io/Tcp.scala index 00cfa052f0..86f847f7a2 100644 --- a/akka-actor/src/main/scala/akka/io/Tcp.scala +++ b/akka-actor/src/main/scala/akka/io/Tcp.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/TcpConnection.scala b/akka-actor/src/main/scala/akka/io/TcpConnection.scala index 90cc34edce..fd9ee405ad 100644 --- a/akka-actor/src/main/scala/akka/io/TcpConnection.scala +++ b/akka-actor/src/main/scala/akka/io/TcpConnection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/TcpIncomingConnection.scala b/akka-actor/src/main/scala/akka/io/TcpIncomingConnection.scala index a5f835fcfa..9e3eec8fe1 100644 --- a/akka-actor/src/main/scala/akka/io/TcpIncomingConnection.scala +++ b/akka-actor/src/main/scala/akka/io/TcpIncomingConnection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/TcpListener.scala b/akka-actor/src/main/scala/akka/io/TcpListener.scala index 50152806ce..ba22e051c8 100644 --- a/akka-actor/src/main/scala/akka/io/TcpListener.scala +++ b/akka-actor/src/main/scala/akka/io/TcpListener.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/TcpManager.scala b/akka-actor/src/main/scala/akka/io/TcpManager.scala index d03ff3dae7..c9e1b04c5b 100644 --- a/akka-actor/src/main/scala/akka/io/TcpManager.scala +++ b/akka-actor/src/main/scala/akka/io/TcpManager.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala b/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala index 79e6426467..6febd33276 100644 --- a/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala +++ b/akka-actor/src/main/scala/akka/io/TcpOutgoingConnection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/Udp.scala b/akka-actor/src/main/scala/akka/io/Udp.scala index 53125c158b..646368eaf8 100644 --- a/akka-actor/src/main/scala/akka/io/Udp.scala +++ b/akka-actor/src/main/scala/akka/io/Udp.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpConnected.scala b/akka-actor/src/main/scala/akka/io/UdpConnected.scala index f643b5c231..13568a41cb 100644 --- a/akka-actor/src/main/scala/akka/io/UdpConnected.scala +++ b/akka-actor/src/main/scala/akka/io/UdpConnected.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpConnectedManager.scala b/akka-actor/src/main/scala/akka/io/UdpConnectedManager.scala index 51688af8a9..02e25b6be5 100644 --- a/akka-actor/src/main/scala/akka/io/UdpConnectedManager.scala +++ b/akka-actor/src/main/scala/akka/io/UdpConnectedManager.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpConnection.scala b/akka-actor/src/main/scala/akka/io/UdpConnection.scala index 8c9fe546fa..ae5c1c3eba 100644 --- a/akka-actor/src/main/scala/akka/io/UdpConnection.scala +++ b/akka-actor/src/main/scala/akka/io/UdpConnection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpListener.scala b/akka-actor/src/main/scala/akka/io/UdpListener.scala index cd18a2bd1a..faff4b8e9d 100644 --- a/akka-actor/src/main/scala/akka/io/UdpListener.scala +++ b/akka-actor/src/main/scala/akka/io/UdpListener.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpManager.scala b/akka-actor/src/main/scala/akka/io/UdpManager.scala index 4bf011f76d..197dc36a95 100644 --- a/akka-actor/src/main/scala/akka/io/UdpManager.scala +++ b/akka-actor/src/main/scala/akka/io/UdpManager.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/UdpSender.scala b/akka-actor/src/main/scala/akka/io/UdpSender.scala index e0d3c3c9ca..21fcf2b404 100644 --- a/akka-actor/src/main/scala/akka/io/UdpSender.scala +++ b/akka-actor/src/main/scala/akka/io/UdpSender.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/io/WithUdpSend.scala b/akka-actor/src/main/scala/akka/io/WithUdpSend.scala index ae89c9e80e..63bffa5f91 100644 --- a/akka-actor/src/main/scala/akka/io/WithUdpSend.scala +++ b/akka-actor/src/main/scala/akka/io/WithUdpSend.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.io diff --git a/akka-actor/src/main/scala/akka/japi/JavaAPI.scala b/akka-actor/src/main/scala/akka/japi/JavaAPI.scala index 30721e6c1b..07020c0b5a 100644 --- a/akka-actor/src/main/scala/akka/japi/JavaAPI.scala +++ b/akka-actor/src/main/scala/akka/japi/JavaAPI.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.japi diff --git a/akka-actor/src/main/scala/akka/japi/function/Function.scala b/akka-actor/src/main/scala/akka/japi/function/Function.scala index ce635a26c7..03007f292e 100644 --- a/akka-actor/src/main/scala/akka/japi/function/Function.scala +++ b/akka-actor/src/main/scala/akka/japi/function/Function.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.function diff --git a/akka-actor/src/main/scala/akka/japi/pf/CaseStatements.scala b/akka-actor/src/main/scala/akka/japi/pf/CaseStatements.scala index b6c08c037e..d95cd97cae 100644 --- a/akka-actor/src/main/scala/akka/japi/pf/CaseStatements.scala +++ b/akka-actor/src/main/scala/akka/japi/pf/CaseStatements.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.japi.pf diff --git a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala index 0fb169056c..ec2a3d9c70 100644 --- a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala b/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala index 362674d1b1..b52036fae6 100644 --- a/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala +++ b/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/BackoffOptions.scala b/akka-actor/src/main/scala/akka/pattern/BackoffOptions.scala index 3dad0069c7..49f7331629 100644 --- a/akka-actor/src/main/scala/akka/pattern/BackoffOptions.scala +++ b/akka-actor/src/main/scala/akka/pattern/BackoffOptions.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/BackoffSupervisor.scala b/akka-actor/src/main/scala/akka/pattern/BackoffSupervisor.scala index d589ce20e9..6015e38ce1 100644 --- a/akka-actor/src/main/scala/akka/pattern/BackoffSupervisor.scala +++ b/akka-actor/src/main/scala/akka/pattern/BackoffSupervisor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala index a0be3ff2f6..5520d25e0a 100644 --- a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala +++ b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala index fbec79d4b2..51fb9193a7 100644 --- a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.pattern @@ -8,6 +8,10 @@ import scala.concurrent.{ ExecutionContext, Promise, Future } import akka.actor._ import scala.util.control.NonFatal import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture +import akka.dispatch.Futures +import java.util.function.BiConsumer trait FutureTimeoutSupport { /** @@ -22,4 +26,29 @@ trait FutureTimeoutSupport { using.scheduleOnce(duration) { p completeWith { try value catch { case NonFatal(t) ⇒ Future.failed(t) } } } p.future } + + /** + * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value + * after the specified duration. + */ + def afterCompletionStage[T](duration: FiniteDuration, using: Scheduler)(value: ⇒ CompletionStage[T])(implicit ec: ExecutionContext): CompletionStage[T] = + if (duration.isFinite() && duration.length < 1) { + try value catch { case NonFatal(t) ⇒ Futures.failedCompletionStage(t) } + } else { + val p = new CompletableFuture[T] + using.scheduleOnce(duration) { + try { + val future = value + future.whenComplete(new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable): Unit = { + if (t != null) p.complete(t) + if (ex != null) p.completeExceptionally(ex) + } + }) + } catch { + case NonFatal(ex) ⇒ p.completeExceptionally(ex) + } + } + p + } } diff --git a/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala b/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala index d9e9c09839..1a3e0c374f 100644 --- a/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/Patterns.scala b/akka-actor/src/main/scala/akka/pattern/Patterns.scala index 17d2814134..c202937eb1 100644 --- a/akka-actor/src/main/scala/akka/pattern/Patterns.scala +++ b/akka-actor/src/main/scala/akka/pattern/Patterns.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern @@ -7,6 +7,8 @@ import akka.actor.{ ActorSelection, Scheduler } import java.util.concurrent.{ Callable, TimeUnit } import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ object Patterns { import akka.japi @@ -250,9 +252,262 @@ object Patterns { scalaAfter(duration, scheduler)(value.call())(context) /** - * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value + * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided Callable * after the specified duration. 
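`afterCompletionStage` mirrors the existing `after` helper for `CompletionStage` values: the by-name value is evaluated only once the delay has elapsed, with an immediate evaluation for effectively-zero durations. For orientation, the established `Future`-based variant is used like this (the new method behaves the same for `CompletionStage`):

```scala
import scala.concurrent.Future
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.pattern.after

val system = ActorSystem("demo")
import system.dispatcher

// evaluate the by-name Future only after 200 milliseconds have elapsed
val delayed: Future[String] =
  after(200.millis, system.scheduler)(Future.successful("ready"))
```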
*/ def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Future[T]): Future[T] = scalaAfter(duration, scheduler)(value)(context) } + +object PatternsCS { + import akka.japi + import akka.actor.{ ActorRef, ActorSystem } + import akka.pattern.{ ask ⇒ scalaAsk, pipe ⇒ scalaPipe, gracefulStop ⇒ scalaGracefulStop, after ⇒ scalaAfter } + import akka.util.Timeout + import scala.concurrent.Future + import scala.concurrent.duration._ + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target actor + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(actor: ActorRef, message: Any, timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(actor, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * worker, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(actor: ActorRef, messageFactory: japi.Function[ActorRef, Any], timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(actor, messageFactory.apply _)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target actor + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. 
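Note that `java.util.concurrent.CompletionStage` does not offer Future's `onSuccess`; the non-blocking handling illustrated in the recommended-usage snippets would instead go through methods such as `thenAccept` or `whenComplete`. A minimal, hedged sketch of consuming the new `PatternsCS.ask` from Java follows; the `worker` and `nextActor` references and the five-second timeout are assumptions made for illustration.

```
// Hedged sketch only -- actor references and timeout are assumed.
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;

import akka.actor.ActorRef;
import akka.pattern.PatternsCS;
import akka.util.Timeout;

public class AskExample {
  // Sends a request and forwards the eventual reply without blocking the caller.
  static void askAndForward(ActorRef worker, ActorRef nextActor, Object request) {
    Timeout timeout = new Timeout(5, TimeUnit.SECONDS);

    CompletionStage<Object> reply = PatternsCS.ask(worker, request, timeout);

    // CompletionStage exposes thenAccept/whenComplete rather than Future's onSuccess
    reply.thenAccept(o -> nextActor.tell(o, ActorRef.noSender()));
  }
}
```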
+ * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(actor: ActorRef, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(actor, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * worker, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(actor: ActorRef, messageFactory: japi.Function[ActorRef, Any], timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(actor, messageFactory.apply _)(Timeout(timeoutMillis.millis)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target [[akka.actor.ActorSelection]] + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(selection, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(selection: ActorSelection, message: Any, timeout: Timeout): CompletionStage[AnyRef] = + scalaAsk(selection, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Java API for `akka.pattern.ask`: + * Sends a message asynchronously and returns a [[java.util.concurrent.CompletionStage]] + * holding the eventual reply message; this means that the target [[akka.actor.ActorSelection]] + * needs to send the result to the `sender` reference provided. The CompletionStage + * will be completed with an [[akka.pattern.AskTimeoutException]] after the + * given timeout has expired; this is independent from any timeout applied + * while awaiting a result for this future (i.e. in + * `Await.result(..., timeout)`). + * + * Warning: + * When using future callbacks, inside actors you need to carefully avoid closing over + * the containing actor’s object, i.e. do not call methods or access mutable state + * on the enclosing actor from within the callback. This would break the actor + * encapsulation and may introduce synchronization bugs and race conditions because + * the callback will be scheduled concurrently to the enclosing actor. 
Unfortunately + * there is not yet a way to detect these illegal accesses at compile time. + * + * Recommended usage: + * + * {{{ + * final CompletionStage f = Patterns.ask(selection, request, timeout); + * f.onSuccess(new Procedure() { + * public void apply(Object o) { + * nextActor.tell(new EnrichedResult(request, o)); + * } + * }); + * }}} + */ + def ask(selection: ActorSelection, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(selection, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * A variation of ask which allows to implement "replyTo" pattern by including + * sender reference in message. + * + * {{{ + * final CompletionStage f = Patterns.ask( + * selection, + * new akka.japi.Function { + * Object apply(ActorRef askSender) { + * return new Request(askSender); + * } + * }, + * timeout); + * }}} + */ + def ask(selection: ActorSelection, messageFactory: japi.Function[ActorRef, Any], timeoutMillis: Long): CompletionStage[AnyRef] = + scalaAsk(selection, messageFactory.apply _)(Timeout(timeoutMillis.millis)).toJava.asInstanceOf[CompletionStage[AnyRef]] + + /** + * Register an onComplete callback on this [[java.util.concurrent.CompletionStage]] to send + * the result to the given [[akka.actor.ActorRef]] or [[akka.actor.ActorSelection]]. + * Returns the original CompletionStage to allow method chaining. + * If the future was completed with failure it is sent as a [[akka.actor.Status.Failure]] + * to the recipient. + * + * Recommended usage example: + * + * {{{ + * final CompletionStage f = Patterns.ask(worker, request, timeout); + * // apply some transformation (i.e. enrich with request info) + * final CompletionStage transformed = f.map(new akka.japi.Function() { ... }); + * // send it on to the next stage + * Patterns.pipe(transformed).to(nextActor); + * }}} + */ + def pipe[T](future: CompletionStage[T], context: ExecutionContext): PipeableCompletionStage[T] = pipeCompletionStage(future)(context) + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when + * existing messages of the target actor has been processed and the actor has been + * terminated. + * + * Useful when you need to wait for termination or compose ordered termination of several actors. + * + * If the target actor isn't terminated within the timeout the [[java.util.concurrent.CompletionStage]] + * is completed with failure [[akka.pattern.AskTimeoutException]]. + */ + def gracefulStop(target: ActorRef, timeout: FiniteDuration): CompletionStage[java.lang.Boolean] = + scalaGracefulStop(target, timeout).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]] + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when + * existing messages of the target actor has been processed and the actor has been + * terminated. + * + * Useful when you need to wait for termination or compose ordered termination of several actors. + * + * If you want to invoke specialized stopping logic on your target actor instead of PoisonPill, you can pass your + * stop command as `stopMessage` parameter + * + * If the target actor isn't terminated within the timeout the [[java.util.concurrent.CompletionStage]] + * is completed with failure [[akka.pattern.AskTimeoutException]]. 
+ */ + def gracefulStop(target: ActorRef, timeout: FiniteDuration, stopMessage: Any): CompletionStage[java.lang.Boolean] = + scalaGracefulStop(target, timeout, stopMessage).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]] + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the success or failure of the provided Callable + * after the specified duration. + */ + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Callable[CompletionStage[T]]): CompletionStage[T] = + afterCompletionStage(duration, scheduler)(value.call())(context) + + /** + * Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the success or failure of the provided value + * after the specified duration. + */ + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: CompletionStage[T]): CompletionStage[T] = + afterCompletionStage(duration, scheduler)(value)(context) +} diff --git a/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala b/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala index de34e22b77..1289ea7878 100644 --- a/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/PipeToSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern @@ -8,23 +8,23 @@ import scala.concurrent.{ Future, ExecutionContext } import scala.util.{ Failure, Success } import akka.actor.{ Status, ActorRef, Actor } import akka.actor.ActorSelection +import java.util.concurrent.CompletionStage +import java.util.function.BiConsumer trait PipeToSupport { final class PipeableFuture[T](val future: Future[T])(implicit executionContext: ExecutionContext) { def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): Future[T] = { - future onComplete { + future andThen { case Success(r) ⇒ recipient ! r case Failure(f) ⇒ recipient ! Status.Failure(f) } - future } def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): Future[T] = { - future onComplete { + future andThen { case Success(r) ⇒ recipient ! r case Failure(f) ⇒ recipient ! Status.Failure(f) } - future } def to(recipient: ActorRef): PipeableFuture[T] = to(recipient, Actor.noSender) def to(recipient: ActorRef, sender: ActorRef): PipeableFuture[T] = { @@ -38,6 +38,35 @@ trait PipeToSupport { } } + final class PipeableCompletionStage[T](val future: CompletionStage[T])(implicit executionContext: ExecutionContext) { + def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = { + future whenComplete new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable) { + if (t != null) recipient ! t + if (ex != null) recipient ! Status.Failure(ex) + } + } + } + def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = { + future whenComplete new BiConsumer[T, Throwable] { + override def accept(t: T, ex: Throwable) { + if (t != null) recipient ! t + if (ex != null) recipient ! 
Status.Failure(ex) + } + } + } + def to(recipient: ActorRef): PipeableCompletionStage[T] = to(recipient, Actor.noSender) + def to(recipient: ActorRef, sender: ActorRef): PipeableCompletionStage[T] = { + pipeTo(recipient)(sender) + this + } + def to(recipient: ActorSelection): PipeableCompletionStage[T] = to(recipient, Actor.noSender) + def to(recipient: ActorSelection, sender: ActorRef): PipeableCompletionStage[T] = { + pipeToSelection(recipient)(sender) + this + } + } + /** * Import this implicit conversion to gain the `pipeTo` method on [[scala.concurrent.Future]]: * @@ -56,4 +85,23 @@ trait PipeToSupport { * the failure is sent in a [[akka.actor.Status.Failure]] to the recipient. */ implicit def pipe[T](future: Future[T])(implicit executionContext: ExecutionContext): PipeableFuture[T] = new PipeableFuture(future) + + /** + * Import this implicit conversion to gain the `pipeTo` method on [[scala.concurrent.Future]]: + * + * {{{ + * import akka.pattern.pipe + * + * Future { doExpensiveCalc() } pipeTo nextActor + * + * or + * + * pipe(someFuture) to nextActor + * + * }}} + * + * The successful result of the future is sent as a message to the recipient, or + * the failure is sent in a [[akka.actor.Status.Failure]] to the recipient. + */ + implicit def pipeCompletionStage[T](future: CompletionStage[T])(implicit executionContext: ExecutionContext): PipeableCompletionStage[T] = new PipeableCompletionStage(future) } diff --git a/akka-actor/src/main/scala/akka/pattern/PromiseRef.scala b/akka-actor/src/main/scala/akka/pattern/PromiseRef.scala index e71299aff6..46d2222738 100644 --- a/akka-actor/src/main/scala/akka/pattern/PromiseRef.scala +++ b/akka-actor/src/main/scala/akka/pattern/PromiseRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.pattern diff --git a/akka-actor/src/main/scala/akka/pattern/extended/package.scala b/akka-actor/src/main/scala/akka/pattern/extended/package.scala index b3cbd4a5a8..4d135722ee 100644 --- a/akka-actor/src/main/scala/akka/pattern/extended/package.scala +++ b/akka-actor/src/main/scala/akka/pattern/extended/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka package pattern diff --git a/akka-actor/src/main/scala/akka/pattern/package.scala b/akka-actor/src/main/scala/akka/pattern/package.scala index bda2c27ac1..83fabb70b8 100644 --- a/akka-actor/src/main/scala/akka/pattern/package.scala +++ b/akka-actor/src/main/scala/akka/pattern/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-actor/src/main/scala/akka/routing/Balancing.scala b/akka-actor/src/main/scala/akka/routing/Balancing.scala index 7cbd19c6de..2b19227b7a 100644 --- a/akka-actor/src/main/scala/akka/routing/Balancing.scala +++ b/akka-actor/src/main/scala/akka/routing/Balancing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Broadcast.scala b/akka-actor/src/main/scala/akka/routing/Broadcast.scala index 135ceaffee..c0f6c058a7 100644 --- a/akka-actor/src/main/scala/akka/routing/Broadcast.scala +++ b/akka-actor/src/main/scala/akka/routing/Broadcast.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
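Beyond `ask`, the `PatternsCS` object and the `PipeableCompletionStage` added above cover piping a `CompletionStage` result to an actor, graceful stopping and delayed completion without leaving the Java 8 API. A hedged sketch combining `pipe` and `gracefulStop` follows; `system`, `worker`, `nextActor` and the timeout values are assumptions made for illustration.

```
// Hedged sketch only -- system, actor references and timeouts are assumed.
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;

import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.pattern.PatternsCS;
import scala.concurrent.duration.Duration;

public class PipeAndStopExample {
  static void run(ActorSystem system, ActorRef worker, ActorRef nextActor, Object request)
      throws Exception {
    // ask the worker and pipe the eventual reply (or Status.Failure) on to nextActor
    CompletionStage<Object> reply = PatternsCS.ask(worker, request, 5000L);
    PatternsCS.pipe(reply, system.dispatcher()).to(nextActor);

    // ask the worker to stop and wait for confirmation (blocking only for the demo)
    CompletionStage<Boolean> stopped =
        PatternsCS.gracefulStop(worker, Duration.create(5, TimeUnit.SECONDS));
    stopped.toCompletableFuture().get(6, TimeUnit.SECONDS);
  }
}
```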
*/ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala index 39c6c4a098..c756e05295 100644 --- a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala index b6f530c8dc..08db206f17 100644 --- a/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Listeners.scala b/akka-actor/src/main/scala/akka/routing/Listeners.scala index c35a8bbfee..c59dffd1af 100644 --- a/akka-actor/src/main/scala/akka/routing/Listeners.scala +++ b/akka-actor/src/main/scala/akka/routing/Listeners.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala b/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala index 075631b5fb..3ba84d1ddb 100644 --- a/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala +++ b/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Random.scala b/akka-actor/src/main/scala/akka/routing/Random.scala index 9a7e283aec..b9d246f9c8 100644 --- a/akka-actor/src/main/scala/akka/routing/Random.scala +++ b/akka-actor/src/main/scala/akka/routing/Random.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Resizer.scala b/akka-actor/src/main/scala/akka/routing/Resizer.scala index 72a18b9847..76c10434f3 100644 --- a/akka-actor/src/main/scala/akka/routing/Resizer.scala +++ b/akka-actor/src/main/scala/akka/routing/Resizer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/RoundRobin.scala b/akka-actor/src/main/scala/akka/routing/RoundRobin.scala index 399b35b5ac..1cafa45ae8 100644 --- a/akka-actor/src/main/scala/akka/routing/RoundRobin.scala +++ b/akka-actor/src/main/scala/akka/routing/RoundRobin.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala b/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala index 9c9f955717..d7b03c1694 100644 --- a/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala +++ b/akka-actor/src/main/scala/akka/routing/RoutedActorCell.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/RoutedActorRef.scala b/akka-actor/src/main/scala/akka/routing/RoutedActorRef.scala index ee6b3156ae..5580c2ebd0 100644 --- a/akka-actor/src/main/scala/akka/routing/RoutedActorRef.scala +++ b/akka-actor/src/main/scala/akka/routing/RoutedActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Router.scala b/akka-actor/src/main/scala/akka/routing/Router.scala index 7262c4142f..a85f87fcc6 100644 --- a/akka-actor/src/main/scala/akka/routing/Router.scala +++ b/akka-actor/src/main/scala/akka/routing/Router.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/RouterConfig.scala b/akka-actor/src/main/scala/akka/routing/RouterConfig.scala index 53b50a80dc..bf187c237c 100644 --- a/akka-actor/src/main/scala/akka/routing/RouterConfig.scala +++ b/akka-actor/src/main/scala/akka/routing/RouterConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/ScatterGatherFirstCompleted.scala b/akka-actor/src/main/scala/akka/routing/ScatterGatherFirstCompleted.scala index 6823e990a6..d0e586dad4 100644 --- a/akka-actor/src/main/scala/akka/routing/ScatterGatherFirstCompleted.scala +++ b/akka-actor/src/main/scala/akka/routing/ScatterGatherFirstCompleted.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/SmallestMailbox.scala b/akka-actor/src/main/scala/akka/routing/SmallestMailbox.scala index 86cca9aa24..4bade792c4 100644 --- a/akka-actor/src/main/scala/akka/routing/SmallestMailbox.scala +++ b/akka-actor/src/main/scala/akka/routing/SmallestMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/TailChopping.scala b/akka-actor/src/main/scala/akka/routing/TailChopping.scala index 2cbbe7fa9e..6f5994bda8 100644 --- a/akka-actor/src/main/scala/akka/routing/TailChopping.scala +++ b/akka-actor/src/main/scala/akka/routing/TailChopping.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala index 07d553dc3d..84944fcb03 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.serialization diff --git a/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala index 723cfd7afa..3665b0f308 100644 --- a/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala +++ b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.serialization diff --git a/akka-actor/src/main/scala/akka/serialization/Serializer.scala b/akka-actor/src/main/scala/akka/serialization/Serializer.scala index 7cadd47acc..b1f050c593 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serializer.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serializer.scala @@ -1,7 +1,7 @@ package akka.serialization /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ import java.io.{ ObjectOutputStream, ByteArrayOutputStream, ObjectInputStream, ByteArrayInputStream } diff --git a/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala b/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala index 6a51b50adf..d93bd9447f 100644 --- a/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala +++ b/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/BoxedType.scala b/akka-actor/src/main/scala/akka/util/BoxedType.scala index 9ef4322503..443da9eabe 100644 --- a/akka-actor/src/main/scala/akka/util/BoxedType.scala +++ b/akka-actor/src/main/scala/akka/util/BoxedType.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/ByteIterator.scala b/akka-actor/src/main/scala/akka/util/ByteIterator.scala index 65094a613e..8c626fc9bd 100644 --- a/akka-actor/src/main/scala/akka/util/ByteIterator.scala +++ b/akka-actor/src/main/scala/akka/util/ByteIterator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/ByteString.scala b/akka-actor/src/main/scala/akka/util/ByteString.scala index dcb74b6fbf..64ad348fad 100644 --- a/akka-actor/src/main/scala/akka/util/ByteString.scala +++ b/akka-actor/src/main/scala/akka/util/ByteString.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala b/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala index b451756cf3..3baf987e7a 100644 --- a/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala +++ b/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Collections.scala b/akka-actor/src/main/scala/akka/util/Collections.scala index 781df2e8ec..27a05eaea8 100644 --- a/akka-actor/src/main/scala/akka/util/Collections.scala +++ b/akka-actor/src/main/scala/akka/util/Collections.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Crypt.scala b/akka-actor/src/main/scala/akka/util/Crypt.scala index c1a08720a5..ad3bccd7b1 100644 --- a/akka-actor/src/main/scala/akka/util/Crypt.scala +++ b/akka-actor/src/main/scala/akka/util/Crypt.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/HashCode.scala b/akka-actor/src/main/scala/akka/util/HashCode.scala index 07a328b3c9..3b8937b145 100644 --- a/akka-actor/src/main/scala/akka/util/HashCode.scala +++ b/akka-actor/src/main/scala/akka/util/HashCode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Helpers.scala b/akka-actor/src/main/scala/akka/util/Helpers.scala index 8be8841372..8f5b847cec 100644 --- a/akka-actor/src/main/scala/akka/util/Helpers.scala +++ b/akka-actor/src/main/scala/akka/util/Helpers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Index.scala b/akka-actor/src/main/scala/akka/util/Index.scala index f90c4cb9ae..305f276299 100644 --- a/akka-actor/src/main/scala/akka/util/Index.scala +++ b/akka-actor/src/main/scala/akka/util/Index.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/JavaDurationConverters.scala b/akka-actor/src/main/scala/akka/util/JavaDurationConverters.scala index 32b61e5fd9..65f7f44ce3 100644 --- a/akka-actor/src/main/scala/akka/util/JavaDurationConverters.scala +++ b/akka-actor/src/main/scala/akka/util/JavaDurationConverters.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util import java.time.{ Duration ⇒ JDuration } diff --git a/akka-actor/src/main/scala/akka/util/LineNumbers.scala b/akka-actor/src/main/scala/akka/util/LineNumbers.scala index f04469c8fc..3acfc32157 100644 --- a/akka-actor/src/main/scala/akka/util/LineNumbers.scala +++ b/akka-actor/src/main/scala/akka/util/LineNumbers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/LockUtil.scala b/akka-actor/src/main/scala/akka/util/LockUtil.scala index c09ffe4392..3d2e72fca3 100644 --- a/akka-actor/src/main/scala/akka/util/LockUtil.scala +++ b/akka-actor/src/main/scala/akka/util/LockUtil.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/PrettyDuration.scala b/akka-actor/src/main/scala/akka/util/PrettyDuration.scala index 991ca662c1..d9f0f5af67 100644 --- a/akka-actor/src/main/scala/akka/util/PrettyDuration.scala +++ b/akka-actor/src/main/scala/akka/util/PrettyDuration.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Reflect.scala b/akka-actor/src/main/scala/akka/util/Reflect.scala index e375e50376..58700447b4 100644 --- a/akka-actor/src/main/scala/akka/util/Reflect.scala +++ b/akka-actor/src/main/scala/akka/util/Reflect.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.util import scala.util.control.NonFatal diff --git a/akka-actor/src/main/scala/akka/util/SerializedSuspendableExecutionContext.scala b/akka-actor/src/main/scala/akka/util/SerializedSuspendableExecutionContext.scala index 3058147b24..37a23f1651 100644 --- a/akka-actor/src/main/scala/akka/util/SerializedSuspendableExecutionContext.scala +++ b/akka-actor/src/main/scala/akka/util/SerializedSuspendableExecutionContext.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/StablePriorityQueue.scala b/akka-actor/src/main/scala/akka/util/StablePriorityQueue.scala index 2b9752b411..f6cb43a1e8 100644 --- a/akka-actor/src/main/scala/akka/util/StablePriorityQueue.scala +++ b/akka-actor/src/main/scala/akka/util/StablePriorityQueue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/SubclassifiedIndex.scala b/akka-actor/src/main/scala/akka/util/SubclassifiedIndex.scala index 7a92c0dc85..81b0f2e8c8 100644 --- a/akka-actor/src/main/scala/akka/util/SubclassifiedIndex.scala +++ b/akka-actor/src/main/scala/akka/util/SubclassifiedIndex.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Timeout.scala b/akka-actor/src/main/scala/akka/util/Timeout.scala index 5a7d9f0819..cedcb1b779 100644 --- a/akka-actor/src/main/scala/akka/util/Timeout.scala +++ b/akka-actor/src/main/scala/akka/util/Timeout.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/TypedMultiMap.scala b/akka-actor/src/main/scala/akka/util/TypedMultiMap.scala index ecaa7af56b..f271f8846c 100644 --- a/akka-actor/src/main/scala/akka/util/TypedMultiMap.scala +++ b/akka-actor/src/main/scala/akka/util/TypedMultiMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Unsafe.java b/akka-actor/src/main/scala/akka/util/Unsafe.java index e545d1b2af..d32650716f 100644 --- a/akka-actor/src/main/scala/akka/util/Unsafe.java +++ b/akka-actor/src/main/scala/akka/util/Unsafe.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util; diff --git a/akka-actor/src/main/scala/akka/util/WildcardTree.scala b/akka-actor/src/main/scala/akka/util/WildcardTree.scala index 809d74b0e3..ea97ad7d96 100644 --- a/akka-actor/src/main/scala/akka/util/WildcardTree.scala +++ b/akka-actor/src/main/scala/akka/util/WildcardTree.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.util diff --git a/akka-agent/src/main/scala/akka/agent/Agent.scala b/akka-agent/src/main/scala/akka/agent/Agent.scala index 8fa5802fc2..55c00130ed 100644 --- a/akka-agent/src/main/scala/akka/agent/Agent.scala +++ b/akka-agent/src/main/scala/akka/agent/Agent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.agent diff --git a/akka-bench-jmh/build.sbt b/akka-bench-jmh/build.sbt index 008d14d269..b7beb17645 100644 --- a/akka-bench-jmh/build.sbt +++ b/akka-bench-jmh/build.sbt @@ -7,5 +7,5 @@ disablePlugins(Unidoc) AkkaBuild.defaultSettings AkkaBuild.dontPublishSettings - +AkkaBuild.dontPublishDocsSettings Dependencies.benchJmh diff --git a/akka-bench-jmh-dev/src/main/scala/akka/BenchRunner.scala b/akka-bench-jmh/src/main/scala/akka/BenchRunner.scala similarity index 100% rename from akka-bench-jmh-dev/src/main/scala/akka/BenchRunner.scala rename to akka-bench-jmh/src/main/scala/akka/BenchRunner.scala diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ActorCreationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ActorCreationBenchmark.scala index 777fd0fbe6..1a4671b305 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ActorCreationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ActorCreationBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala index 65647c76a7..715c6900e9 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ActorPathValidationBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala index 243d0940f2..12549e0f88 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ForkJoinActorBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/RouterPoolCreationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/RouterPoolCreationBenchmark.scala index 191b2d01cc..7b53353b8e 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/RouterPoolCreationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/RouterPoolCreationBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/ScheduleBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/ScheduleBenchmark.scala index ab7fff9f6c..2dd97f51a2 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/ScheduleBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/ScheduleBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala index 54baf8c547..34492ee10b 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/StashCreationBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala index c41ced8113..eeebeb6915 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/TellOnlyBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.actor diff --git a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala index 7e5872b5e3..afe7a0e268 100644 --- a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/ORSetMergeBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala index c8c341774f..c44abcfff3 100644 --- a/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/cluster/ddata/VersionVectorBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-bench-jmh/src/main/scala/akka/dispatch/CachingConfigBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/dispatch/CachingConfigBenchmark.scala index ed205f0b76..f9b88d5955 100644 --- a/akka-bench-jmh/src/main/scala/akka/dispatch/CachingConfigBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/dispatch/CachingConfigBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.dispatch diff --git a/akka-bench-jmh/src/main/scala/akka/event/LogLevelAccessBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/event/LogLevelAccessBenchmark.scala index a4685f1d22..25e1bc0693 100644 --- a/akka-bench-jmh/src/main/scala/akka/event/LogLevelAccessBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/event/LogLevelAccessBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.event diff --git a/akka-bench-jmh-dev/src/main/scala/akka/http/HttpBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/http/HttpBenchmark.scala similarity index 94% rename from akka-bench-jmh-dev/src/main/scala/akka/http/HttpBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/http/HttpBenchmark.scala index 1d23a3fe0f..fb0c507ade 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/http/HttpBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/http/HttpBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.http @@ -56,8 +56,7 @@ class HttpBenchmark { def shutdown() = { Await.ready(Http().shutdownAllConnectionPools(), 1.second) binding.unbind() - system.shutdown() - system.awaitTermination() + Await.result(system.terminate(), 5.seconds) } @Benchmark diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala b/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala index 42ad5d1f85..6a746ce26c 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/Common.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/LevelDbBatchingBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/LevelDbBatchingBenchmark.scala index aaa2001036..8c3d1d73ab 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/LevelDbBatchingBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/LevelDbBatchingBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala index 6d1a384eec..8759573492 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala index ce95c367cd..6418e597e2 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala index de7e55d400..79b71d3680 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorWithAtLeastOnceDeliveryBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala similarity index 77% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala index 06c87028c9..97e3079092 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/FlatMapMergeBenchmark.scala @@ -1,15 +1,16 @@ /** - * Copyright (C) 2014 Typesafe Inc. - */ + * Copyright (C) 2014-2016 Typesafe Inc. 
+ */ package akka.stream +import akka.{Done, NotUsed} import akka.actor.ActorSystem import akka.stream.scaladsl._ import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ import scala.concurrent._ -import scala.concurrent.duration.Duration.Inf +import scala.concurrent.duration._ @State(Scope.Benchmark) @OutputTimeUnit(TimeUnit.MILLISECONDS) @@ -24,9 +25,9 @@ class FlatMapMergeBenchmark { @Param(Array("0", "1", "10")) val NumberOfStreams = 0 - var graph: RunnableGraph[Future[Unit]] = _ + var graph: RunnableGraph[Future[Done]] = _ - def createSource(count: Int): Graph[SourceShape[Int], Unit] = akka.stream.Fusing.aggressive(Source.repeat(1).take(count)) + def createSource(count: Int): Graph[SourceShape[Int], NotUsed] = akka.stream.Fusing.aggressive(Source.repeat(1).take(count)) @Setup def setup() { @@ -43,13 +44,12 @@ class FlatMapMergeBenchmark { @TearDown def shutdown() { - system.shutdown() - system.awaitTermination() + Await.result(system.terminate(), 5.seconds) } @Benchmark @OperationsPerInvocation(100000) // Note: needs to match NumberOfElements. def flat_map_merge_100k_elements() { - Await.result(graph.run(), Inf) + Await.result(graph.run(), Duration.Inf) } } diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/FlowMapBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala similarity index 93% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/FlowMapBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala index f318f53910..790e4a6f0b 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/stream/FlowMapBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/FlowMapBenchmark.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream import java.util.concurrent.TimeUnit +import akka.NotUsed import akka.actor.ActorSystem import akka.stream.scaladsl._ import com.typesafe.config.ConfigFactory @@ -13,6 +14,8 @@ import scala.concurrent.Lock import scala.util.Success import akka.stream.impl.fusing.GraphStages import org.reactivestreams._ +import scala.concurrent.Await +import scala.concurrent.duration._ @State(Scope.Benchmark) @OutputTimeUnit(TimeUnit.MILLISECONDS) @@ -57,7 +60,7 @@ class FlowMapBenchmark { final val successFailure = Success(new Exception) // safe to be benchmark scoped because the flows we construct in this bench are stateless - var flow: Source[Int, Unit] = _ + var flow: Source[Int, NotUsed] = _ @Param(Array("8", "32", "128")) val initialInputBufferSize = 0 @@ -109,8 +112,7 @@ class FlowMapBenchmark { @TearDown def shutdown() { - system.shutdown() - system.awaitTermination() + Await.result(system.terminate(), 5.seconds) } @Benchmark @@ -132,5 +134,4 @@ class FlowMapBenchmark { f } - } diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/GraphBuilderBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/GraphBuilderBenchmark.scala similarity index 91% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/GraphBuilderBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/GraphBuilderBenchmark.scala index 8cf52d7172..204eb1c89a 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/stream/GraphBuilderBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/GraphBuilderBenchmark.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
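The benchmark teardowns above replace the removed `system.shutdown()` / `system.awaitTermination()` pair with a single blocking wait on `system.terminate()`. A hedged sketch of the same shutdown pattern; the system name and the five-second bound are illustrative, mirroring the values used in the benchmarks.

```
// Hedged sketch only -- mirrors the teardown change applied to the benchmarks.
import java.util.concurrent.TimeUnit;

import akka.actor.ActorSystem;
import scala.concurrent.Await;
import scala.concurrent.duration.Duration;

public class ShutdownExample {
  public static void main(String[] args) throws Exception {
    ActorSystem system = ActorSystem.create("example");
    // ... use the system ...

    // terminate() initiates shutdown and returns a Future[Terminated] to wait on,
    // replacing the older shutdown() followed by awaitTermination()
    Await.result(system.terminate(), Duration.create(5, TimeUnit.SECONDS));
  }
}
```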
*/ package akka.stream diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/InterpreterBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala similarity index 100% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/InterpreterBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/InterpreterBenchmark.scala diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/MaterializationBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala similarity index 73% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/MaterializationBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala index 79f6f6f893..4f9397a2a9 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/stream/MaterializationBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/MaterializationBenchmark.scala @@ -1,13 +1,16 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream import java.util.concurrent.TimeUnit +import akka.NotUsed import akka.actor.ActorSystem import akka.stream.scaladsl._ import org.openjdk.jmh.annotations._ +import scala.concurrent.Await +import scala.concurrent.duration._ object MaterializationBenchmark { @@ -38,7 +41,7 @@ object MaterializationBenchmark { }) val graphWithNestedImportsBuilder = (numOfNestedGraphs: Int) => { - var flow: Graph[FlowShape[Unit, Unit], Unit] = Flow[Unit].map(identity) + var flow: Graph[FlowShape[Unit, Unit], NotUsed] = Flow[Unit].map(identity) for (_ <- 1 to numOfNestedGraphs) { flow = GraphDSL.create(flow) { b ⇒ flow ⇒ @@ -50,22 +53,23 @@ object MaterializationBenchmark { flow ⇒ import GraphDSL.Implicits._ Source.single(()) ~> flow ~> Sink.ignore - ClosedShape + ClosedShape }) } val graphWithImportedFlowBuilder = (numOfFlows: Int) => - RunnableGraph.fromGraph(GraphDSL.create(Source.single(())) { implicit b ⇒ source ⇒ - import GraphDSL.Implicits._ - val flow = Flow[Unit].map(identity) - var out: Outlet[Unit] = source.out - for (i <- 0 until numOfFlows) { - val flowShape = b.add(flow) - out ~> flowShape - out = flowShape.outlet - } - out ~> Sink.ignore - ClosedShape + RunnableGraph.fromGraph(GraphDSL.create(Source.single(())) { implicit b ⇒ + source ⇒ + import GraphDSL.Implicits._ + val flow = Flow[Unit].map(identity) + var out: Outlet[Unit] = source.out + for (i <- 0 until numOfFlows) { + val flowShape = b.add(flow) + out ~> flowShape + out = flowShape.outlet + } + out ~> Sink.ignore + ClosedShape }) } @@ -78,10 +82,10 @@ class MaterializationBenchmark { implicit val system = ActorSystem("MaterializationBenchmark") implicit val materializer = ActorMaterializer() - var flowWithMap: RunnableGraph[Unit] = _ - var graphWithJunctions: RunnableGraph[Unit] = _ - var graphWithNestedImports: RunnableGraph[Unit] = _ - var graphWithImportedFlow: RunnableGraph[Unit] = _ + var flowWithMap: RunnableGraph[NotUsed] = _ + var graphWithJunctions: RunnableGraph[NotUsed] = _ + var graphWithNestedImports: RunnableGraph[NotUsed] = _ + var graphWithImportedFlow: RunnableGraph[NotUsed] = _ @Param(Array("1", "10", "100", "1000")) val complexity = 0 @@ -96,8 +100,7 @@ class MaterializationBenchmark { @TearDown def shutdown() { - system.shutdown() - system.awaitTermination() + Await.result(system.terminate(), 5.seconds) } @Benchmark diff --git a/akka-bench-jmh-dev/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala similarity index 
81% rename from akka-bench-jmh-dev/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala rename to akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala index eb4dc05bac..776da4c610 100644 --- a/akka-bench-jmh-dev/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/stream/io/FileSourcesBenchmark.scala @@ -1,20 +1,21 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.io -import java.io.{FileInputStream, File} +import java.io.{ FileInputStream, File } import java.util.concurrent.TimeUnit +import akka.{Done, NotUsed} import akka.actor.ActorSystem -import akka.stream.{Attributes, ActorMaterializer} +import akka.stream.{ Attributes, ActorMaterializer } import akka.stream.scaladsl._ import akka.util.ByteString import org.openjdk.jmh.annotations._ import scala.concurrent.duration._ -import scala.concurrent.{Promise, Await, Future} +import scala.concurrent.{ Promise, Await, Future } /** * Benchmark (bufSize) Mode Cnt Score Error Units @@ -45,9 +46,9 @@ class FileSourcesBenchmark { @Param(Array("2048")) val bufSize = 0 - var fileChannelSource: Source[ByteString, Future[Long]] = _ - var fileInputStreamSource: Source[ByteString, Future[Long]] = _ - var ioSourceLinesIterator: Source[ByteString, Unit] = _ + var fileChannelSource: Source[ByteString, Future[IOResult]] = _ + var fileInputStreamSource: Source[ByteString, Future[IOResult]] = _ + var ioSourceLinesIterator: Source[ByteString, NotUsed] = _ @Setup def setup() { @@ -63,8 +64,7 @@ class FileSourcesBenchmark { @TearDown def shutdown() { - system.shutdown() - system.awaitTermination() + Await.result(system.terminate(), Duration.Inf) } @Benchmark @@ -95,11 +95,10 @@ class FileSourcesBenchmark { */ @Benchmark def naive_ioSourceLinesIterator() = { - val p = Promise[Unit]() + val p = Promise[Done]() ioSourceLinesIterator.to(Sink.onComplete(p.complete(_))).run() Await.result(p.future, 30.seconds) } - } diff --git a/akka-camel/src/main/scala/akka/camel/Activation.scala b/akka-camel/src/main/scala/akka/camel/Activation.scala index ea43ba0c20..ee250614e3 100644 --- a/akka-camel/src/main/scala/akka/camel/Activation.scala +++ b/akka-camel/src/main/scala/akka/camel/Activation.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/ActorNotRegisteredException.scala b/akka-camel/src/main/scala/akka/camel/ActorNotRegisteredException.scala index a4c8afd8a7..47434e97cf 100644 --- a/akka-camel/src/main/scala/akka/camel/ActorNotRegisteredException.scala +++ b/akka-camel/src/main/scala/akka/camel/ActorNotRegisteredException.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel /** diff --git a/akka-camel/src/main/scala/akka/camel/ActorRouteDefinition.scala b/akka-camel/src/main/scala/akka/camel/ActorRouteDefinition.scala index cb8603ec4a..035c710c3e 100644 --- a/akka-camel/src/main/scala/akka/camel/ActorRouteDefinition.scala +++ b/akka-camel/src/main/scala/akka/camel/ActorRouteDefinition.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Camel.scala b/akka-camel/src/main/scala/akka/camel/Camel.scala index 6804105dc9..4bac70fe1e 100644 --- a/akka-camel/src/main/scala/akka/camel/Camel.scala +++ b/akka-camel/src/main/scala/akka/camel/Camel.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/CamelMessage.scala b/akka-camel/src/main/scala/akka/camel/CamelMessage.scala index 658e5a322c..9f572cb1d4 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelMessage.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelMessage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/CamelSupport.scala b/akka-camel/src/main/scala/akka/camel/CamelSupport.scala index 1fa772eaf7..f3f74cf859 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelSupport.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Consumer.scala b/akka-camel/src/main/scala/akka/camel/Consumer.scala index 63236f4224..6b05e80bc3 100644 --- a/akka-camel/src/main/scala/akka/camel/Consumer.scala +++ b/akka-camel/src/main/scala/akka/camel/Consumer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/ContextProvider.scala b/akka-camel/src/main/scala/akka/camel/ContextProvider.scala index 3bc3ef91ef..381a5ca404 100644 --- a/akka-camel/src/main/scala/akka/camel/ContextProvider.scala +++ b/akka-camel/src/main/scala/akka/camel/ContextProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Producer.scala b/akka-camel/src/main/scala/akka/camel/Producer.scala index 68740ae349..5773f3e464 100644 --- a/akka-camel/src/main/scala/akka/camel/Producer.scala +++ b/akka-camel/src/main/scala/akka/camel/Producer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/internal/ActivationMessage.scala b/akka-camel/src/main/scala/akka/camel/internal/ActivationMessage.scala index 8113379df5..1ef256d37f 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/ActivationMessage.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/ActivationMessage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/ActivationTracker.scala b/akka-camel/src/main/scala/akka/camel/internal/ActivationTracker.scala index af5bb53709..22d334029e 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/ActivationTracker.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/ActivationTracker.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/CamelExchangeAdapter.scala b/akka-camel/src/main/scala/akka/camel/internal/CamelExchangeAdapter.scala index a4a9baa9dd..90496c3656 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/CamelExchangeAdapter.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/CamelExchangeAdapter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/CamelSupervisor.scala b/akka-camel/src/main/scala/akka/camel/internal/CamelSupervisor.scala index 360aaea5ec..068c6361b0 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/CamelSupervisor.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/CamelSupervisor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/ConsumerActorRouteBuilder.scala b/akka-camel/src/main/scala/akka/camel/internal/ConsumerActorRouteBuilder.scala index c30d4a3d0e..663079ed99 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/ConsumerActorRouteBuilder.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/ConsumerActorRouteBuilder.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala b/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala index 65b27af5be..3ffef10129 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal diff --git a/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala index 1172977778..447a89cc8e 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal.component diff --git a/akka-camel/src/main/scala/akka/camel/javaapi/UntypedConsumerActor.scala b/akka-camel/src/main/scala/akka/camel/javaapi/UntypedConsumerActor.scala index d8fbd26487..c1f4cf4805 100644 --- a/akka-camel/src/main/scala/akka/camel/javaapi/UntypedConsumerActor.scala +++ b/akka-camel/src/main/scala/akka/camel/javaapi/UntypedConsumerActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.javaapi diff --git a/akka-camel/src/main/scala/akka/camel/javaapi/UntypedProducerActor.scala b/akka-camel/src/main/scala/akka/camel/javaapi/UntypedProducerActor.scala index 1ea80f1f3e..98e0426634 100644 --- a/akka-camel/src/main/scala/akka/camel/javaapi/UntypedProducerActor.scala +++ b/akka-camel/src/main/scala/akka/camel/javaapi/UntypedProducerActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.camel.javaapi diff --git a/akka-camel/src/main/scala/akka/package.scala b/akka-camel/src/main/scala/akka/package.scala index cf389c4c8a..906fdb9ee6 100644 --- a/akka-camel/src/main/scala/akka/package.scala +++ b/akka-camel/src/main/scala/akka/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-camel/src/test/java/akka/camel/ConsumerJavaTest.java b/akka-camel/src/test/java/akka/camel/ConsumerJavaTest.java index 3eeeea7bed..7907230378 100644 --- a/akka-camel/src/test/java/akka/camel/ConsumerJavaTest.java +++ b/akka-camel/src/test/java/akka/camel/ConsumerJavaTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/java/akka/camel/MessageJavaTest.java b/akka-camel/src/test/java/akka/camel/MessageJavaTest.java index dda56addea..d9cf182ea7 100644 --- a/akka-camel/src/test/java/akka/camel/MessageJavaTest.java +++ b/akka-camel/src/test/java/akka/camel/MessageJavaTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java b/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java index 550c05ecdb..fe3f1c5a03 100644 --- a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java +++ b/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java index 38bb3eb7ab..56428009a8 100644 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java +++ b/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java index 7e7a368ffd..3968126ba6 100644 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java +++ b/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java index 0a78a0761e..42dc370df1 100644 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java +++ b/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel/src/test/scala/akka/camel/ActivationIntegrationTest.scala b/akka-camel/src/test/scala/akka/camel/ActivationIntegrationTest.scala index 78c5cb512a..5876b34da1 100644 --- a/akka-camel/src/test/scala/akka/camel/ActivationIntegrationTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ActivationIntegrationTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/CamelConfigSpec.scala b/akka-camel/src/test/scala/akka/camel/CamelConfigSpec.scala index a973ceb5b9..aadc334ee0 100644 --- a/akka-camel/src/test/scala/akka/camel/CamelConfigSpec.scala +++ b/akka-camel/src/test/scala/akka/camel/CamelConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/CamelExchangeAdapterTest.scala b/akka-camel/src/test/scala/akka/camel/CamelExchangeAdapterTest.scala index f068cbc88c..2485c440bc 100644 --- a/akka-camel/src/test/scala/akka/camel/CamelExchangeAdapterTest.scala +++ b/akka-camel/src/test/scala/akka/camel/CamelExchangeAdapterTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/CamelMessageTest.scala b/akka-camel/src/test/scala/akka/camel/CamelMessageTest.scala index 2439d81132..347d6fe209 100644 --- a/akka-camel/src/test/scala/akka/camel/CamelMessageTest.scala +++ b/akka-camel/src/test/scala/akka/camel/CamelMessageTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/ConcurrentActivationTest.scala b/akka-camel/src/test/scala/akka/camel/ConcurrentActivationTest.scala index c2ae54382f..17bda7b59c 100644 --- a/akka-camel/src/test/scala/akka/camel/ConcurrentActivationTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ConcurrentActivationTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerIntegrationTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerIntegrationTest.scala index 5532489b5e..2b3f417297 100644 --- a/akka-camel/src/test/scala/akka/camel/ConsumerIntegrationTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ConsumerIntegrationTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/DefaultCamelTest.scala b/akka-camel/src/test/scala/akka/camel/DefaultCamelTest.scala index d29b6a20ee..ba9037b825 100644 --- a/akka-camel/src/test/scala/akka/camel/DefaultCamelTest.scala +++ b/akka-camel/src/test/scala/akka/camel/DefaultCamelTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/MessageScalaTest.scala b/akka-camel/src/test/scala/akka/camel/MessageScalaTest.scala index 20b0402e73..42f5fdbffb 100644 --- a/akka-camel/src/test/scala/akka/camel/MessageScalaTest.scala +++ b/akka-camel/src/test/scala/akka/camel/MessageScalaTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala b/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala index e5aad46997..dfa3baebc7 100644 --- a/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/TestSupport.scala b/akka-camel/src/test/scala/akka/camel/TestSupport.scala index 4fe8ac8e60..fd8491ed5c 100644 --- a/akka-camel/src/test/scala/akka/camel/TestSupport.scala +++ b/akka-camel/src/test/scala/akka/camel/TestSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/UntypedProducerTest.scala b/akka-camel/src/test/scala/akka/camel/UntypedProducerTest.scala index 303e1fc179..f6087a0a77 100644 --- a/akka-camel/src/test/scala/akka/camel/UntypedProducerTest.scala +++ b/akka-camel/src/test/scala/akka/camel/UntypedProducerTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/test/scala/akka/camel/internal/component/ActorComponentConfigurationTest.scala b/akka-camel/src/test/scala/akka/camel/internal/component/ActorComponentConfigurationTest.scala index 7bce201cc4..9a41bb1edd 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/component/ActorComponentConfigurationTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/component/ActorComponentConfigurationTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal.component @@ -17,11 +17,11 @@ class ActorComponentConfigurationTest extends WordSpec with Matchers with Shared val component: Component = camel.context.getComponent("akka") "Endpoint url config should be correctly parsed" in { - val actorEndpointConfig = component.createEndpoint("akka://test/user/$a?autoAck=false&replyTimeout=987000000+nanos").asInstanceOf[ActorEndpointConfig] + val actorEndpointConfig = component.createEndpoint(s"akka://test/user/$$a?autoAck=false&replyTimeout=987000000+nanos").asInstanceOf[ActorEndpointConfig] actorEndpointConfig should have( - 'endpointUri("akka://test/user/$a?autoAck=false&replyTimeout=987000000+nanos"), - 'path(ActorEndpointPath.fromCamelPath("akka://test/user/$a")), + 'endpointUri(s"akka://test/user/$$a?autoAck=false&replyTimeout=987000000+nanos"), + 'path(ActorEndpointPath.fromCamelPath(s"akka://test/user/$$a")), 'autoAck(false), 'replyTimeout(987000000 nanos)) } diff --git a/akka-camel/src/test/scala/akka/camel/internal/component/ActorEndpointPathTest.scala b/akka-camel/src/test/scala/akka/camel/internal/component/ActorEndpointPathTest.scala index 775c1b10f8..5a686e94f4 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/component/ActorEndpointPathTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/component/ActorEndpointPathTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal.component diff --git a/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala b/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala index 50361e1262..50227844ac 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
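In the `ActorComponentConfigurationTest` hunk above, the endpoint URIs become `s"..."` interpolated strings, so the literal `$a` actor name has to be written as `$$a` (`$$` is the escape for a literal dollar sign inside an s-interpolator; the switch presumably also silences the "possible missing interpolator" lint on the old plain literal). A tiny standalone illustration, not taken from the project:

```
object DollarEscape extends App {
  val plain   = "akka://test/user/$a"    // literal $ in a plain string
  val escaped = s"akka://test/user/$$a"  // $$ escapes the $ inside an s-interpolator
  assert(plain == escaped)               // both evaluate to akka://test/user/$a
}
```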
*/ package akka.camel.internal.component diff --git a/akka-camel/src/test/scala/akka/camel/internal/component/DurationConverterTest.scala b/akka-camel/src/test/scala/akka/camel/internal/component/DurationConverterTest.scala index 014bf42d5d..6dd1315b7e 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/component/DurationConverterTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/component/DurationConverterTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.camel.internal.component diff --git a/akka-cluster-metrics/build.sbt b/akka-cluster-metrics/build.sbt index 02b421c41f..1997251d4d 100644 --- a/akka-cluster-metrics/build.sbt +++ b/akka-cluster-metrics/build.sbt @@ -12,7 +12,7 @@ OSGi.clusterMetrics Dependencies.clusterMetrics -//MimaKeys.previousArtifacts := akkaPreviousArtifacts("akka-cluster-metrics").value +MimaKeys.previousArtifacts := akkaPreviousArtifacts("akka-cluster-metrics").value parallelExecution in Test := false diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsCollector.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsCollector.scala index 8f0bf7a95b..b1a91612bf 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsCollector.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsCollector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsExtension.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsExtension.scala index 37e7b6005a..dfbd0a9b99 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsExtension.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala index 9cb780cb98..e1737b3608 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsSettings.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsSettings.scala index 7b8b763d43..5f5f97a49c 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsSettings.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsSettings.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsStrategy.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsStrategy.scala index 0c39dcc136..c603f86c20 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsStrategy.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsStrategy.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/EWMA.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/EWMA.scala index 396b48cc36..41ad4fad75 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/EWMA.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/EWMA.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Metric.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Metric.scala index 59294e7d53..5c1b9a2891 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Metric.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Metric.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala index 85375d1c46..085a52b7ee 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Provision.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Provision.scala index 19da06186d..31ebe836a3 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Provision.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/Provision.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/protobuf/MessageSerializer.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/protobuf/MessageSerializer.scala index 6d66b92c69..2cf7c091f1 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/protobuf/MessageSerializer.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/protobuf/MessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.metrics.protobuf diff --git a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala index 0d681b8b1f..2a24853654 100644 --- a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala +++ b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala index 12af43b20a..ae0c9f9a72 100644 --- a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala +++ b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/resources/logback-test.xml b/akka-cluster-metrics/src/test/resources/logback-test.xml index af0c295467..f4d306cdca 100644 --- a/akka-cluster-metrics/src/test/resources/logback-test.xml +++ b/akka-cluster-metrics/src/test/resources/logback-test.xml @@ -1,7 +1,7 @@ - + diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala index f757edfd61..b79ca822ab 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala index 208d2b7c8e..2adea64666 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsSettingsSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsSettingsSpec.scala index 98d1dff7a0..ae2ea9f214 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsSettingsSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsSettingsSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/EWMASpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/EWMASpec.scala index f85876a2b0..5b8abf2f4b 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/EWMASpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/EWMASpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricSpec.scala index 70cf3e9da2..e365a53b01 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricsCollectorSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricsCollectorSpec.scala index 22bf68ff28..c1f4f41a6d 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricsCollectorSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/MetricsCollectorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/TestUtil.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/TestUtil.scala index a3715ea9c9..7a233f5828 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/TestUtil.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/TestUtil.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/protobuf/MessageSerializerSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/protobuf/MessageSerializerSpec.scala index e68b9f8b94..fcc22481d0 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/protobuf/MessageSerializerSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/protobuf/MessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.metrics.protobuf diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala index 938fec5e90..24d83d9373 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding @@ -128,7 +128,7 @@ import akka.dispatch.Dispatchers * reduce memory consumption. This is done by the application specific implementation of * the entity actors for example by defining receive timeout (`context.setReceiveTimeout`). * If a message is already enqueued to the entity when it stops itself the enqueued message - * in the mailbox will be dropped. 
To support graceful passivation without loosing such + * in the mailbox will be dropped. To support graceful passivation without losing such * messages the entity actor can send [[ShardRegion.Passivate]] to its parent `ShardRegion`. * The specified wrapped message in `Passivate` will be sent back to the entity, which is * then supposed to stop itself. Incoming messages will be buffered by the `ShardRegion` diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSerializable.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSerializable.scala index 7d5ffb8211..b14d80e2b7 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSerializable.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSerializable.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSettings.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSettings.scala index bac03c9c58..fa902b354d 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSettings.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterShardingSettings.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/RemoveInternalClusterShardingData.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/RemoveInternalClusterShardingData.scala index a8e7e8d019..1066e4e730 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/RemoveInternalClusterShardingData.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/RemoveInternalClusterShardingData.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala index 5adf71fc9c..6c8f8faf8e 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala index 8be730f4b1..289505f93c 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
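The corrected scaladoc above ("without losing such messages") describes graceful passivation: an idle entity asks its parent `ShardRegion` to passivate it, the region buffers incoming messages for that entity, and the wrapped stop message is sent back so the entity can stop itself without dropping mail. A minimal sketch of an entity that passivates itself on receive timeout (the timeout value and actor body are illustrative, not part of the PR):

```
import akka.actor.{ Actor, PoisonPill, ReceiveTimeout }
import akka.cluster.sharding.ShardRegion.Passivate
import scala.concurrent.duration._

class CounterEntity extends Actor {
  context.setReceiveTimeout(120.seconds)
  var count = 0

  def receive = {
    case ReceiveTimeout ⇒
      // ask the parent ShardRegion to passivate this entity; the region buffers
      // new messages and sends PoisonPill back, which finally stops the actor
      context.parent ! Passivate(stopMessage = PoisonPill)
    case _ ⇒
      count += 1
  }
}
```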
*/ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardRegion.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardRegion.scala index 884000caff..381fdb3566 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardRegion.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardRegion.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding @@ -135,7 +135,7 @@ object ShardRegion { * reduce memory consumption. This is done by the application specific implementation of * the entity actors for example by defining receive timeout (`context.setReceiveTimeout`). * If a message is already enqueued to the entity when it stops itself the enqueued message - * in the mailbox will be dropped. To support graceful passivation without loosing such + * in the mailbox will be dropped. To support graceful passivation without losing such * messages the entity actor can send this `Passivate` message to its parent `ShardRegion`. * The specified wrapped `stopMessage` will be sent back to the entity, which is * then supposed to stop itself. Incoming messages will be buffered by the `ShardRegion` diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializer.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializer.scala index be45ad81eb..edb6bd19cf 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializer.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding.protobuf diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingCustomShardAllocationSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingCustomShardAllocationSpec.scala index 6f822d4ae4..de0cde285a 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingCustomShardAllocationSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingCustomShardAllocationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala index 07a612aa98..e75d56d487 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala index c856c556f8..49f73ce0e0 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala index 988dcf373c..2e7b477806 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGracefulShutdownSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGracefulShutdownSpec.scala index 756c6c7678..254818085e 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGracefulShutdownSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGracefulShutdownSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala index 31b79f8dfe..5d006a98cb 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala index 68e196c159..8afdf768df 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/test/java/akka/cluster/sharding/ClusterShardingTest.java b/akka-cluster-sharding/src/test/java/akka/cluster/sharding/ClusterShardingTest.java index 6d0ec7d98a..51c22800d4 100644 --- a/akka-cluster-sharding/src/test/java/akka/cluster/sharding/ClusterShardingTest.java +++ b/akka-cluster-sharding/src/test/java/akka/cluster/sharding/ClusterShardingTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.sharding; diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/LeastShardAllocationStrategySpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/LeastShardAllocationStrategySpec.scala index 3ca0d1f85b..5608f96757 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/LeastShardAllocationStrategySpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/LeastShardAllocationStrategySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala index fce61544b2..c030453245 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializerSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializerSpec.scala index d6ee7f85ca..5116292735 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializerSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/protobuf/ClusterShardingMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.sharding.protobuf diff --git a/akka-cluster-tools/src/main/resources/reference.conf b/akka-cluster-tools/src/main/resources/reference.conf index f504547ae4..b5f5617cfb 100644 --- a/akka-cluster-tools/src/main/resources/reference.conf +++ b/akka-cluster-tools/src/main/resources/reference.conf @@ -33,7 +33,6 @@ akka.cluster.pub-sub { # If not specified default dispatcher is used. # If specified you need to define the settings of the actual dispatcher. use-dispatcher = "" - } # //#pub-sub-ext-config diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala b/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala index 805e08e07f..6ee64e685f 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.client diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializer.scala b/akka-cluster-tools/src/main/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializer.scala index 62b22ebb3c..19ea2d1743 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializer.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.cluster.client.protobuf diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala index 563791cb40..bbca5e2190 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.pubsub @@ -10,19 +10,7 @@ import scala.concurrent.duration._ import java.util.concurrent.ThreadLocalRandom import java.net.URLEncoder import java.net.URLDecoder -import akka.actor.Actor -import akka.actor.ActorContext -import akka.actor.ActorLogging -import akka.actor.ActorPath -import akka.actor.ActorRef -import akka.actor.ActorSystem -import akka.actor.Address -import akka.actor.ExtendedActorSystem -import akka.actor.Extension -import akka.actor.ExtensionId -import akka.actor.ExtensionIdProvider -import akka.actor.Props -import akka.actor.Terminated +import akka.actor._ import akka.cluster.Cluster import akka.cluster.ClusterEvent._ import akka.cluster.Member @@ -38,10 +26,7 @@ import akka.routing.ConsistentHashingRoutingLogic import akka.routing.BroadcastRoutingLogic import scala.collection.immutable.TreeMap import com.typesafe.config.Config -import akka.actor.NoSerializationVerificationNeeded -import akka.actor.Deploy import akka.dispatch.Dispatchers -import akka.actor.DeadLetterSuppression object DistributedPubSubSettings { /** @@ -251,6 +236,38 @@ object DistributedPubSubMediator { @SerialVersionUID(1L) final case class SendToOneSubscriber(msg: Any) + /** + * Messages used to encode protocol to make sure that we do not send Subscribe/Unsubscribe message to + * child (mediator -> topic, topic -> group) during a period of transition. Protects from situations like: + * + * Sending Subscribe/Unsubscribe message to child actor after child has been terminated + * but Terminate message did not yet arrive to parent. + * + * Sending Subscribe/Unsubscribe message to child actor that has Prune message queued and pruneDeadline set. + * + * In both of those situation parent actor still thinks that child actor is alive and forwards messages to it resulting in lost ACKs. + */ + trait ChildActorTerminationProtocol + + /** + * Passivate-like message sent from child to parent, used to signal that sender has no subscribers and no child actors. + */ + case object NoMoreSubscribers extends ChildActorTerminationProtocol + + /** + * Sent from parent to child actor to signalize that messages are being buffered. When received by child actor + * if no [[Subscribe]] message has been received after sending [[NoMoreSubscribers]] message child actor will stop itself. + */ + case object TerminateRequest extends ChildActorTerminationProtocol + + /** + * Sent from child to parent actor as response to [[TerminateRequest]] in case [[Subscribe]] message arrived + * after sending [[NoMoreSubscribers]] but before receiving [[TerminateRequest]]. + * + * When received by the parent buffered messages will be forwarded to child actor for processing. 
+ */ + case object NewSubscriberArrived extends ChildActorTerminationProtocol + @SerialVersionUID(1L) final case class MediatorRouterEnvelope(msg: Any) extends RouterEnvelope { override def message = msg @@ -258,6 +275,10 @@ object DistributedPubSubMediator { def encName(s: String) = URLEncoder.encode(s, "utf-8") + def mkKey(ref: ActorRef): String = mkKey(ref.path) + + def mkKey(path: ActorPath): String = path.toStringWithoutAddress + trait TopicLike extends Actor { import context.dispatcher val pruneInterval: FiniteDuration = emptyTimeToLive / 2 @@ -286,7 +307,15 @@ object DistributedPubSubMediator { case Terminated(ref) ⇒ remove(ref) case Prune ⇒ - for (d ← pruneDeadline if d.isOverdue) context stop self + for (d ← pruneDeadline if d.isOverdue) { + pruneDeadline = None + context.parent ! NoMoreSubscribers + } + case TerminateRequest ⇒ + if (subscribers.isEmpty && context.children.isEmpty) + context stop self + else + context.parent ! NewSubscriberArrived case msg ⇒ subscribers foreach { _ forward msg } } @@ -303,28 +332,46 @@ object DistributedPubSubMediator { } } - class Topic(val emptyTimeToLive: FiniteDuration, routingLogic: RoutingLogic) extends TopicLike { + class Topic(val emptyTimeToLive: FiniteDuration, routingLogic: RoutingLogic) extends TopicLike with PerGroupingBuffer { def business = { case msg @ Subscribe(_, Some(group), _) ⇒ val encGroup = encName(group) - context.child(encGroup) match { - case Some(g) ⇒ g forward msg - case None ⇒ - val g = context.actorOf(Props(classOf[Group], emptyTimeToLive, routingLogic), name = encGroup) - g forward msg - context watch g - context.parent ! RegisterTopic(g) + bufferOr(mkKey(self.path / encGroup), msg, sender()) { + context.child(encGroup) match { + case Some(g) ⇒ g forward msg + case None ⇒ newGroupActor(encGroup) forward msg + } } pruneDeadline = None case msg @ Unsubscribe(_, Some(group), _) ⇒ - context.child(encName(group)) match { - case Some(g) ⇒ g forward msg - case None ⇒ // no such group here + val encGroup = encName(group) + bufferOr(mkKey(self.path / encGroup), msg, sender()) { + context.child(encGroup) match { + case Some(g) ⇒ g forward msg + case None ⇒ // no such group here + } } case msg: Subscribed ⇒ context.parent forward msg case msg: Unsubscribed ⇒ context.parent forward msg + case NoMoreSubscribers ⇒ + val key = mkKey(sender()) + initializeGrouping(key) + sender() ! TerminateRequest + case NewSubscriberArrived ⇒ + val key = mkKey(sender()) + forwardMessages(key, sender()) + case Terminated(ref) ⇒ + val key = mkKey(ref) + recreateAndForwardMessagesIfNeeded(key, newGroupActor(ref.path.name)) + } + + def newGroupActor(encGroup: String): ActorRef = { + val g = context.actorOf(Props(classOf[Group], emptyTimeToLive, routingLogic), name = encGroup) + context watch g + context.parent ! RegisterTopic(g) + g } } @@ -424,7 +471,7 @@ trait DistributedPubSubMessage extends Serializable * [[DistributedPubSubMediator.SubscribeAck]] and [[DistributedPubSubMediator.UnsubscribeAck]] * replies. 
*/ -class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Actor with ActorLogging { +class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Actor with ActorLogging with PerGroupingBuffer { import DistributedPubSubMediator._ import DistributedPubSubMediator.Internal._ @@ -521,18 +568,28 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Act case msg @ Subscribe(topic, _, _) ⇒ // each topic is managed by a child actor with the same name as the topic + val encTopic = encName(topic) - context.child(encTopic) match { - case Some(t) ⇒ t forward msg - case None ⇒ - val t = context.actorOf(Props(classOf[Topic], removedTimeToLive, routingLogic), name = encTopic) - t forward msg - registerTopic(t) + + bufferOr(mkKey(self.path / encTopic), msg, sender()) { + context.child(encTopic) match { + case Some(t) ⇒ t forward msg + case None ⇒ newTopicActor(encTopic) forward msg + } } case msg @ RegisterTopic(t) ⇒ registerTopic(t) + case NoMoreSubscribers ⇒ + val key = mkKey(sender()) + initializeGrouping(key) + sender() ! TerminateRequest + + case NewSubscriberArrived ⇒ + val key = mkKey(sender()) + forwardMessages(key, sender()) + case GetTopics ⇒ { sender ! CurrentTopics(getCurrentTopics()) } @@ -541,9 +598,12 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Act ref ! ack case msg @ Unsubscribe(topic, _, _) ⇒ - context.child(encName(topic)) match { - case Some(t) ⇒ t forward msg - case None ⇒ // no such topic here + val encTopic = encName(topic) + bufferOr(mkKey(self.path / encTopic), msg, sender()) { + context.child(encTopic) match { + case Some(t) ⇒ t forward msg + case None ⇒ // no such topic here + } } case msg @ Unsubscribed(ack, ref) ⇒ @@ -585,6 +645,7 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Act put(key, None) case _ ⇒ } + recreateAndForwardMessagesIfNeeded(key, newTopicActor(a.path.name)) case state: CurrentClusterState ⇒ nodes = state.members.collect { @@ -669,9 +730,9 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Act context.watch(ref) } - def mkKey(ref: ActorRef): String = mkKey(ref.path) + def mkKey(ref: ActorRef): String = Internal.mkKey(ref) - def mkKey(path: ActorPath): String = path.toStringWithoutAddress + def mkKey(path: ActorPath): String = Internal.mkKey(path) def myVersions: Map[Address, Long] = registry.map { case (owner, bucket) ⇒ (owner -> bucket.version) } @@ -724,6 +785,12 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings) extends Act registry += owner -> bucket.copy(content = bucket.content -- oldRemoved) } } + + def newTopicActor(encTopic: String): ActorRef = { + val t = context.actorOf(Props(classOf[Topic], removedTimeToLive, routingLogic), name = encTopic) + registerTopic(t) + t + } } object DistributedPubSub extends ExtensionId[DistributedPubSub] with ExtensionIdProvider { diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/PerGroupingBuffer.scala b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/PerGroupingBuffer.scala new file mode 100644 index 0000000000..ffd10a8923 --- /dev/null +++ b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/PerGroupingBuffer.scala @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package akka.cluster.pubsub + +import akka.actor.ActorRef + +private[pubsub] trait PerGroupingBuffer { + private type BufferedMessages = Vector[(Any, ActorRef)] + + private var buffers: Map[String, BufferedMessages] = Map.empty + + private var totalBufferSize = 0 + + def bufferOr(grouping: String, message: Any, originalSender: ActorRef)(action: ⇒ Unit): Unit = { + buffers.get(grouping) match { + case None ⇒ action + case Some(messages) ⇒ + buffers = buffers.updated(grouping, messages :+ ((message, originalSender))) + totalBufferSize += 1 + } + } + + def recreateAndForwardMessagesIfNeeded(grouping: String, recipient: ⇒ ActorRef): Unit = { + buffers.get(grouping).filter(_.nonEmpty).foreach { messages ⇒ + forwardMessages(messages, recipient) + totalBufferSize -= messages.length + } + buffers -= grouping + } + + def forwardMessages(grouping: String, recipient: ActorRef): Unit = { + buffers.get(grouping).foreach { messages ⇒ + forwardMessages(messages, recipient) + totalBufferSize -= messages.length + } + buffers -= grouping + } + + private def forwardMessages(messages: BufferedMessages, recipient: ActorRef): Unit = { + messages.foreach { + case (message, originalSender) ⇒ recipient.tell(message, originalSender) + } + } + + def initializeGrouping(grouping: String): Unit = buffers += grouping -> Vector.empty +} diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializer.scala b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializer.scala index 2f24579e1c..cdb24a2647 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializer.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.pubsub.protobuf diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala index 2d93b47986..37ed3998c3 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonProxy.scala b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonProxy.scala index 7c81a17419..93d5545e96 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonProxy.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonProxy.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
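Taken together, the `ChildActorTerminationProtocol` messages and the new `PerGroupingBuffer` trait above implement a race-free teardown of idle topics and groups: an idle child offers to terminate with `NoMoreSubscribers`, the parent starts buffering for that child's key (`initializeGrouping`) and answers `TerminateRequest`, and the child either stops itself or replies `NewSubscriberArrived`, in which case the parent flushes the buffered messages back (`forwardMessages`); if the child really did terminate, `recreateAndForwardMessagesIfNeeded` replays the buffer to a freshly created replacement. A simplified, self-contained re-enactment of that handshake (message names mirror the PR, but this is an illustration, not the mediator's actual internals):

```
import akka.actor.{ Actor, Props, Terminated }

case object NoMoreSubscribers
case object TerminateRequest
case object NewSubscriberArrived

class GroupSketch extends Actor {
  var subscribers = 0
  def receive = {
    case "subscribe"      ⇒ subscribers += 1                   // a subscription that raced in
    case "prune-overdue"  ⇒ context.parent ! NoMoreSubscribers // idle: offer to terminate
    case TerminateRequest ⇒
      if (subscribers == 0) context.stop(self)                 // still idle: stop for real
      else context.parent ! NewSubscriberArrived               // not idle any more: ask for the buffer
  }
}

class TopicSketch extends Actor {
  val group = context.watch(context.actorOf(Props[GroupSketch], "group"))
  var buffer = Vector.empty[Any]   // what PerGroupingBuffer keeps per child key
  var buffering = false
  def receive = {
    case NoMoreSubscribers    ⇒ buffering = true; group ! TerminateRequest      // initializeGrouping + request
    case NewSubscriberArrived ⇒ buffer.foreach(group forward _); buffer = Vector.empty; buffering = false
    case Terminated(_)        ⇒ buffer = Vector.empty; buffering = false        // real code recreates the child and replays the buffer
    case msg if buffering     ⇒ buffer :+= msg                                  // bufferOr(...) while the handshake is in flight
    case msg                  ⇒ group forward msg
  }
}
```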
*/ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializer.scala b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializer.scala index 63e59a8f17..00e6a27f72 100644 --- a/akka-cluster-tools/src/main/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializer.scala +++ b/akka-cluster-tools/src/main/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.singleton.protobuf diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/client/ClusterClientSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/client/ClusterClientSpec.scala index 482e3ba976..8d0384be68 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/client/ClusterClientSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/client/ClusterClientSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.client diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/pubsub/DistributedPubSubMediatorSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/pubsub/DistributedPubSubMediatorSpec.scala index ea5d20a04c..1356ae36c5 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/pubsub/DistributedPubSubMediatorSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/pubsub/DistributedPubSubMediatorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.pubsub diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerChaosSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerChaosSpec.scala index e390c57529..6b3e2dd64b 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerChaosSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerChaosSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerLeaveSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerLeaveSpec.scala index 85f796432b..9740a3d9de 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerLeaveSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerLeaveSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerSpec.scala index 1f937fd769..2338090ef7 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerStartupSpec.scala b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerStartupSpec.scala index f9fddb7153..8ccae2570f 100644 --- a/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerStartupSpec.scala +++ b/akka-cluster-tools/src/multi-jvm/scala/akka/cluster/singleton/ClusterSingletonManagerStartupSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/test/java/akka/cluster/client/ClusterClientTest.java b/akka-cluster-tools/src/test/java/akka/cluster/client/ClusterClientTest.java index ce647c06b5..cc17424156 100644 --- a/akka-cluster-tools/src/test/java/akka/cluster/client/ClusterClientTest.java +++ b/akka-cluster-tools/src/test/java/akka/cluster/client/ClusterClientTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.client; diff --git a/akka-cluster-tools/src/test/java/akka/cluster/pubsub/DistributedPubSubMediatorTest.java b/akka-cluster-tools/src/test/java/akka/cluster/pubsub/DistributedPubSubMediatorTest.java index 2be1780836..19517d1d4a 100644 --- a/akka-cluster-tools/src/test/java/akka/cluster/pubsub/DistributedPubSubMediatorTest.java +++ b/akka-cluster-tools/src/test/java/akka/cluster/pubsub/DistributedPubSubMediatorTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.pubsub; diff --git a/akka-cluster-tools/src/test/java/akka/cluster/singleton/ClusterSingletonManagerTest.java b/akka-cluster-tools/src/test/java/akka/cluster/singleton/ClusterSingletonManagerTest.java index e363757e90..9edc69251a 100644 --- a/akka-cluster-tools/src/test/java/akka/cluster/singleton/ClusterSingletonManagerTest.java +++ b/akka-cluster-tools/src/test/java/akka/cluster/singleton/ClusterSingletonManagerTest.java @@ -1,30 +1,16 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - import akka.actor.ActorSystem; import akka.actor.ActorRef; -import akka.actor.ActorSelection; import akka.actor.Props; -import akka.actor.UntypedActor; -import akka.cluster.Cluster; -import akka.cluster.Member; -import akka.cluster.ClusterEvent.CurrentClusterState; -import akka.cluster.ClusterEvent.MemberEvent; -import akka.cluster.ClusterEvent.MemberUp; -import akka.cluster.ClusterEvent.MemberRemoved; -import akka.cluster.MemberStatus; public class ClusterSingletonManagerTest { + @SuppressWarnings("null") public void demo() { final ActorSystem system = null; final ActorRef queue = null; diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializerSpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializerSpec.scala index f05185d439..348c8f7087 100644 --- a/akka-cluster-tools/src/test/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializerSpec.scala +++ b/akka-cluster-tools/src/test/scala/akka/cluster/client/protobuf/ClusterClientMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.client.protobuf diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializerSpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializerSpec.scala index 6925b67e55..ced6886711 100644 --- a/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializerSpec.scala +++ b/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/protobuf/DistributedPubSubMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.pubsub.protobuf diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/singleton/ClusterSingletonProxySpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/singleton/ClusterSingletonProxySpec.scala index 312ae7816f..0b5b63b4db 100644 --- a/akka-cluster-tools/src/test/scala/akka/cluster/singleton/ClusterSingletonProxySpec.scala +++ b/akka-cluster-tools/src/test/scala/akka/cluster/singleton/ClusterSingletonProxySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.singleton diff --git a/akka-cluster-tools/src/test/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializerSpec.scala b/akka-cluster-tools/src/test/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializerSpec.scala index 4d87caa7b2..63c3882de3 100644 --- a/akka-cluster-tools/src/test/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializerSpec.scala +++ b/akka-cluster-tools/src/test/scala/akka/cluster/singleton/protobuf/ClusterSingletonMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.singleton.protobuf diff --git a/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala b/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala index ba94e4bc5b..0bb6116d83 100644 --- a/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala +++ b/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala index eb7ba075c8..e6cca61fc0 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster @@ -340,7 +340,7 @@ class Cluster(val system: ExtendedActorSystem) extends Extension { /** * The supplied thunk will be run, once, when current cluster member is `Removed`. * If the cluster has already been shutdown the thunk will run on the caller thread immediately. - * Typically used together `cluster.leave(cluster.selfAddress)` and then `system.shutdown()`. + * Typically used together `cluster.leave(cluster.selfAddress)` and then `system.terminate()`. */ def registerOnMemberRemoved[T](code: ⇒ T): Unit = registerOnMemberRemoved(new Runnable { override def run(): Unit = code }) @@ -348,7 +348,7 @@ class Cluster(val system: ExtendedActorSystem) extends Extension { /** * Java API: The supplied thunk will be run, once, when current cluster member is `Removed`. 
* If the cluster has already been shutdown the thunk will run on the caller thread immediately. - * Typically used together `cluster.leave(cluster.selfAddress)` and then `system.shutdown()`. + * Typically used together `cluster.leave(cluster.selfAddress)` and then `system.terminate()`. */ def registerOnMemberRemoved(callback: Runnable): Unit = { if (_isTerminated.get()) diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala index 0b859f66b6..b9edcaf2f7 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala index ae409d394a..56a77cc839 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala index 28dc023160..9d16e86969 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala index 713433d22d..e5e593b25b 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterHeartbeat.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterJmx.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterJmx.scala index b3859c1434..f369d3f2d8 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterJmx.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterJmx.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala index 16bdf6b7be..57126541fc 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala index 3501957900..be406092f9 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterRemoteWatcher.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterRemoteWatcher.scala index c1a21e5977..bb85cf9b19 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterRemoteWatcher.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterRemoteWatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala index 7823eec52e..de79877a63 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Gossip.scala b/akka-cluster/src/main/scala/akka/cluster/Gossip.scala index cb10b08cf0..f6aff04575 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Gossip.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Gossip.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Member.scala b/akka-cluster/src/main/scala/akka/cluster/Member.scala index 6fb54cbdd7..165de85a9f 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Member.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Member.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Reachability.scala b/akka-cluster/src/main/scala/akka/cluster/Reachability.scala index 27fb62b18a..9475009907 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Reachability.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Reachability.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/VectorClock.scala b/akka-cluster/src/main/scala/akka/cluster/VectorClock.scala index 7f30d1bf9b..a66dac1a95 100644 --- a/akka-cluster/src/main/scala/akka/cluster/VectorClock.scala +++ b/akka-cluster/src/main/scala/akka/cluster/VectorClock.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/protobuf/ClusterMessageSerializer.scala b/akka-cluster/src/main/scala/akka/cluster/protobuf/ClusterMessageSerializer.scala index 919f7141c9..a8d141c6a9 100644 --- a/akka-cluster/src/main/scala/akka/cluster/protobuf/ClusterMessageSerializer.scala +++ b/akka-cluster/src/main/scala/akka/cluster/protobuf/ClusterMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.protobuf diff --git a/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala b/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala index ffed9df126..a79a5765df 100644 --- a/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala +++ b/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala index f691325ec9..f09a32f0e0 100644 --- a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala +++ b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/AttemptSysMsgRedeliverySpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/AttemptSysMsgRedeliverySpec.scala index 046254e2c8..5d6ec41cc1 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/AttemptSysMsgRedeliverySpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/AttemptSysMsgRedeliverySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUnreachableSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUnreachableSpec.scala index 2037a32239..2c9de4846f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUnreachableSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUnreachableSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUpSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUpSpec.scala index 3104411fa6..91a7fc33ac 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUpSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClientDowningNodeThatIsUpSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterAccrualFailureDetectorSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterAccrualFailureDetectorSpec.scala index 4991b22da9..d283d6afa5 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterAccrualFailureDetectorSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterAccrualFailureDetectorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterDeathWatchSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterDeathWatchSpec.scala index 88a77b1237..4a101beeea 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterDeathWatchSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterDeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsDisabledSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsDisabledSpec.scala index 5db76236ac..946e2a2b2f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsDisabledSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsDisabledSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsSpec.scala index 7631ca5e1e..7e6e9253f2 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterMetricsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ConvergenceSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ConvergenceSpec.scala index 479b7c1a86..a474115a8a 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ConvergenceSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ConvergenceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/DeterministicOldestWhenJoiningSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/DeterministicOldestWhenJoiningSpec.scala index 51b5d1c28e..62a7951e9e 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/DeterministicOldestWhenJoiningSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/DeterministicOldestWhenJoiningSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/DisallowJoinOfTwoClustersSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/DisallowJoinOfTwoClustersSpec.scala index 7c536f351c..38503f4af2 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/DisallowJoinOfTwoClustersSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/DisallowJoinOfTwoClustersSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/InitialHeartbeatSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/InitialHeartbeatSpec.scala index 62fccc06c7..7eb41a96cd 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/InitialHeartbeatSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/InitialHeartbeatSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinInProgressSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinInProgressSpec.scala index a34266f8ad..f7488e8aa7 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinInProgressSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinInProgressSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinSeedNodeSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinSeedNodeSpec.scala index 671bd9cc27..fce601f68e 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinSeedNodeSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/JoinSeedNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningAllOtherNodesSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningAllOtherNodesSpec.scala index 685fd7ffc0..b7edf5e45c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningAllOtherNodesSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningAllOtherNodesSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningNodeThatIsUnreachableSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningNodeThatIsUnreachableSpec.scala index 4237d7f868..bfbacf7737 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningNodeThatIsUnreachableSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderDowningNodeThatIsUnreachableSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala index 9728a2516d..c7ec3185d9 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderLeavingSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderLeavingSpec.scala index 3783118e54..00c0d58938 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderLeavingSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderLeavingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MBeanSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MBeanSpec.scala index 5371e999b9..12818eb5ac 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MBeanSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MBeanSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MemberWeaklyUpSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MemberWeaklyUpSpec.scala index d97a240bb3..3a807a84ed 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MemberWeaklyUpSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MemberWeaklyUpSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerExitingSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerExitingSpec.scala index e60e8ab84e..5002f39eea 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerExitingSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerExitingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerUpSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerUpSpec.scala index 87a01137a4..7d3a206681 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerUpSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MembershipChangeListenerUpSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MinMembersBeforeUpSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MinMembersBeforeUpSpec.scala index 443748e14c..5e40f6a836 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MinMembersBeforeUpSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MinMembersBeforeUpSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala index 9cd99742da..f91d3f1c32 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeChurnSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeChurnSpec.scala index fbcaba5d49..e76f7d06b8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeChurnSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeChurnSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeDowningAndBeingRemovedSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeDowningAndBeingRemovedSpec.scala index 6b8240b53a..9aac06a36c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeDowningAndBeingRemovedSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeDowningAndBeingRemovedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingAndBeingRemovedSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingAndBeingRemovedSpec.scala index 407c2e2baf..14f33c021d 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingAndBeingRemovedSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingAndBeingRemovedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingSpec.scala index b31d5c7fb0..5372a2aa3b 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeLeavingAndExitingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeMembershipSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeMembershipSpec.scala index 1c8304ed46..a485d89366 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeMembershipSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeMembershipSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeUpSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeUpSpec.scala index 5c65e646ae..6bc69248d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeUpSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/NodeUpSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartFirstSeedNodeSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartFirstSeedNodeSpec.scala index b1a0fa4904..261f540be9 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartFirstSeedNodeSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartFirstSeedNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode2Spec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode2Spec.scala index 596fb3d67d..7200863289 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode2Spec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode2Spec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode3Spec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode3Spec.scala index f1733ffd8d..f6c40343ac 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode3Spec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNode3Spec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNodeSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNodeSpec.scala index 48aea86078..42dfc20c7f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNodeSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/RestartNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/SingletonClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/SingletonClusterSpec.scala index 1bdf8ee9a1..d96fc3f063 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/SingletonClusterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/SingletonClusterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/SplitBrainSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/SplitBrainSpec.scala index d8a1e19085..3272292a7f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/SplitBrainSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/SplitBrainSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala index 6569268a4e..91cc7c2466 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/SunnyWeatherSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/SunnyWeatherSpec.scala index ba009d08af..3cee34fdc3 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/SunnyWeatherSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/SunnyWeatherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/SurviveNetworkInstabilitySpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/SurviveNetworkInstabilitySpec.scala index a7d93f35e8..2676eb4dc3 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/SurviveNetworkInstabilitySpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/SurviveNetworkInstabilitySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala index e9c7467398..c89ca4c895 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/UnreachableNodeJoinsAgainSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/UnreachableNodeJoinsAgainSpec.scala index 51a0006851..476bdc76af 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/UnreachableNodeJoinsAgainSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/UnreachableNodeJoinsAgainSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala index ccf4fb902b..06e027181c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingGroupSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingGroupSpec.scala index 66bfede0d9..69d1d31d21 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingGroupSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingGroupSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala index 3f110f0b88..2a9d6a7197 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala index be5759e87c..031f5eb72d 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala index 0ab28b00fc..d499b1dd86 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/UseRoleIgnoredSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.cluster.routing diff --git a/akka-cluster/src/test/java/akka/cluster/ClusterJavaCompileTest.java b/akka-cluster/src/test/java/akka/cluster/ClusterJavaCompileTest.java index 6a2afefe39..4100f7c7e7 100644 --- a/akka-cluster/src/test/java/akka/cluster/ClusterJavaCompileTest.java +++ b/akka-cluster/src/test/java/akka/cluster/ClusterJavaCompileTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster; diff --git a/akka-cluster/src/test/scala/akka/cluster/AutoDownSpec.scala b/akka-cluster/src/test/scala/akka/cluster/AutoDownSpec.scala index 74c35dbec2..655a61e8e2 100644 --- a/akka-cluster/src/test/scala/akka/cluster/AutoDownSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/AutoDownSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster @@ -88,7 +88,7 @@ class AutoDownSpec extends AkkaSpec { expectMsg(DownCalled(memberC.address)) } - "not down unreachable when loosing leadership inbetween detection and specified duration" in { + "not down unreachable when losing leadership inbetween detection and specified duration" in { val a = autoDownActor(2.seconds) a ! LeaderChanged(Some(memberA.address)) a ! UnreachableMember(memberC) diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterConfigSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterConfigSpec.scala index 84c8eb69f5..ee6ae9bfd6 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterConfigSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterDeployerSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterDeployerSpec.scala index af906dc6ef..9e4505ea27 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterDeployerSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterDeployerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventPublisherSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventPublisherSpec.scala index 70212aecc5..ca9884f1eb 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventPublisherSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventPublisherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventSpec.scala index 074bb225b2..fb9cf42d47 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterDomainEventSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala index 1302e26856..e73f31e4e8 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala index e363611422..8176c246bb 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/EWMASpec.scala b/akka-cluster/src/test/scala/akka/cluster/EWMASpec.scala index 002759d47d..93c9460f02 100644 --- a/akka-cluster/src/test/scala/akka/cluster/EWMASpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/EWMASpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala b/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala index a460112865..ae21e9cada 100644 --- a/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala +++ b/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala b/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala index 9f40b802b9..351c688bf7 100644 --- a/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingPerfSpec.scala b/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingPerfSpec.scala index eb437f0cc2..b4a52b9114 100644 --- a/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingPerfSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingSpec.scala b/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingSpec.scala index 022f40fe9f..bcde921030 100644 --- a/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/HeartbeatNodeRingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/MemberOrderingSpec.scala b/akka-cluster/src/test/scala/akka/cluster/MemberOrderingSpec.scala index 086ac3e270..e12f6d6b4b 100644 --- a/akka-cluster/src/test/scala/akka/cluster/MemberOrderingSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/MemberOrderingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/MetricNumericConverterSpec.scala b/akka-cluster/src/test/scala/akka/cluster/MetricNumericConverterSpec.scala index d3bc56f4ef..7485ce4319 100644 --- a/akka-cluster/src/test/scala/akka/cluster/MetricNumericConverterSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/MetricNumericConverterSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/MetricValuesSpec.scala b/akka-cluster/src/test/scala/akka/cluster/MetricValuesSpec.scala index affcde9c88..f9101c4ea7 100644 --- a/akka-cluster/src/test/scala/akka/cluster/MetricValuesSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/MetricValuesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/MetricsCollectorSpec.scala b/akka-cluster/src/test/scala/akka/cluster/MetricsCollectorSpec.scala index b90aa10e60..c8ce1151d4 100644 --- a/akka-cluster/src/test/scala/akka/cluster/MetricsCollectorSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/MetricsCollectorSpec.scala @@ -1,6 +1,6 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/MetricsGossipSpec.scala b/akka-cluster/src/test/scala/akka/cluster/MetricsGossipSpec.scala index 23e4ea689c..fff4158726 100644 --- a/akka-cluster/src/test/scala/akka/cluster/MetricsGossipSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/MetricsGossipSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/NodeMetricsSpec.scala b/akka-cluster/src/test/scala/akka/cluster/NodeMetricsSpec.scala index 24ae24db8f..d9f416c0a9 100644 --- a/akka-cluster/src/test/scala/akka/cluster/NodeMetricsSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/NodeMetricsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ReachabilityPerfSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ReachabilityPerfSpec.scala index 5433f7ac67..5a5d6dfb7c 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ReachabilityPerfSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ReachabilityPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/ReachabilitySpec.scala b/akka-cluster/src/test/scala/akka/cluster/ReachabilitySpec.scala index fa5ed1a417..28a3784502 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ReachabilitySpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ReachabilitySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/SerializationChecksSpec.scala b/akka-cluster/src/test/scala/akka/cluster/SerializationChecksSpec.scala index 26321e746c..75d6a32389 100644 --- a/akka-cluster/src/test/scala/akka/cluster/SerializationChecksSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/SerializationChecksSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/SerializeCreatorsVerificationSpec.scala b/akka-cluster/src/test/scala/akka/cluster/SerializeCreatorsVerificationSpec.scala index 4731eaa8ca..6e137b9489 100644 --- a/akka-cluster/src/test/scala/akka/cluster/SerializeCreatorsVerificationSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/SerializeCreatorsVerificationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/StartupWithOneThreadSpec.scala b/akka-cluster/src/test/scala/akka/cluster/StartupWithOneThreadSpec.scala index 8c71db2221..8b923a9eea 100644 --- a/akka-cluster/src/test/scala/akka/cluster/StartupWithOneThreadSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/StartupWithOneThreadSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/TestMember.scala b/akka-cluster/src/test/scala/akka/cluster/TestMember.scala index c1b88f894a..e9fbfd06fb 100644 --- a/akka-cluster/src/test/scala/akka/cluster/TestMember.scala +++ b/akka-cluster/src/test/scala/akka/cluster/TestMember.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/VectorClockPerfSpec.scala b/akka-cluster/src/test/scala/akka/cluster/VectorClockPerfSpec.scala index 547dd77649..737478217e 100644 --- a/akka-cluster/src/test/scala/akka/cluster/VectorClockPerfSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/VectorClockPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/VectorClockSpec.scala b/akka-cluster/src/test/scala/akka/cluster/VectorClockSpec.scala index 6dc396e76a..c4edc4eb54 100644 --- a/akka-cluster/src/test/scala/akka/cluster/VectorClockSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/VectorClockSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/protobuf/ClusterMessageSerializerSpec.scala b/akka-cluster/src/test/scala/akka/cluster/protobuf/ClusterMessageSerializerSpec.scala index b667b78496..2e5606910d 100644 --- a/akka-cluster/src/test/scala/akka/cluster/protobuf/ClusterMessageSerializerSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/protobuf/ClusterMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.protobuf diff --git a/akka-cluster/src/test/scala/akka/cluster/routing/ClusterRouterSupervisorSpec.scala b/akka-cluster/src/test/scala/akka/cluster/routing/ClusterRouterSupervisorSpec.scala index 266afe08e8..b51cecdf9a 100644 --- a/akka-cluster/src/test/scala/akka/cluster/routing/ClusterRouterSupervisorSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/routing/ClusterRouterSupervisorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/test/scala/akka/cluster/routing/MetricsSelectorSpec.scala b/akka-cluster/src/test/scala/akka/cluster/routing/MetricsSelectorSpec.scala index 77701a70d3..56946438f4 100644 --- a/akka-cluster/src/test/scala/akka/cluster/routing/MetricsSelectorSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/routing/MetricsSelectorSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-cluster/src/test/scala/akka/cluster/routing/WeightedRouteesSpec.scala b/akka-cluster/src/test/scala/akka/cluster/routing/WeightedRouteesSpec.scala index e32ebfc5fc..06f72207ef 100644 --- a/akka-cluster/src/test/scala/akka/cluster/routing/WeightedRouteesSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/routing/WeightedRouteesSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.routing diff --git a/akka-contrib/docs/circuitbreaker.rst b/akka-contrib/docs/circuitbreaker.rst new file mode 100644 index 0000000000..628cb0b3a6 --- /dev/null +++ b/akka-contrib/docs/circuitbreaker.rst @@ -0,0 +1,54 @@ +.. _circuit-breaker-proxy: + +Circuit-Breaker Actor +===================== + +This is an alternative implementation of the :ref:`Akka Circuit Breaker Pattern `. +The main difference is that it is intended to be used only for request-reply interactions with an actor, using the Circuit-Breaker as a proxy of the target actor +in order to provide the same fail-fast behaviour and a protocol similar to the circuit-breaker implementation in Akka. + + +Usage +----- + +Let's assume we have an actor wrapping a back-end service that is able to respond to ``Request`` calls with a ``Response`` object +containing an ``Either[String, String]`` to map successful and failed responses. The service may also slow down +under heavy workload. + +A simple implementation can be given by this class: + +.. includecode:: @contribSrc@/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala#simple-service + + +If we want to interface with this service using the Circuit Breaker, we can use two approaches: + +Using a non-conversational approach: + +.. includecode:: @contribSrc@/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala#basic-sample
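+
+A rough sketch of what this could look like (for illustration only; the authoritative snippets are the ``includecode``
+samples referenced on this page). The sketch assumes it runs inside an actor, that ``dbService`` is the sample service
+actor and ``Request`` its request message, and it uses the ``CircuitBreakerPropsBuilder`` defined in
+``akka.contrib.circuitbreaker.CircuitBreakerProxy``::
+
+  import scala.concurrent.duration._
+  import akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitBreakerPropsBuilder
+
+  // build the proxy Props with the builder provided by CircuitBreakerProxy
+  val breakerProps =
+    CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = 2.seconds, resetTimeout = 30.seconds)
+      .props(target = dbService)
+
+  // the proxy is itself an actor; requests go to it instead of to the target
+  val dbServiceWithBreaker = context.actorOf(breakerProps, "dbServiceWithBreaker")
+
+  // fire-and-forget: replies (or CircuitOpenFailure notifications while the circuit is open)
+  // are delivered back to the sending actor
+  dbServiceWithBreaker ! Request("give me the list of items")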
+ +When using the ``ask`` pattern, it is useful to be able to map circuit open failures to the same type of failure +returned by the service (a ``Left[String]`` in our case): + +.. includecode:: @contribSrc@/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala#ask-sample + +If it is not possible to define a specific error response, you can map the Open Circuit notification to a failure. +That also means that your ``CircuitBreakerProxy`` will be useful to protect you from timeouts caused by extra workload or +temporary failures in the target actor. +You can decide to do that in two ways: + +The first is to use the ``askWithCircuitBreaker`` method on the ``ActorRef`` or ``ActorSelection`` instance pointing to +your circuit breaker proxy (enabled by importing ``import akka.contrib.circuitbreaker.Implicits.askWithCircuitBreaker``): + +.. includecode:: @contribSrc@/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala#ask-with-circuit-breaker-sample + +The second is to map the future response of your ``ask`` pattern application with ``failForOpenCircuit``, +enabled by importing ``import akka.contrib.circuitbreaker.Implicits.futureExtensions``: + +.. includecode:: @contribSrc@/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala#ask-with-failure-sample + +Direct Communication With The Target Actor +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To send messages to the ``target`` actor without expecting any response, you can wrap your message in a ``TellOnly`` or a ``Passthrough`` +envelope. The difference between the two is that ``TellOnly`` will forward the message only when in closed state and +``Passthrough`` will do it in any state. You can for example use the ``Passthrough`` envelope to wrap a ``PoisonPill`` +message to terminate the target actor. That will also cause the circuit breaker proxy to be terminated. + diff --git a/akka-contrib/docs/index.rst b/akka-contrib/docs/index.rst index 691f96b2a7..0f3c1f2263 100644 --- a/akka-contrib/docs/index.rst +++ b/akka-contrib/docs/index.rst @@ -36,6 +36,7 @@ The Current List of Modules peek-mailbox aggregator receive-pipeline + circuitbreaker Suggested Way of Using these Contributions ------------------------------------------ diff --git a/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/CircuitBreakerProxy.scala b/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/CircuitBreakerProxy.scala new file mode 100644 index 0000000000..995208ff47 --- /dev/null +++ b/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/CircuitBreakerProxy.scala @@ -0,0 +1,259 @@ +/** + * Copyright (C) 2014-2016 Typesafe Inc. + */ +package akka.contrib.circuitbreaker + +import akka.actor._ +import akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure +import akka.event.LoggingAdapter +import akka.pattern._ +import akka.util.Timeout + +import scala.util.{ Failure, Success } + +/** + * This is an Actor which implements the circuit breaker pattern; + * you may also be interested in the raw circuit breaker [[akka.pattern.CircuitBreaker]] + */ +object CircuitBreakerProxy { + + /** + * Creates a circuit breaker actor proxying a target actor, intended for request-reply interactions.
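+ *
+ * A minimal construction sketch (illustrative only; `system` and the hypothetical `userService` target are assumed
+ * to be in scope, and `import scala.concurrent.duration._` provides the timeout values):
+ * {{{
+ * val breaker = system.actorOf(CircuitBreakerProxy.props(
+ *   target = userService,
+ *   maxFailures = 3,
+ *   callTimeout = 2.seconds,
+ *   resetTimeout = 30.seconds,
+ *   circuitEventListener = None,
+ *   failureDetector = { _ == "FAILURE" },
+ *   failureMap = identity))
+ * }}}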
+ * It is possible to send messages through this proxy without expecting a response wrapping them into a + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.TellOnly]] or a + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.Passthrough]] the difference between the two being that + * a message wrapped into a [[akka.contrib.circuitbreaker.CircuitBreakerProxy.Passthrough]] is going to be + * forwarded even when the circuit is open (e.g. if you need to terminate the target and proxy actors sending + * a [[akka.actor.PoisonPill]] message) + * + * The circuit breaker implements the same state machine documented in [[akka.pattern.CircuitBreaker]] + * + * @param target the actor to proxy + * @param maxFailures maximum number of failures before opening the circuit + * @param callTimeout timeout before considering the ongoing call a failure + * @param resetTimeout time after which the channel will be closed after entering the open state + * @param circuitEventListener an actor that will receive a series of messages of type + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitBreakerEvent]] + * @param failureDetector function to detect if the a message received from the target actor as + * response from a request represent a failure + * @param failureMap function to map a failure into a response message. The failing response message is wrapped + * into a [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] object + */ + def props(target: ActorRef, + maxFailures: Int, + callTimeout: Timeout, + resetTimeout: Timeout, + circuitEventListener: Option[ActorRef], + failureDetector: Any ⇒ Boolean, + failureMap: CircuitOpenFailure ⇒ Any) = + Props(new CircuitBreakerProxy(target, maxFailures, callTimeout, resetTimeout, circuitEventListener, failureDetector, failureMap)) + + sealed trait CircuitBreakerCommand + + final case class TellOnly(msg: Any) extends CircuitBreakerCommand + final case class Passthrough(msg: Any) extends CircuitBreakerCommand + + sealed trait CircuitBreakerResponse + final case class CircuitOpenFailure(failedMsg: Any) + + sealed trait CircuitBreakerEvent + final case class CircuitOpen(circuit: ActorRef) extends CircuitBreakerCommand + final case class CircuitClosed(circuit: ActorRef) extends CircuitBreakerCommand + final case class CircuitHalfOpen(circuit: ActorRef) extends CircuitBreakerCommand + + sealed trait CircuitBreakerState + case object Open extends CircuitBreakerState + case object Closed extends CircuitBreakerState + case object HalfOpen extends CircuitBreakerState + + final case class CircuitBreakerStateData(failureCount: Int = 0, firstHalfOpenMessageSent: Boolean = false) + + final case class CircuitBreakerPropsBuilder( + maxFailures: Int, callTimeout: Timeout, resetTimeout: Timeout, + circuitEventListener: Option[ActorRef] = None, + failureDetector: Any ⇒ Boolean = { _ ⇒ false }, + openCircuitFailureConverter: CircuitOpenFailure ⇒ Any = identity) { + + def withMaxFailures(value: Int) = copy(maxFailures = value) + def withCallTimeout(value: Timeout) = copy(callTimeout = value) + def withResetTimeout(value: Timeout) = copy(resetTimeout = value) + def withCircuitEventListener(value: Option[ActorRef]) = copy(circuitEventListener = value) + def withFailureDetector(value: Any ⇒ Boolean) = copy(failureDetector = value) + def withOpenCircuitFailureConverter(value: CircuitOpenFailure ⇒ Any) = copy(openCircuitFailureConverter = value) + + /** + * Creates the props for a [[akka.contrib.circuitbreaker.CircuitBreakerProxy]] proxying the given target + * + * 
@param target the target actor ref + */ + def props(target: ActorRef) = CircuitBreakerProxy.props(target, maxFailures, callTimeout, resetTimeout, circuitEventListener, failureDetector, openCircuitFailureConverter) + + } + + private[CircuitBreakerProxy] object CircuitBreakerInternalEvents { + sealed trait CircuitBreakerInternalEvent + case object CallFailed extends CircuitBreakerInternalEvent + case object CallSucceeded extends CircuitBreakerInternalEvent + } +} + +import akka.contrib.circuitbreaker.CircuitBreakerProxy._ + +final class CircuitBreakerProxy( + target: ActorRef, + maxFailures: Int, + callTimeout: Timeout, + resetTimeout: Timeout, + circuitEventListener: Option[ActorRef], + failureDetector: Any ⇒ Boolean, + failureMap: CircuitOpenFailure ⇒ Any) extends Actor with ActorLogging with FSM[CircuitBreakerState, CircuitBreakerStateData] { + + import CircuitBreakerInternalEvents._ + + context watch target + + startWith(Closed, CircuitBreakerStateData(failureCount = 0)) + + def callSucceededHandling: StateFunction = { + case Event(CallSucceeded, state) ⇒ + log.debug("Received call succeeded notification in state {} resetting counter", state) + goto(Closed) using CircuitBreakerStateData(failureCount = 0, firstHalfOpenMessageSent = false) + } + + def passthroughHandling: StateFunction = { + case Event(Passthrough(message), state) ⇒ + log.debug("Received a passthrough message in state {}, forwarding the message to the target actor without altering current state", state) + target ! message + stay + } + + def targetTerminationHandling: StateFunction = { + case Event(Terminated(`target`), state) ⇒ + log.debug("Target actor {} terminated while in state {}, terminating this proxy too", target, state) + stop + } + + def commonStateHandling: StateFunction = { callSucceededHandling orElse passthroughHandling orElse targetTerminationHandling } + + when(Closed) { + commonStateHandling orElse { + case Event(TellOnly(message), _) ⇒ + log.debug("Closed: Sending message {} without expecting any response", message) + target ! message + stay + + case Event(CallFailed, state) ⇒ + log.debug("Received call failed notification in state {} incrementing counter", state) + val newState = state.copy(failureCount = state.failureCount + 1) + if (newState.failureCount < maxFailures) { + stay using newState + } else { + goto(Open) using newState + } + + case Event(message, state) ⇒ + log.debug("CLOSED: Sending message {} expecting a response withing timeout {}", message, callTimeout) + val currentSender = sender() + forwardRequest(message, sender, state, log) + stay + + } + } + + when(Open, stateTimeout = resetTimeout.duration) { + commonStateHandling orElse { + case Event(StateTimeout, state) ⇒ + log.debug("Timeout expired for state OPEN, going to half open") + goto(HalfOpen) using state.copy(firstHalfOpenMessageSent = false) + + case Event(CallFailed, state) ⇒ + log.debug("Open: Call received a further call failed notification, probably from a previous timed out event, ignoring") + stay + + case Event(openNotification @ CircuitOpenFailure(_), _) ⇒ + log.warning("Unexpected circuit open notification {} sent to myself. Please report this as a bug.", openNotification) + stay + + case Event(message, state) ⇒ + val failureNotification = failureMap(CircuitOpenFailure(message)) + log.debug("OPEN: Failing request for message {}, sending failure notification {} to sender {}", message, failureNotification, sender) + sender ! 
failureNotification + stay + + } + } + + when(HalfOpen) { + commonStateHandling orElse { + case Event(TellOnly(message), _) ⇒ + log.debug("HalfOpen: Dropping TellOnly request for message {}", message) + stay + + case Event(CallFailed, CircuitBreakerStateData(_, true)) ⇒ + log.debug("HalfOpen: First forwarded call failed, returning to OPEN state") + goto(Open) + + case Event(CallFailed, CircuitBreakerStateData(_, false)) ⇒ + log.debug("HalfOpen: Received a further call failed notification, probably from a previously timed-out call, ignoring") + stay + + case Event(message, state @ CircuitBreakerStateData(_, false)) ⇒ + log.debug("HalfOpen: First message {} received, forwarding it to target {}", message, target) + forwardRequest(message, sender, state, log) + stay using state.copy(firstHalfOpenMessageSent = true) + + case Event(message, CircuitBreakerStateData(_, true)) ⇒ + val failureNotification = failureMap(CircuitOpenFailure(message)) + log.debug("HALF-OPEN: Failing request for message {}, sending failure notification {} to sender {}", message, failureNotification, sender) + sender ! failureNotification + stay + } + } + + def forwardRequest(message: Any, currentSender: ActorRef, state: CircuitBreakerStateData, log: LoggingAdapter) = { + import context.dispatcher + + target.ask(message)(callTimeout).onComplete { + case Success(response) ⇒ + log.debug("Request '{}' has been replied to with response {}, forwarding to original sender {}", message, response, currentSender) + + currentSender ! response + + val isFailure = failureDetector(response) + + if (isFailure) { + log.debug("Response '{}' is considered a failure, sending self-message to increment the failure count (origin state was {})", + response, state) + + self ! CallFailed + } else { + + log.debug("Request '{}' succeeded with response {}, returning response to sender {} and sending self-message to reset the failure count (origin state was {})", + message, response, currentSender, state) + + self ! CallSucceeded + } + + case Failure(reason) ⇒ + log.debug("Request '{}' to target {} failed with exception {}, sending self-message to increment the failure count (origin state was {})", + message, target, reason, state) + + self ! CallFailed + } + } + + onTransition { + case from -> Closed ⇒ + log.debug("Moving from state {} to state CLOSED", from) + circuitEventListener foreach { _ ! CircuitClosed(self) } + + case from -> HalfOpen ⇒ + log.debug("Moving from state {} to state HALF OPEN", from) + circuitEventListener foreach { _ ! CircuitHalfOpen(self) } + + case from -> Open ⇒ + log.debug("Moving from state {} to state OPEN", from) + circuitEventListener foreach { _ ! CircuitOpen(self) } + } + +} diff --git a/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala b/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala new file mode 100644 index 0000000000..78931576ef --- /dev/null +++ b/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala @@ -0,0 +1,101 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ +package akka.contrib.circuitbreaker + +import akka.actor.{ ActorSelection, Actor, ActorRef } +import akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure +import akka.util.Timeout +import scala.language.implicitConversions + +import scala.concurrent.{ ExecutionContext, Future } + +sealed class OpenCircuitException(message: String) extends RuntimeException(message) +private[circuitbreaker] final object OpenCircuitException extends OpenCircuitException("Unable to complete operation since the Circuit Breaker Actor Proxy is in Open State") + +/** + * Convenience implicit conversions to provide circuit-breaker aware management of the ask pattern, + * either by directly replacing the `ask/?` with `askWithCircuitBreaker` or by extending the + * `Future` result of an `ask` pattern with a method that fails it in case of a + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] response + */ +object Implicits { + /** + * Import this implicit conversion to add the methods `failForOpenCircuit` and `failForOpenCircuitWith` + * to [[scala.concurrent.Future]], converting + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] into a failure caused either by an + * [[akka.contrib.circuitbreaker.OpenCircuitException]] or by an exception built with the given + * exception builder + */ + implicit def futureExtensions(future: Future[Any]) = new CircuitBreakerAwareFuture(future) + + /** + * Import this implicit method to get an extended version of the `ask` pattern for + * [[akka.actor.ActorRef]] and [[akka.actor.ActorSelection]] converting + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] into a failure caused by an + * [[akka.contrib.circuitbreaker.OpenCircuitException]] + */ + implicit def askWithCircuitBreaker(actorRef: ActorRef) = new AskeableWithCircuitBreakerActor(actorRef) + + /** + * Wraps the `ask` method in [[akka.pattern.AskSupport]] to convert + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] responses into a failure response caused + * by an [[akka.contrib.circuitbreaker.OpenCircuitException]] + */ + @throws[akka.contrib.circuitbreaker.OpenCircuitException]("if the call failed because the circuit breaker proxy state was OPEN") + def askWithCircuitBreaker(circuitBreakerProxy: ActorRef, message: Any)(implicit executionContext: ExecutionContext, timeout: Timeout): Future[Any] = + circuitBreakerProxy.internalAskWithCircuitBreaker(message, timeout, ActorRef.noSender) + + /** + * Wraps the `ask` method in [[akka.pattern.AskSupport]] to convert failures connected to the circuit + * breaker being in open state + */ + @throws[akka.contrib.circuitbreaker.OpenCircuitException]("if the call failed because the circuit breaker proxy state was OPEN") + def askWithCircuitBreaker(circuitBreakerProxy: ActorRef, message: Any, sender: ActorRef)(implicit executionContext: ExecutionContext, timeout: Timeout): Future[Any] = + circuitBreakerProxy.internalAskWithCircuitBreaker(message, timeout, sender) + +} + +/** + * Extends [[scala.concurrent.Future]] with the methods `failForOpenCircuit` and `failForOpenCircuitWith` to handle + * [[akka.contrib.circuitbreaker.CircuitBreakerProxy.CircuitOpenFailure]] failure responses, throwing + * an exception built with the given exception builder + */ +final class CircuitBreakerAwareFuture(val future: Future[Any]) extends AnyVal { + @throws[OpenCircuitException] + def failForOpenCircuit(implicit executionContext: ExecutionContext): Future[Any] = failForOpenCircuitWith(OpenCircuitException) + + def 
failForOpenCircuitWith(throwing: ⇒ Throwable)(implicit executionContext: ExecutionContext): Future[Any] = { + future.flatMap { + _ match { + case CircuitOpenFailure(_) ⇒ Future.failed(throwing) + case result ⇒ Future.successful(result) + } + } + } +} + +final class AskeableWithCircuitBreakerActor(val actorRef: ActorRef) extends AnyVal { + def askWithCircuitBreaker(message: Any)(implicit executionContext: ExecutionContext, timeout: Timeout, sender: ActorRef = Actor.noSender): Future[Any] = + internalAskWithCircuitBreaker(message, timeout, sender) + + @throws[OpenCircuitException] + private[circuitbreaker] def internalAskWithCircuitBreaker(message: Any, timeout: Timeout, sender: ActorRef)(implicit executionContext: ExecutionContext) = { + import akka.pattern.ask + import Implicits.futureExtensions + + ask(actorRef, message, sender)(timeout).failForOpenCircuit + } +} + +final class AskeableWithCircuitBreakerActorSelection(val actorSelection: ActorSelection) extends AnyVal { + def askWithCircuitBreaker(message: Any)(implicit executionContext: ExecutionContext, timeout: Timeout, sender: ActorRef = Actor.noSender): Future[Any] = + internalAskWithCircuitBreaker(message, timeout, sender) + + private[circuitbreaker] def internalAskWithCircuitBreaker(message: Any, timeout: Timeout, sender: ActorRef)(implicit executionContext: ExecutionContext) = { + import akka.pattern.ask + import Implicits.futureExtensions + + ask(actorSelection, message, sender)(timeout).failForOpenCircuit + } +} \ No newline at end of file diff --git a/akka-contrib/src/main/scala/akka/contrib/jul/JavaLogger.scala b/akka-contrib/src/main/scala/akka/contrib/jul/JavaLogger.scala index e2c99105f1..b4e42b715e 100644 --- a/akka-contrib/src/main/scala/akka/contrib/jul/JavaLogger.scala +++ b/akka-contrib/src/main/scala/akka/contrib/jul/JavaLogger.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.jul diff --git a/akka-contrib/src/main/scala/akka/contrib/mailbox/PeekMailbox.scala b/akka-contrib/src/main/scala/akka/contrib/mailbox/PeekMailbox.scala index 94ed1dc77d..79f7c0db64 100644 --- a/akka-contrib/src/main/scala/akka/contrib/mailbox/PeekMailbox.scala +++ b/akka-contrib/src/main/scala/akka/contrib/mailbox/PeekMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.mailbox diff --git a/akka-contrib/src/main/scala/akka/contrib/pattern/Aggregator.scala b/akka-contrib/src/main/scala/akka/contrib/pattern/Aggregator.scala index 64ed357bdc..7a1b29f196 100644 --- a/akka-contrib/src/main/scala/akka/contrib/pattern/Aggregator.scala +++ b/akka-contrib/src/main/scala/akka/contrib/pattern/Aggregator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.pattern diff --git a/akka-contrib/src/main/scala/akka/contrib/pattern/ReliableProxy.scala b/akka-contrib/src/main/scala/akka/contrib/pattern/ReliableProxy.scala index e963b865db..2db29800dd 100644 --- a/akka-contrib/src/main/scala/akka/contrib/pattern/ReliableProxy.scala +++ b/akka-contrib/src/main/scala/akka/contrib/pattern/ReliableProxy.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.contrib.pattern diff --git a/akka-contrib/src/main/scala/akka/contrib/throttle/TimerBasedThrottler.scala b/akka-contrib/src/main/scala/akka/contrib/throttle/TimerBasedThrottler.scala index 60baf00346..1694b99702 100644 --- a/akka-contrib/src/main/scala/akka/contrib/throttle/TimerBasedThrottler.scala +++ b/akka-contrib/src/main/scala/akka/contrib/throttle/TimerBasedThrottler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.throttle diff --git a/akka-contrib/src/multi-jvm/scala/akka/contrib/pattern/ReliableProxySpec.scala b/akka-contrib/src/multi-jvm/scala/akka/contrib/pattern/ReliableProxySpec.scala index 840bab6392..6bc6bb849a 100644 --- a/akka-contrib/src/multi-jvm/scala/akka/contrib/pattern/ReliableProxySpec.scala +++ b/akka-contrib/src/multi-jvm/scala/akka/contrib/pattern/ReliableProxySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.pattern diff --git a/akka-contrib/src/test/java/akka/contrib/pattern/ReliableProxyTest.java b/akka-contrib/src/test/java/akka/contrib/pattern/ReliableProxyTest.java index f45db1d864..3c25f8110b 100644 --- a/akka-contrib/src/test/java/akka/contrib/pattern/ReliableProxyTest.java +++ b/akka-contrib/src/test/java/akka/contrib/pattern/ReliableProxyTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.pattern; diff --git a/akka-contrib/src/test/java/akka/contrib/throttle/TimerBasedThrottlerTest.java b/akka-contrib/src/test/java/akka/contrib/throttle/TimerBasedThrottlerTest.java index 8f09d843cc..ff6d11a762 100644 --- a/akka-contrib/src/test/java/akka/contrib/throttle/TimerBasedThrottlerTest.java +++ b/akka-contrib/src/test/java/akka/contrib/throttle/TimerBasedThrottlerTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.throttle; @@ -10,7 +10,6 @@ import org.junit.Test; import java.util.concurrent.TimeUnit; import scala.concurrent.duration.Duration; import com.typesafe.config.ConfigFactory; -import akka.actor.Actor; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Props; diff --git a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala new file mode 100644 index 0000000000..8ec386134b --- /dev/null +++ b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/CircuitBreakerProxySpec.scala @@ -0,0 +1,565 @@ +/** + * Copyright (C) 2014-2016 Typesafe Inc. 
+ */ +package akka.contrib.circuitbreaker + +import akka.actor.{ ActorRef, PoisonPill } +import akka.contrib.circuitbreaker.CircuitBreakerProxy._ +import akka.testkit.{ AkkaSpec, TestProbe } +import akka.util.Timeout +import org.scalatest.concurrent.ScalaFutures +import org.scalatest.time.{ Millis, Second, Span } +import org.scalatest.{ GivenWhenThen, Matchers } + +import scala.concurrent.duration._ +import scala.language.postfixOps + +class CircuitBreakerProxySpec extends AkkaSpec() with GivenWhenThen with Matchers with ScalaFutures { + + val baseCircuitBreakerPropsBuilder = + CircuitBreakerPropsBuilder( + maxFailures = 2, + callTimeout = 200 millis, + resetTimeout = 1 second, + failureDetector = { + _ == "FAILURE" + }) + + trait CircuitBreakerScenario { + val sender = TestProbe() + val eventListener = TestProbe() + val receiver = TestProbe() + + def circuitBreaker: ActorRef + + def defaultCircuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + def receiverRespondsWithFailureToRequest(request: Any) = { + sender.send(circuitBreaker, request) + receiver.expectMsg(request) + receiver.reply("FAILURE") + sender.expectMsg("FAILURE") + } + + def receiverRespondsToRequestWith(request: Any, reply: Any) = { + sender.send(circuitBreaker, request) + receiver.expectMsg(request) + receiver.reply(reply) + sender.expectMsg(reply) + } + + def circuitBreakerReceivesSelfNotificationMessage() = + receiver.expectNoMsg(baseCircuitBreakerPropsBuilder.resetTimeout.duration / 4) + + def resetTimeoutExpires() = + receiver.expectNoMsg(baseCircuitBreakerPropsBuilder.resetTimeout.duration + 100.millis) + + def callTimeoutExpiresWithoutResponse() = + sender.expectNoMsg(baseCircuitBreakerPropsBuilder.callTimeout.duration + 100.millis) + + def messageIsRejectedWithOpenCircuitNotification(message: Any) = { + sender.send(circuitBreaker, message) + sender.expectMsg(CircuitOpenFailure(message)) + } + + } + + "CircuitBreakerActor" should { + + "act as a transparent proxy in case of successful requests-replies - forward to target" in { + Given("A circuit breaker proxy pointing to a target actor") + val receiver = TestProbe() + val circuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + When("A message is sent to the proxy actor") + TestProbe().send(circuitBreaker, "test message") + + Then("The target actor receives the message") + receiver.expectMsg("test message") + } + + "act as a transparent proxy in case of successful requests-replies - full cycle" in { + Given("A circuit breaker proxy pointing to a target actor") + val receiver = TestProbe() + val circuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + When("A sender sends a message to the target actor via the proxy actor") + val sender = TestProbe() + sender.send(circuitBreaker, "test message") + + receiver.expectMsg("test message") + + And("The target actor replies to the message") + receiver.reply("response") + + Then("The reply is sent to the sender") + sender.expectMsg("response") + } + + "forward further messages before receiving the response of the first one" in { + Given("A circuit breaker proxy pointing to a target actor") + val receiver = TestProbe() + val circuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + When("A batch of messages is sent to the target actor via the proxy") + val sender = TestProbe() + sender.send(circuitBreaker, "test message1") + sender.send(circuitBreaker, "test message2") + 
sender.send(circuitBreaker, "test message3") + + And("The receiver doesn't reply to any of those messages") + + Then("All the messages in the batch are sent") + receiver.expectMsg("test message1") + receiver.expectMsg("test message2") + receiver.expectMsg("test message3") + } + + "send responses to the right sender" in { + Given("A circuit breaker proxy pointing to a target actor") + val receiver = TestProbe() + val circuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + And("Two different sender actors") + val sender1 = TestProbe() + val sender2 = TestProbe() + + When("The two actors are sending messages to the target actor through the proxy") + sender1.send(circuitBreaker, "test message1") + sender2.send(circuitBreaker, "test message2") + + And("The target actor replies to those messages") + receiver.expectMsg("test message1") + receiver.reply("response1") + + receiver.expectMsg("test message2") + receiver.reply("response2") + + Then("The replies are forwarded to the correct sender") + sender1.expectMsg("response1") + sender2.expectMsg("response2") + } + + "return failed responses too" in { + Given("A circuit breaker proxy pointing to a target actor") + val receiver = TestProbe() + val circuitBreaker = system.actorOf(baseCircuitBreakerPropsBuilder.props(target = receiver.ref)) + + When("A sender sends a request to the target actor through the proxy") + val sender = TestProbe() + sender.send(circuitBreaker, "request") + + And("The target actor replies with a failure response") + receiver.expectMsg("request") + receiver.reply("FAILURE") + + Then("The failure response is returned") + sender.expectMsg("FAILURE") + } + + "enter open state after reaching the threshold of failed responses" in new CircuitBreakerScenario { + Given("A circuit breaker proxy pointing to a target actor") + val circuitBreaker = defaultCircuitBreaker + + When("A number of consecutive requests equal to the maxFailures configuration of the circuit breaker fail") + (1 to baseCircuitBreakerPropsBuilder.maxFailures) foreach { index ⇒ + receiverRespondsWithFailureToRequest(s"request$index") + } + + circuitBreakerReceivesSelfNotificationMessage() + + Then("The circuit is in Open state: if a further message is sent it is not forwarded") + sender.send(circuitBreaker, "request in open state") + receiver.expectNoMsg + } + + "respond with a CircuitOpenFailure message when in open state" in new CircuitBreakerScenario { + Given("A circuit breaker proxy pointing to a target actor") + val circuitBreaker = defaultCircuitBreaker + + When("A number of consecutive requests equal to the maxFailures configuration of the circuit breaker fail") + (1 to baseCircuitBreakerPropsBuilder.maxFailures) foreach { index ⇒ + receiverRespondsWithFailureToRequest(s"request$index") + } + + circuitBreakerReceivesSelfNotificationMessage() + + Then("The circuit is in Open state: any further request is replied to with a CircuitOpenFailure response") + sender.send(circuitBreaker, "request in open state") + sender.expectMsg(CircuitOpenFailure("request in open state")) + } + + "respond with the converted CircuitOpenFailure if a converter is provided" in new CircuitBreakerScenario { + Given("A circuit breaker proxy pointing to a target actor built with a function to convert the CircuitOpenFailure response into a String response") + val circuitBreaker = system.actorOf( + baseCircuitBreakerPropsBuilder + .copy(openCircuitFailureConverter = { failureMsg ⇒ s"NOT SENT: ${failureMsg.failedMsg}" }) + 
.props(receiver.ref)) + + When("A number of consecutive requests equal to the maxFailures configuration of the circuit breaker fail") + (1 to baseCircuitBreakerPropsBuilder.maxFailures) foreach { index ⇒ + receiverRespondsWithFailureToRequest(s"request$index") + } + + circuitBreakerReceivesSelfNotificationMessage() + + Then("Any further request receives the converted response instead of the CircuitOpenFailure one") + sender.send(circuitBreaker, "request in open state") + sender.expectMsg("NOT SENT: request in open state") + } + + "enter open state after reaching the threshold of timed-out responses" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("A number of requests equal to the failure threshold is sent without receiving a response within the configured timeout") + sender.send(circuitBreaker, "request1") + sender.send(circuitBreaker, "request2") + + callTimeoutExpiresWithoutResponse() + + receiver.expectMsg("request1") + receiver.reply("this should be timed out 1") + + receiver.expectMsg("request2") + receiver.reply("this should be timed out 2") + + circuitBreakerReceivesSelfNotificationMessage() + + Then("The circuit is in Open state: any further request is not forwarded to the target") + sender.send(circuitBreaker, "request in open state") + receiver.expectNoMsg + } + + "enter HALF OPEN state after the given state timeout, sending the first message only" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("ENTERING OPEN STATE") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + circuitBreakerReceivesSelfNotificationMessage() + + Then("Messages are ignored") + messageIsRejectedWithOpenCircuitNotification("IGNORED SINCE IN OPEN STATE1") + messageIsRejectedWithOpenCircuitNotification("IGNORED SINCE IN OPEN STATE2") + + When("ENTERING HALF OPEN STATE") + resetTimeoutExpires() + + Then("First message should be forwarded, following ones ignored if the failure persists") + sender.send(circuitBreaker, "First message in half-open state, should be forwarded") + sender.send(circuitBreaker, "Second message in half-open state, should be ignored") + + receiver.expectMsg("First message in half-open state, should be forwarded") + receiver.expectNoMsg() + + sender.expectMsg(CircuitOpenFailure("Second message in half-open state, should be ignored")) + + } + + "return to CLOSED state from HALF-OPEN if a successful message response notification is received" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("Entering HALF OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + resetTimeoutExpires() + + And("Receiving a successful response") + receiverRespondsToRequestWith("First message in half-open state, should be forwarded", "This should close the circuit") + + circuitBreakerReceivesSelfNotificationMessage() + + Then("The circuit is re-closed") + sender.send(circuitBreaker, "request1") + receiver.expectMsg("request1") + + sender.send(circuitBreaker, "request2") + receiver.expectMsg("request2") + + } + + "return to OPEN state from HALF-OPEN if a FAILURE message response is received" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val 
circuitBreaker = defaultCircuitBreaker + + When("Entering HALF OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + resetTimeoutExpires() + + And("Receiving a failure response") + receiverRespondsWithFailureToRequest("First message in half-open state, should be forwarded") + + circuitBreakerReceivesSelfNotificationMessage() + + Then("circuit is opened again") + sender.send(circuitBreaker, "this should be ignored") + receiver.expectNoMsg() + sender.expectMsg(CircuitOpenFailure("this should be ignored")) + + } + + "notify an event status change listener when changing state" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + override val circuitBreaker = system.actorOf( + baseCircuitBreakerPropsBuilder + .copy(circuitEventListener = Some(eventListener.ref)) + .props(target = receiver.ref)) + + When("Entering OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + circuitBreakerReceivesSelfNotificationMessage() + + Then("An event is sent") + eventListener.expectMsg(CircuitOpen(circuitBreaker)) + + When("Entering HALF OPEN state") + resetTimeoutExpires() + + Then("An event is sent") + eventListener.expectMsg(CircuitHalfOpen(circuitBreaker)) + + When("Entering CLOSED state") + receiverRespondsToRequestWith("First message in half-open state, should be forwarded", "This should close the circuit") + Then("An event is sent") + eventListener.expectMsg(CircuitClosed(circuitBreaker)) + + } + + "stop if the target actor terminates itself" in new CircuitBreakerScenario { + Given("An actor that will terminate when receiving a message") + import akka.actor.ActorDSL._ + val suicidalActor = actor( + new Act { + become { + case anyMessage ⇒ + sender() ! 
"dying now" + context stop self + } + }) + + And("A circuit breaker actor proxying another actor") + val circuitBreaker = system.actorOf( + baseCircuitBreakerPropsBuilder.props(target = suicidalActor)) + + val suicidalActorWatch = TestProbe() + suicidalActorWatch.watch(suicidalActor) + + val circuitBreakerWatch = TestProbe() + circuitBreakerWatch.watch(circuitBreaker) + + When("The target actor stops") + sender.send(circuitBreaker, "this message will kill the target") + sender.expectMsg("dying now") + suicidalActorWatch.expectTerminated(suicidalActor) + + Then("The circuit breaker proxy actor is terminated too") + circuitBreakerWatch.expectTerminated(circuitBreaker) + } + + "stop if the target actor is stopped" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + val receiverActorWatch = TestProbe() + receiverActorWatch.watch(receiver.ref) + + val circuitBreakerWatch = TestProbe() + circuitBreakerWatch.watch(circuitBreaker) + + When("The target actor stops") + sender.send(circuitBreaker, Passthrough(PoisonPill)) + receiverActorWatch.expectTerminated(receiver.ref) + + Then("The circuit breaker proxy actor is terminated too") + circuitBreakerWatch.expectTerminated(circuitBreaker) + } + + "send a any message enveloped into a TellOnly case class without expecting a response in closed state" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("A number of request equal to the timed-out responses wrapped in a TellOnly threashold is done without receiving response within the configured timeout") + sender.send(circuitBreaker, TellOnly("Fire and forget 1")) + sender.send(circuitBreaker, TellOnly("Fire and forget 2")) + receiver.expectMsg("Fire and forget 1") + receiver.expectMsg("Fire and forget 2") + + And("No response is received") + callTimeoutExpiresWithoutResponse() + + Then("The circuit is still closed") + sender.send(circuitBreaker, "This should be received too") + receiver.expectMsg("This should be received too") + } + + "block messages wrapped in TellOnly when in open state" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("Circuit enters OPEN state") + (1 to baseCircuitBreakerPropsBuilder.maxFailures) foreach { index ⇒ + receiverRespondsWithFailureToRequest(s"request$index") + } + + circuitBreakerReceivesSelfNotificationMessage() + + Then("A TellOnly wrapped message is not sent") + sender.send(circuitBreaker, TellOnly("This should NOT be received")) + receiver.expectNoMsg() + } + + "send a any message enveloped into a Passthrough case class without expecting a response even in closed state" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("Circuit enters OPEN state") + (1 to baseCircuitBreakerPropsBuilder.maxFailures) foreach { index ⇒ + receiverRespondsWithFailureToRequest(s"request$index") + } + + circuitBreakerReceivesSelfNotificationMessage() + + Then("A Passthrough wrapped message is sent") + sender.send(circuitBreaker, Passthrough("This should be received")) + receiver.expectMsg("This should be received") + + And("The circuit is still closed for ordinary messages") + sender.send(circuitBreaker, "This should NOT be received") + receiver.expectNoMsg() + } + } + + "Ask Extension" should { + implicit val patienceConfig = 
PatienceConfig(timeout = Span(1, Second), interval = Span(100, Millis)) + import Implicits.askWithCircuitBreaker + + import scala.concurrent.ExecutionContext.Implicits.global + implicit val timeout: Timeout = 2.seconds + + "work as an ASK pattern if circuit is closed" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("Doing an askWithCircuitBreaker request") + val responseFuture = circuitBreaker.askWithCircuitBreaker("request") + + Then("The message is sent to the target actor") + receiver.expectMsg("request") + + When("The target actor replies") + receiver.reply("response") + + Then("The response is available as the result of the future returned by the askWithCircuitBreaker method") + whenReady(responseFuture) { response ⇒ + response should be("response") + } + } + + "transform the response into a failure with OpenCircuitException cause if circuit is open" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("The circuit breaker proxy enters OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + circuitBreakerReceivesSelfNotificationMessage() + + And("Doing an askWithCircuitBreaker request") + val responseFuture = circuitBreaker.askWithCircuitBreaker("request") + + Then("The message is NOT sent to the target actor") + receiver.expectNoMsg() + + And("The response is converted into a failure") + whenReady(responseFuture.failed) { failure ⇒ + failure shouldBe a[OpenCircuitException] + } + } + } + + "Future Extension" should { + implicit val patienceConfig = PatienceConfig(timeout = Span(1, Second), interval = Span(100, Millis)) + import Implicits.futureExtensions + import akka.pattern.ask + + import scala.concurrent.ExecutionContext.Implicits.global + implicit val timeout: Timeout = 2.seconds + + "work as an ASK pattern if circuit is closed" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("Doing an ask request protected with failForOpenCircuit") + val responseFuture = (circuitBreaker ? "request").failForOpenCircuit + + Then("The message is sent to the target actor") + receiver.expectMsg("request") + + When("The target actor replies") + receiver.reply("response") + + Then("The response is available as the result of the protected future") + whenReady(responseFuture) { response ⇒ + response should be("response") + } + } + + "transform the response into a failure with OpenCircuitException cause if circuit is open" in new CircuitBreakerScenario { + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("The circuit breaker proxy enters OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + circuitBreakerReceivesSelfNotificationMessage() + + And("Doing an ask request protected with failForOpenCircuit") + val responseFuture = (circuitBreaker ? 
"request").failForOpenCircuit + + Then("The message is NOT sent to the target actor") + receiver.expectNoMsg() + + And("The response is converted into a failure") + whenReady(responseFuture.failed) { failure ⇒ + failure shouldBe a[OpenCircuitException] + } + } + + "transform the response into a failure with the given exception as cause if circuit is open" in new CircuitBreakerScenario { + class MyException(message: String) extends Exception(message) + + Given("A circuit breaker actor proxying a test probe") + val circuitBreaker = defaultCircuitBreaker + + When("The circuit breaker proxy enters OPEN state") + receiverRespondsWithFailureToRequest("request1") + receiverRespondsWithFailureToRequest("request2") + + circuitBreakerReceivesSelfNotificationMessage() + + And("Doing an ask request protected with failForOpenCircuitWith") + val responseFuture = (circuitBreaker ? "request").failForOpenCircuitWith(new MyException("Circuit is open")) + + Then("The message is NOT sent to the target actor") + receiver.expectNoMsg() + + And("The response is converted into a failure") + whenReady(responseFuture.failed) { failure ⇒ + failure shouldBe a[MyException] + failure.getMessage() should be("Circuit is open") + } + } + } + +} diff --git a/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala new file mode 100644 index 0000000000..18641cb884 --- /dev/null +++ b/akka-contrib/src/test/scala/akka/contrib/circuitbreaker/sample/CircuitBreaker.scala @@ -0,0 +1,204 @@ +/** + * Copyright (C) 2014-2016 Typesafe Inc. + */ +package akka.contrib.circuitbreaker.sample + +import akka.actor.{ Actor, ActorLogging, ActorRef } +import akka.contrib.circuitbreaker.CircuitBreakerProxy.{ CircuitBreakerPropsBuilder, CircuitOpenFailure } +import akka.contrib.circuitbreaker.sample.CircuitBreaker.AskFor +import akka.util.Timeout + +import scala.concurrent.duration._ +import scala.util.{ Failure, Success, Random } + +//#simple-service +object SimpleService { + case class Request(content: String) + case class Response(content: Either[String, String]) + case object ResetCount +} + +/** + * This is a simple actor simulating a service that is + * - becoming slower as the frequency of input requests increases + * - failing around 30% of the requests + */ +class SimpleService extends Actor with ActorLogging { + import SimpleService._ + + var messageCount = 0 + + import context.dispatcher + + context.system.scheduler.schedule(1.second, 1.second, self, ResetCount) + + override def receive = { + case ResetCount ⇒ + messageCount = 0 + + case Request(content) ⇒ + messageCount += 1 + // simulate workload + Thread.sleep(100 * messageCount) + // Fails around 30% of the time + if (Random.nextInt(100) < 70) { + sender ! Response(Right(s"Successfully processed $content")) + } else { + sender ! 
Response(Left(s"Failure processing $content")) + } + + } +} +//#simple-service + +object CircuitBreaker { + case class AskFor(what: String) +} + +//#basic-sample +class CircuitBreaker(potentiallyFailingService: ActorRef) extends Actor with ActorLogging { + import SimpleService._ + + val serviceCircuitBreaker = + context.actorOf( + CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = 2.seconds, resetTimeout = 30.seconds) + .copy( + failureDetector = { + _ match { + case Response(Left(_)) ⇒ true + case _ ⇒ false + } + }) + .props(potentiallyFailingService), + "serviceCircuitBreaker") + + override def receive: Receive = { + case AskFor(requestToForward) ⇒ + serviceCircuitBreaker ! Request(requestToForward) + + case Right(Response(content)) ⇒ + //handle response + log.info("Got successful response {}", content) + + case Response(Right(content)) ⇒ + //handle response + log.info("Got successful response {}", content) + + case Response(Left(content)) ⇒ + //handle response + log.info("Got failed response {}", content) + + case CircuitOpenFailure(failedMsg) ⇒ + log.warning("Unable to send message {}", failedMsg) + } +} +//#basic-sample + +//#ask-sample +class CircuitBreakerAsk(potentiallyFailingService: ActorRef) extends Actor with ActorLogging { + import SimpleService._ + import akka.pattern._ + + implicit val askTimeout: Timeout = 2.seconds + + val serviceCircuitBreaker = + context.actorOf( + CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = askTimeout, resetTimeout = 30.seconds) + .copy( + failureDetector = { + _ match { + case Response(Left(_)) ⇒ true + case _ ⇒ false + } + }) + .copy( + openCircuitFailureConverter = { failure ⇒ + Left(s"Circuit open when processing ${failure.failedMsg}") + }) + .props(potentiallyFailingService), + "serviceCircuitBreaker") + + import context.dispatcher + + override def receive: Receive = { + case AskFor(requestToForward) ⇒ + (serviceCircuitBreaker ? Request(requestToForward)).mapTo[Either[String, String]].onComplete { + case Success(Right(successResponse)) ⇒ + //handle response + log.info("Got successful response {}", successResponse) + + case Success(Left(failureResponse)) ⇒ + //handle response + log.info("Got successful response {}", failureResponse) + + case Failure(exception) ⇒ + //handle response + log.info("Got successful response {}", exception) + + } + } +} +//#ask-sample + +//#ask-with-failure-sample +class CircuitBreakerAskWithFailure(potentiallyFailingService: ActorRef) extends Actor with ActorLogging { + import SimpleService._ + import akka.pattern._ + import akka.contrib.circuitbreaker.Implicits.futureExtensions + + implicit val askTimeout: Timeout = 2.seconds + + val serviceCircuitBreaker = + context.actorOf( + CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = askTimeout, resetTimeout = 30.seconds) + .props(target = potentiallyFailingService), + "serviceCircuitBreaker") + + import context.dispatcher + + override def receive: Receive = { + case AskFor(requestToForward) ⇒ + (serviceCircuitBreaker ? 
Request(requestToForward)).failForOpenCircuit.mapTo[String].onComplete { + case Success(successResponse) ⇒ + //handle response + log.info("Got successful response {}", successResponse) + + case Failure(exception) ⇒ + //handle response + log.info("Got successful response {}", exception) + + } + } +} +//#ask-with-failure-sample + +//#ask-with-circuit-breaker-sample +class CircuitBreakerAskWithCircuitBreaker(potentiallyFailingService: ActorRef) extends Actor with ActorLogging { + import SimpleService._ + import akka.contrib.circuitbreaker.Implicits.askWithCircuitBreaker + + implicit val askTimeout: Timeout = 2.seconds + + val serviceCircuitBreaker = + context.actorOf( + CircuitBreakerPropsBuilder(maxFailures = 3, callTimeout = askTimeout, resetTimeout = 30.seconds) + .props(target = potentiallyFailingService), + "serviceCircuitBreaker") + + import context.dispatcher + + override def receive: Receive = { + case AskFor(requestToForward) ⇒ + serviceCircuitBreaker.askWithCircuitBreaker(Request(requestToForward)).mapTo[String].onComplete { + case Success(successResponse) ⇒ + //handle response + log.info("Got successful response {}", successResponse) + + case Failure(exception) ⇒ + //handle response + log.info("Got successful response {}", exception) + + } + } +} +//#ask-with-circuit-breaker-sample \ No newline at end of file diff --git a/akka-contrib/src/test/scala/akka/contrib/jul/JavaLoggerSpec.scala b/akka-contrib/src/test/scala/akka/contrib/jul/JavaLoggerSpec.scala index f3857fa95e..85338ee1e5 100644 --- a/akka-contrib/src/test/scala/akka/contrib/jul/JavaLoggerSpec.scala +++ b/akka-contrib/src/test/scala/akka/contrib/jul/JavaLoggerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.jul diff --git a/akka-contrib/src/test/scala/akka/contrib/mailbox/PeekMailboxSpec.scala b/akka-contrib/src/test/scala/akka/contrib/mailbox/PeekMailboxSpec.scala index 3f06fa9701..c6390a3708 100644 --- a/akka-contrib/src/test/scala/akka/contrib/mailbox/PeekMailboxSpec.scala +++ b/akka-contrib/src/test/scala/akka/contrib/mailbox/PeekMailboxSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.mailbox diff --git a/akka-contrib/src/test/scala/akka/contrib/pattern/AggregatorSpec.scala b/akka-contrib/src/test/scala/akka/contrib/pattern/AggregatorSpec.scala index b9f0fc9278..acd2e56070 100644 --- a/akka-contrib/src/test/scala/akka/contrib/pattern/AggregatorSpec.scala +++ b/akka-contrib/src/test/scala/akka/contrib/pattern/AggregatorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.pattern diff --git a/akka-contrib/src/test/scala/akka/contrib/pattern/ReliableProxyDocSpec.scala b/akka-contrib/src/test/scala/akka/contrib/pattern/ReliableProxyDocSpec.scala index b38b67364e..27816a837d 100644 --- a/akka-contrib/src/test/scala/akka/contrib/pattern/ReliableProxyDocSpec.scala +++ b/akka-contrib/src/test/scala/akka/contrib/pattern/ReliableProxyDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.contrib.pattern diff --git a/akka-contrib/src/test/scala/akka/contrib/throttle/TimerBasedThrottlerSpec.scala b/akka-contrib/src/test/scala/akka/contrib/throttle/TimerBasedThrottlerSpec.scala index 83f9f4bf10..9edae87c17 100644 --- a/akka-contrib/src/test/scala/akka/contrib/throttle/TimerBasedThrottlerSpec.scala +++ b/akka-contrib/src/test/scala/akka/contrib/throttle/TimerBasedThrottlerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.contrib.throttle diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DistributedData.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DistributedData.scala index 138c62d5b9..a5270b102e 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DistributedData.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DistributedData.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/FastMerge.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/FastMerge.scala index 385dcf16e9..105dd67e48 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/FastMerge.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/FastMerge.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Flag.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Flag.scala index 233e6a66bd..7f653fd1b7 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Flag.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Flag.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/GCounter.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/GCounter.scala index b4d679edc2..3205014fda 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/GCounter.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/GCounter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/GSet.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/GSet.scala index 4c577449e8..d8d8133ab6 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/GSet.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/GSet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala index 99fc56a94e..371c1f64a1 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Key.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWMap.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWMap.scala index c12496b2ba..962cfb910d 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWMap.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWRegister.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWRegister.scala index 0bf35b7f8e..e8d4f00801 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWRegister.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/LWWRegister.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMap.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMap.scala index 4bd44ca720..e68ba0cf59 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMap.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMultiMap.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMultiMap.scala index 71ad5f02e1..1814f0fd06 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMultiMap.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORMultiMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala index 3bb45bebfe..a067851f45 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ORSet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounter.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounter.scala index 8be1943639..aa11e83405 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounter.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounterMap.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounterMap.scala index 4da387d284..f7bb634151 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounterMap.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PNCounterMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PruningState.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PruningState.scala index 2328c3a9a7..ddda400f11 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/PruningState.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/PruningState.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ReplicatedData.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ReplicatedData.scala index 736f7d9e53..5060a3e862 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/ReplicatedData.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/ReplicatedData.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala index 2b94d51c0f..618d383268 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/VersionVector.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/VersionVector.scala index c3a21cfd19..f8899bae11 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/VersionVector.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/VersionVector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializer.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializer.scala index 17a8928cfa..d5d4ab3319 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializer.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata.protobuf diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala index 4ba296e73d..4384638b4d 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.ddata.protobuf diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/SerializationSupport.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/SerializationSupport.scala index 646849f278..578775d5c7 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/SerializationSupport.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/SerializationSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.cluster.ddata.protobuf diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/JepsenInspiredInsertSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/JepsenInspiredInsertSpec.scala index 54de6e2e27..fa1c6f7f3c 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/JepsenInspiredInsertSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/JepsenInspiredInsertSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/PerformanceSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/PerformanceSpec.scala index e7fb8a839a..ce31abe7fa 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/PerformanceSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/PerformanceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala index becfcf2dfc..062e36b30d 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorPruningSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorPruningSpec.scala index b91c1b8795..3073cfcb17 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorPruningSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorPruningSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorSpec.scala index 00a250c0d1..25bd7e2b56 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.ddata diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/STMultiNodeSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/STMultiNodeSpec.scala index 0f47ca7b78..4ed71715e4 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/STMultiNodeSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/STMultiNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/java/akka/cluster/ddata/JavaImplOfReplicatedData.java b/akka-distributed-data/src/test/java/akka/cluster/ddata/JavaImplOfReplicatedData.java index d45ea2687c..f3ee27d713 100644 --- a/akka-distributed-data/src/test/java/akka/cluster/ddata/JavaImplOfReplicatedData.java +++ b/akka-distributed-data/src/test/java/akka/cluster/ddata/JavaImplOfReplicatedData.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata; diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/FlagSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/FlagSpec.scala index cf7d86be34..9ed3e913b2 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/FlagSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/FlagSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala index dd7bf99c15..d86a6b7ce7 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GSetSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GSetSpec.scala index 3f9c17bbd5..849d1d5ca4 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GSetSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GSetSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala index 6b857c9995..cf8cbc472b 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala index ac3e9f0fab..2dbb62537a 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala index dd54752e81..f9667754c1 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala index 83b0c9ed59..a8b6c2eea5 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala index 00127efc94..da3c06b283 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala index fb08b36097..aad636a941 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala index 620b90f7e7..33d2736fc5 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala index 0990bae398..41ba17109c 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala index 3f1b83a4b9..6207b1b8ef 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PruningStateSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PruningStateSpec.scala index ae60aecbf1..bce2dc5d73 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PruningStateSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PruningStateSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/VersionVectorSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/VersionVectorSpec.scala index 918a8e5019..586d1ef8f6 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/VersionVectorSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/VersionVectorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/WriteAggregatorSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/WriteAggregatorSpec.scala index 92de652ca6..d24301dc53 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/WriteAggregatorSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/WriteAggregatorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializerSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializerSpec.scala index c5d667b5aa..b131723d14 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializerSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatedDataSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata.protobuf diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializerSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializerSpec.scala index bef819ce28..599732590d 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializerSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.cluster.ddata.protobuf diff --git a/akka-docs-dev/_sphinx/exts/includecode.py b/akka-docs-dev/_sphinx/exts/includecode.py deleted file mode 100644 index 6ee9ed3bed..0000000000 --- a/akka-docs-dev/_sphinx/exts/includecode.py +++ /dev/null @@ -1,145 +0,0 @@ -import os -import codecs -from os import path - -from docutils import nodes -from docutils.parsers.rst import Directive, directives - -class IncludeCode(Directive): - """ - Include a code example from a file with sections delimited with special comments. 
- """ - - has_content = False - required_arguments = 1 - optional_arguments = 0 - final_argument_whitespace = False - option_spec = { - 'section': directives.unchanged_required, - 'comment': directives.unchanged_required, - 'marker': directives.unchanged_required, - 'include': directives.unchanged_required, - 'exclude': directives.unchanged_required, - 'hideexcludes': directives.flag, - 'linenos': directives.flag, - 'language': directives.unchanged_required, - 'encoding': directives.encoding, - 'prepend': directives.unchanged_required, - 'append': directives.unchanged_required, - } - - def run(self): - document = self.state.document - arg0 = self.arguments[0] - (filename, sep, section) = arg0.partition('#') - - if not document.settings.file_insertion_enabled: - return [document.reporter.warning('File insertion disabled', - line=self.lineno)] - env = document.settings.env - if filename.startswith('/') or filename.startswith(os.sep): - rel_fn = filename[1:] - else: - docdir = path.dirname(env.doc2path(env.docname, base=None)) - rel_fn = path.join(docdir, filename) - try: - fn = path.join(env.srcdir, rel_fn) - except UnicodeDecodeError: - # the source directory is a bytestring with non-ASCII characters; - # let's try to encode the rel_fn in the file system encoding - rel_fn = rel_fn.encode(sys.getfilesystemencoding()) - fn = path.join(env.srcdir, rel_fn) - - encoding = self.options.get('encoding', env.config.source_encoding) - codec_info = codecs.lookup(encoding) - try: - f = codecs.StreamReaderWriter(open(fn, 'U'), - codec_info[2], codec_info[3], 'strict') - lines = f.readlines() - f.close() - except (IOError, OSError): - return [document.reporter.warning( - 'Include file %r not found or reading it failed' % filename, - line=self.lineno)] - except UnicodeError: - return [document.reporter.warning( - 'Encoding %r used for reading included file %r seems to ' - 'be wrong, try giving an :encoding: option' % - (encoding, filename))] - - comment = self.options.get('comment', '//') - marker = self.options.get('marker', comment + '#') - lenm = len(marker) - if not section: - section = self.options.get('section') - include_sections = self.options.get('include', '') - exclude_sections = self.options.get('exclude', '') - include = set(include_sections.split(',')) if include_sections else set() - exclude = set(exclude_sections.split(',')) if exclude_sections else set() - hideexcludes = 'hideexcludes' in self.options - if section: - include |= set([section]) - within = set() - res = [] - excluding = False - for line in lines: - index = line.find(marker) - if index >= 0: - section_name = line[index+lenm:].strip() - if section_name in within: - within ^= set([section_name]) - if excluding and not (exclude & within): - excluding = False - else: - within |= set([section_name]) - if not excluding and (exclude & within): - excluding = True - if not hideexcludes: - res.append(' ' * index + comment + ' ' + section_name.replace('-', ' ') + ' ...\n') - elif not (exclude & within) and (not include or (include & within)): - res.append(line) - lines = res - - def countwhile(predicate, iterable): - count = 0 - for x in iterable: - if predicate(x): - count += 1 - else: - return count - - nonempty = filter(lambda l: l.strip(), lines) - if not nonempty: - return [document.reporter.error( - "Snippet ({}#{}) not found!".format(filename, section), - line=self.lineno - )] - - tabcounts = map(lambda l: countwhile(lambda c: c == ' ', l), nonempty) - tabshift = min(tabcounts) if tabcounts else 0 - - if tabshift > 0: - lines = 
map(lambda l: l[tabshift:] if len(l) > tabshift else l, lines) - - prepend = self.options.get('prepend') - append = self.options.get('append') - if prepend: - lines.insert(0, prepend + '\n') - if append: - lines.append(append + '\n') - - text = ''.join(lines) - retnode = nodes.literal_block(text, text, source=fn) - retnode.line = 1 - retnode.attributes['line_number'] = self.lineno - language = self.options.get('language') - if language: - retnode['language'] = language - if 'linenos' in self.options: - retnode['linenos'] = True - document.settings.env.note_dependency(rel_fn) - return [retnode] - -def setup(app): - app.require_sphinx('1.0') - app.add_directive('includecode', IncludeCode) diff --git a/akka-docs-dev/_sphinx/pygments/setup.py b/akka-docs-dev/_sphinx/pygments/setup.py deleted file mode 100644 index cdfa31d397..0000000000 --- a/akka-docs-dev/_sphinx/pygments/setup.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Akka syntax styles for Pygments. -""" - -from setuptools import setup - -entry_points = """ -[pygments.styles] -simple = styles.simple:SimpleStyle -""" - -setup( - name = 'akkastyles', - version = '0.1', - description = __doc__, - author = "Akka", - packages = ['styles'], - entry_points = entry_points -) diff --git a/akka-docs-dev/_sphinx/pygments/styles/__init__.py b/akka-docs-dev/_sphinx/pygments/styles/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/akka-docs-dev/_sphinx/pygments/styles/simple.py b/akka-docs-dev/_sphinx/pygments/styles/simple.py deleted file mode 100644 index bdf3c7878e..0000000000 --- a/akka-docs-dev/_sphinx/pygments/styles/simple.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.styles.akka - ~~~~~~~~~~~~~~~~~~~~~~~~ - - Simple style for Scala highlighting. -""" - -from pygments.style import Style -from pygments.token import Keyword, Name, Comment, String, Error, \ - Number, Operator, Generic, Whitespace - - -class SimpleStyle(Style): - """ - Simple style for Scala highlighting. 
- """ - - background_color = "#f0f0f0" - default_style = "" - - styles = { - Whitespace: "#f0f0f0", - Comment: "#777766", - Comment.Preproc: "", - Comment.Special: "", - - Keyword: "#000080", - Keyword.Pseudo: "", - Keyword.Type: "", - - Operator: "#000000", - Operator.Word: "", - - Name.Builtin: "#000000", - Name.Function: "#000000", - Name.Class: "#000000", - Name.Namespace: "#000000", - Name.Exception: "#000000", - Name.Variable: "#000000", - Name.Constant: "bold #000000", - Name.Label: "#000000", - Name.Entity: "#000000", - Name.Attribute: "#000000", - Name.Tag: "#000000", - Name.Decorator: "#000000", - - String: "#008000", - String.Doc: "", - String.Interpol: "", - String.Escape: "", - String.Regex: "", - String.Symbol: "", - String.Other: "", - Number: "#008000", - - Error: "border:#FF0000" - } diff --git a/akka-docs-dev/_sphinx/static/akka-intellij-code-style.jar b/akka-docs-dev/_sphinx/static/akka-intellij-code-style.jar deleted file mode 100644 index 55866c22c5..0000000000 Binary files a/akka-docs-dev/_sphinx/static/akka-intellij-code-style.jar and /dev/null differ diff --git a/akka-docs-dev/_sphinx/static/akka.png b/akka-docs-dev/_sphinx/static/akka.png deleted file mode 100644 index 8cdc42272e..0000000000 Binary files a/akka-docs-dev/_sphinx/static/akka.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/static/favicon.ico b/akka-docs-dev/_sphinx/static/favicon.ico deleted file mode 100644 index 06e53b6403..0000000000 Binary files a/akka-docs-dev/_sphinx/static/favicon.ico and /dev/null differ diff --git a/akka-docs-dev/_sphinx/static/logo.png b/akka-docs-dev/_sphinx/static/logo.png deleted file mode 100644 index 8cdc42272e..0000000000 Binary files a/akka-docs-dev/_sphinx/static/logo.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/themes/akka/layout.html b/akka-docs-dev/_sphinx/themes/akka/layout.html deleted file mode 100644 index 010150742e..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/layout.html +++ /dev/null @@ -1,371 +0,0 @@ -{# - akka/layout.html - ~~~~~~~~~~~~~~~~~ -#} - -{% extends "basic/layout.html" %} -{% set script_files = script_files + ['_static/toc.js'] %} -{% set script_files = script_files + ['_static/prettify.js'] %} -{% set script_files = script_files + ['_static/highlightCode.js'] %} -{% set script_files = script_files + ['_static/effects.core.js'] %} -{% set script_files = script_files + ['_static/effects.highlight.js'] %} -{% set script_files = script_files + ['_static/scrollTo.js'] %} -{% set script_files = script_files + ['_static/contentsFix.js'] %} -{% set script_files = script_files + ['_static/warnOldDocs.js'] %} -{% set script_files = script_files + ['_static/ga.js'] %} -{% set css_files = css_files + ['_static/prettify.css'] %} -{% set css_files = css_files + ['_static/base.css'] %} -{% set css_files = css_files + ['_static/docs.css'] %} -{% set css_files = css_files + ['http://fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,600,700'] %} - -{# do not display relbars #} -{% block relbar1 %}{% endblock %} -{% block relbar2 %}{% endblock %} - -{% block extrahead %} - {%- if include_analytics %} - - - - - {%- endif %} -{% endblock %} - -{% block content %} - {%- block akkaheader %} - - {%- endblock %} -
- {{ title }}
- {%- if include_analytics %}
- Loading
- {%- endif -%}
- {% block body %}{% endblock %}
- {%- if suppressToc is sameas True -%}
- {%- else -%}
- Contents
- {%- endif -%}
- {%- block akkafooter %} - -{%- if include_analytics %} - - - -{%- endif %} - - {% block footer %}{% endblock %} - {%- endblock %} -{% endblock %} - - diff --git a/akka-docs-dev/_sphinx/themes/akka/static/akka_full_color.svg b/akka-docs-dev/_sphinx/themes/akka/static/akka_full_color.svg deleted file mode 100644 index 239d3edb5d..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/akka_full_color.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_full_color.svg b/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_full_color.svg deleted file mode 100644 index 8d02b531b7..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_full_color.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_reverse.svg b/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_reverse.svg deleted file mode 100644 index e32101e54f..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/akka_icon_reverse.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/akka_reverse.svg b/akka-docs-dev/_sphinx/themes/akka/static/akka_reverse.svg deleted file mode 100644 index a002ae7937..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/akka_reverse.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/base.css b/akka-docs-dev/_sphinx/themes/akka/static/base.css deleted file mode 100644 index 0ccd0c35c4..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/base.css +++ /dev/null @@ -1,94 +0,0 @@ -body { color: rgba(0, 0, 0, 0.6); } -.navbar { background: #ffffff; position: relative; z-index: 150; margin-bottom: 0px; border-top: solid 5px #15A9CE; height: 75px;} -.navbar .nav { float: right; } -.navbar-logo { float: left; } -.navbar-logo .svg-logo { - height: 75px; -} -.logo { margin-top: 30px; margin-bottom: 30px;} -.main { background: #15A9CE; height: 520px;} -.rel { position: relative; } -.box { font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 36px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 38px; margin: 80px 0 20px;} -.small-box { font-size: 18px; line-height: 22px; font-weight: 300; color: #0B5567;} -.bold { font-weight: 600; } -.hexagons { position: absolute; top: 80px; left: 490px; height: 385px; width: 252px; background: url('{{ site.baseurl }}/resources/images/hexagons.png') no-repeat center top; } -.light-strip { background: #f2f2eb; min-height: 200px;} -.under-main { background: #ffffff; } -.darker-strip { background: #EFF2F5; min-height: 200px;} -.under-light-strip { background: #f2f2eb } -.simple-concurrency { position: absolute; top: 155px; left: 290px; width: 200px; text-align: right; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 20px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 22px;} -.simple-concurrency p { font-size: 14px; line-height: 18px; font-weight: 300; color: #0B5567; } -.fault-tolerance { position: absolute; top: 290px; left: 290px; width: 200px; text-align: right; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 20px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 22px;} -.fault-tolerance p { font-size: 14px; line-height: 18px; font-weight: 300; color: #0B5567; } -.high-performance { position: absolute; top: 110px; left: 780px; width: 200px; text-align: left; font-family: 
"Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 20px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 22px;} -.high-performance p { font-size: 14px; line-height: 18px; font-weight: 300; color: #0B5567; } -.no-global-state { position: absolute; top: 245px; left: 780px; width: 200px; text-align: left; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 20px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 22px;} -.no-global-state p { font-size: 14px; line-height: 18px; font-weight: 300; color: #0B5567; } -.extensible { position: absolute; top: 385px; left: 780px; width: 200px; text-align: left; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 20px; font-weight: 400; color: rgba(255, 255, 255, 1); line-height: 22px;} -.extensible p { font-size: 14px; line-height: 18px; font-weight: 300; color: #0B5567; } -.pad { padding-top: 40px; } -.normal { margin-left: 0px; padding-bottom: 30px;} -.normal h3 { color: #326a78; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 22px; font-weight: 400; padding-bottom: 12px;} /*#595050, green: rgba( 44, 166, 33, 0.3)*/ -.no-margin { position: relative; margin-left: 0px; width: 460px; margin-right: 10px;} -.left p { margin-right: 30px; margin-top: 30px; } -.right p { margin-left: 30px; margin-top: 30px; } -hr { border: 0; height: 1px; background-image: -webkit-linear-gradient(left, rgba(0,0,0,0), rgba(89,80,80,.3), rgba(0,0,0,0)); background-image: -moz-linear-gradient(left, rgba(0,0,0,0), rgba(89,80,80,.3), rgba(0,0,0,0)); background-image: -ms-linear-gradient(left, rgba(0,0,0,0), rgba(89,80,80,.3), rgba(0,0,0,0)); background-image: -o-linear-gradient(left, rgba(0,0,0,0), rgba(89,80,80,.3), rgba(0,0,0,0)); } -.slide { margin-top: 26px; overflow: hidden; z-index: 1; width: 460px; position: relative; background: #fefbf3; border: 1px solid rgba(0,0,0,.2); -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.1); -moz-box-shadow: 0 1px 2px rgba(0,0,0,.1); box-shadow: 0 1px 2px rgba(0,0,0,.1); -webkit-border-radius:4px; -moz-border-radius:4px; border-radius:4px; } -.slide .java { z-index: 10; position: absolute; left: 0px; width: 460px; } -.slide .scala { position: absolute; z-index: 11; width: 460px; background: #fefbf3; } -pre { border: 0px solid #333; background-color: transparent;} -.tab-scala { position: absolute; top: 0px; left: 0px; width: 60px; height: 30px; padding: 6px; background: #fefbf3; border: 1px solid rgba(0,0,0,.2); -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.1); -moz-box-shadow: 0 1px 2px rgba(0,0,0,.1); box-shadow: 0 1px 2px rgba(0,0,0,.1); -webkit-border-radius:4px; -moz-border-radius:4px; border-radius:4px; } -.tab-scala-fix { position: absolute; left: 1px; top: 26px; z-index: 12; width: 72px; height: 2px; background: url('{{ site.baseurl }}/resources/images/tabfix.gif'); } -.java-toggle { position: absolute; top: 0px; left: 76px; padding: 6px; font-weight: bold; color: rgba(89, 80, 80, 0.6); } -.tab-java { position: absolute; z-index: 1; top: 50px; left: 461px; width: 60px; height: 30px; padding: 6px; background: #fefbf3; border: 1px solid rgba(0,0,0,.2); -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.1); -moz-box-shadow: 0 1px 2px rgba(0,0,0,.1); box-shadow: 0 1px 2px rgba(0,0,0,.1); -webkit-border-radius:4px; -moz-border-radius:4px; border-radius:4px; font-weight: bold; } -.tab-java-fix { position: absolute; left: 462px; top: 26px; z-index: 12; width: 72px; height: 2px; background: url('{{ site.baseurl }}/resources/images/tabfix.gif'); } -.used-by h2 { 
font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 32px; font-weight: 400; color: #595959; line-height: 34px; margin-bottom: 12px; } /*gray: 595959, green: 49bf00*/ -.used-by-header { text-align: center; margin-bottom: 20px; } -.used-by-header .text {display: inline-block; padding: 0 20px; background: #C1D2DC; font-weight: bold; color: rgba(88, 111, 117, 0.8);} -.used-by-logos { text-align: center; /*white-space: nowrap;*/ } -.between-dark-strips { background: #C1D2DC; height: 2px; margin-bottom: 40px; } -.three-bars { padding-bottom: 30px; } -.three-bars h2 { font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 28px; font-weight: 400; color: #595959; line-height: 30px; margin-bottom: 8px; } -.three-bars h2 a { color: #595959; } -.three-bars h2 a:hover { color: #15A9CE; text-decoration: none; } -.tweets { position: relative; } -.tweet { position: relative; } -.tweets img { margin-left: -14px; } -.tweet .text { background: rgba(0, 26, 30, 0.7); color: rgba(255, 255, 255, 1); text-shadow: rgba(0, 0, 0, 1) 0px 1px 0px; padding: 10px; margin-bottom: 5px; line-height: 1.2em; word-wrap: break-word !important; display: block; -webkit-box-shadow: rgba(255,255,255,.5) -1px -1px 0px; -moz-box-shadow: rgba(255,255,255,.5) -1px -1px 0px; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } -.tweet .text a { color: #a4cc47; } -.tweet .text a:hover { color: #8ad3e5; text-decoration: none;} -.username { position: relative; top: -20px; left: 30px; color: #888;} -.username a { color: #235561; font-weight: bold; text-decoration: none; } -.username a:hover { color: #4ba11d; } -.time { display: block; position: relative; line-height: 0px; top: -16px; left: 64px; font-size: 80%; } -.time a { color: #888; white-space: nowrap; text-decoration: none; } -.triangle { position: relative; bottom: 9px; left: 40px; height: 0px; width: 1px; margin-left: auto; margin-right: auto; border-top: 16px solid rgba(0, 30, 30, 0.7); border-left: none; border-right: 16px solid transparent; border-bottom: none; } -.feed-entries { margin-left: 0px; } -.feed-entry { position: relative; margin-bottom: 18px; } -.feed-date { float: left; height: 100%; margin-right: 14px; height: 100%;} -.feed-month { color: #447281; font-size: 18px; font-weight: bold; text-transform:uppercase;} -.feed-day { background: #15A9CE; -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; color: #EFF2F5; font-weight: bold; font-size: 24px; text-align: center; line-height: 26px;} -.feed-year { color: #447281; font-size: 16px; font-weight: bold; } -.feed-title { font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 18px; font-weight: 400; line-height: 20px; } -.feed-title a { color: #15A9CE; } -.feed-title a:hover { color: #447281; text-decoration: none; } -.feed-author { font-size: 11px; color: #888888;} -.feed-body { margin-bottom: 14px;} -.news-item { margin-bottom: 14px;} -.news-date { display: inline; font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 14px; font-weight: 500; line-height: 16px; color: #15A9CE; } -.news-author { font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 12px; font-weight: 500; line-height: 15px; color: #447281; } -.news-title { font-family: "Source Sans Pro", "Helvetica Neue", sans-serif; font-size: 18px; font-weight: 400; line-height: 20px; margin-bottom: 4px;} -.news-title a { color: #447281; } -.news-title a:hover { color: #15A9CE; text-decoration: none; } - -.footer { padding-top: 15px; 
clear: both; width: 100%; color: #ffffff; background: #15A9CE; } -.footer ul { float: left; margin: 0; padding: 10px 2% 20px 0; -webkit-box-sizing: content-box; -moz-box-sizing: content-box; box-sizing: content-box; width: 22%; list-style: none; } -.footer a {text-decoration: none;color: #ffffff; font-size: 12px;} -.footer a:hover {text-decoration: underline;} -.footer ul:last-child { padding-right: 0; } -.footer ul li a { text-decoration: none; color: #ffffff; font-size: 12px; } -.footer ul li a:hover { text-decoration: underline; } -.footer ul li h5 { color: #ffffff; margin-bottom: 10px; padding-bottom: 10px; line-height: 20px; border-bottom:1px solid rgba(255,255,255,0.5); } -.footer ul li h5 a { font-size: 14px; opacity: 1;} -.footer .copyright { font-size: 12px; border-top:1px solid rgba(255,255,255,0.5); clear: both; padding: 10px 0 20px; } -.footer .license { float: right; font-size: 12px; } diff --git a/akka-docs-dev/_sphinx/themes/akka/static/contentsFix.js b/akka-docs-dev/_sphinx/themes/akka/static/contentsFix.js deleted file mode 100644 index e729663ce9..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/contentsFix.js +++ /dev/null @@ -1,10 +0,0 @@ -jQuery(document).ready(function($) { - - $("#toc ul").each(function(){ - var elem = $(this); - if (elem.children().length == 0) { - $(".contents-title").css("display","none"); - } - }); - -}); \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/docs.css b/akka-docs-dev/_sphinx/themes/akka/static/docs.css deleted file mode 100644 index e1c5d9f041..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/docs.css +++ /dev/null @@ -1,222 +0,0 @@ -body { position: relative; color: #0B5567;} -a { color: #15A9CE; } -a:hover { color: #15A9CE; text-decoration: underline; } -.navbar { margin-bottom: 18px; } -.navbar-logo { visibility: visible; float: left; padding-top: 0px; } -.main { position: relative; height: auto; margin-top: -18px; overflow: auto; background: #15A9CE;} -.page-title { position: relative; top: 24px; font-family: 'Source Sans Pro', sans-serif; font-size: 24px; font-weight: 400; color: rgba(255, 255, 255, 1); width: 840px;} -.main-container { background: #ffffff; min-height: 600px; padding-top: 20px; margin-top: 28px; } -.pdf-link { float: right; height: 40px; margin-bottom: -15px; margin-top: -5px; } -.breadcrumb { height: 18px; } -.breadcrumb li { float: right; } -.breadcrumb li a { color: #447281; } -.breadcrumb li a:hover { color: #15A9CE; text-decoration: none; } -.breadcrumb li:last-child { float: left; font-weight: bold; } -.contents-title { font-weight: bold; font-size: 18px; line-height: 27px; margin-bottom: 6px; color: #0d2428; text-shadow:0 1px 0 #f0fafc; } -div#toc { margin-left: -16px; } -div#toc ul { list-style: none; margin: 0 0 5px 16px; } -div#toc ul li { padding-bottom: 8px; line-height: 105%; font-weight: bold; width: 100%; } -div#toc ul ul { list-style: disc; } -div#toc ul ul ul { list-style: square; } -div#toc ul li ul li { padding-bottom: 0px; line-height: 18px; font-weight: normal; } -div#scroller-anchor { width: inherit; } -div#scroller { width: inherit; } -p { padding-top: 4px; font-size: 14px; } -h1 {color: #15A9CE; } -h2 { padding-top: 14px; padding-bottom: 4px; margin-bottom: 2px; border-bottom: solid 1px rgba(0, 0, 0, 0.15); color: #0B5567; } -h2 a { color: #0B5567; } -h2 a:hover { color: #447281; } -h2 .pre { font-size: 20px; } -h3 { padding-top: 10px; color: #0B5567; } -h3 a { color: #0B5567; } -h3 a:hover { } -h3 .pre { font-size: 16px; } -h4 { 
padding-top: 6px; font-size: 16px; } -h4 a { color: #0B5567; } -h4 a:hover { text-shadow:0 2px 0 #0B5567; } -h5 { text-transform: uppercase; font-size: 14px; padding-top: 6px; color: #0B5567;} -strong {color: #0B5567; } -/*.footer-bg { overflow: auto; background:url('{{ site.baseurl }}/resources/images/dark-blue-bg.png') repeat; height: 100%; }*/ - -.toctree-l1 { font-weight: bold; font-size: 14px; padding-top: 4px;} -.toctree-l1 a { color: #15384e; } -.toctree-l1 a:hover { color: #15A9CE; text-decoration: none; } -.toctree-l2 { font-weight: normal; list-style: square; } -.toctree-l2 a { color: #447281; } -.toctree-l2 a:hover { color: #15A9CE; text-decoration: none; } - -.topic-title { - color: rgba(0, 0, 0, 0.6); - text-shadow: 0 1px 0 rgba(255, 255, 255, .7); - margin-bottom: 6px; - font-size: 24px; - font-weight: bold; - line-height: 36px; -} - -.admonition { - background-image: none; - background-color: #fdf5d9; - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - padding: 14px; - border-color: #73cbe2; - -webkit-box-shadow: none; - -moz-box-shadow: none; - box-shadow: none; - margin-bottom: 18px; - position: relative; - padding: 7px 15px; - color: #ffffff; - background-repeat: repeat-x; - background-image: -khtml-gradient(linear, left top, left bottom, from(#73cbe2), to(#15a9ce)); - background-image: -moz-linear-gradient(top, #73cbe2, #15a9ce); - background-image: -ms-linear-gradient(top, #73cbe2, #15a9ce); - background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #73cbe2), color-stop(100%, #15a9ce)); - background-image: -webkit-linear-gradient(top, #73cbe2, #15a9ce); - background-image: -o-linear-gradient(top, #73cbe2, #15a9ce); - background-image: linear-gradient(top, #73cbe2, #15a9ce); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#73cbe2', endColorstr='#15a9ce', GradientType=0); - border-color: #15a9ce #15a9ce #E4C652; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-width: 1px; - border-style: solid; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); -} - -.warning { - background-image: none; - background-color: #e25758; - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - padding: 14px; - border-color: #f06565; - -webkit-box-shadow: none; - -moz-box-shadow: none; - box-shadow: none; - margin-bottom: 18px; - position: relative; - padding: 7px 15px; - color: #ffffff; - background-repeat: repeat-x; - background-image: -khtml-gradient(linear, left top, left bottom, from(#f06565), to(#e25758)); - background-image: -moz-linear-gradient(top, #f06565, #e25758); - background-image: -ms-linear-gradient(top, #f06565, #e25758); - background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #f06565), color-stop(100%, #e25758)); - background-image: -webkit-linear-gradient(top, #f06565, #e25758); - background-image: -o-linear-gradient(top, #f06565, #e25758); - background-image: linear-gradient(top, #f06565, #e25758); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#f06565', endColorstr='#e25758', GradientType=0); - border-color: #15a9ce #e25758 #E4C652; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-width: 1px; - border-style: solid; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - 
border-radius: 4px; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); -} -.admonition a { - color: #0B5567; -} -.admonition a:hover { - text-decoration: underline; -} -.admonition p.admonition-title { - color: #ffffff; - margin-bottom: 6px; - font-size: 16px; - font-weight: bold; - line-height: 20px; -} - -.topic { - background-image: none; - background-color: #fdf5d9; - filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); - padding: 14px; - border-color: #def1f4; - -webkit-box-shadow: none; - -moz-box-shadow: none; - box-shadow: none; - margin-bottom: 18px; - position: relative; - padding: 7px 15px; - color: #404040; - background-repeat: repeat-x; - background-image: -khtml-gradient(linear, left top, left bottom, from(#def1f4), to(#c1dfe6)); - background-image: -moz-linear-gradient(top, #def1f4, #c1dfe6); - background-image: -ms-linear-gradient(top, #def1f4, #c1dfe6); - background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #def1f4), color-stop(100%, #c1dfe6)); - background-image: -webkit-linear-gradient(top, #def1f4, #c1dfe6); - background-image: -o-linear-gradient(top, #def1f4, #c1dfe6); - background-image: linear-gradient(top, #def1f4, #c1dfe6); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#def1f4', endColorstr='#c1dfe6', GradientType=0); - text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); - border-color: #c1dfe6 #c1dfe6 #E4C652; - border-color: rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.1) rgba(0, 0, 0, 0.25); - border-width: 1px; - border-style: solid; - -webkit-border-radius: 4px; - -moz-border-radius: 4px; - border-radius: 4px; - -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - -moz-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.25); -} - -.pre { padding: 1px 2px; color: #008FA9; background-color: #EFF2F5; border: 1px solid #DDDEDF; font-family: Menlo, Monaco, "Courier New", monospace; font-size: 12px; -webkit-border-radius: 3px; -moz-border-radius: 3px; border-radius: 3px; } -.footer h5 { text-transform: none; } - - -.section-marker { position: absolute; width: 1em; margin-left: -1em; display: block; text-decoration: none; visibility: hidden; text-align: center; font-weight: normal; } -.section-marker:hover { text-decoration: none; } -.section h2:hover > a,.section h3:hover > a,.section h4:hover > a,.section h5:hover > a { visibility: visible; } - - -/* - * Used when browsing 2.3.12 yet 2.4.x is out already. - * This is more critical than browsing 2.3.10 and 2.3.11 is latest (the default color). 
- */ -#floaty-warning .warning { - background-color: rgb(227, 88, 89); -} - -#floaty-warning a { - color: white !important; - font-weight: bold; - text-decoration: underline; -} - -#floaty-warning button { - border-radius: 4px; - border: none; - margin: 0; - padding: 1em 2em; - font-weight: bold; - color: white; -} - -#floaty-warning button { - background-color: #89CDDE; -} -#floaty-warning button:hover { - background-color: #74DDF7; -} - -#floaty-warning.warning button { - background-color: #F98D8D; -} -#floaty-warning.warning button:hover { - background-color: #F77979; -} - -#close-floaty-window { - padding: 1em; - margin-top: 1em; -} \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/effects.core.js b/akka-docs-dev/_sphinx/themes/akka/static/effects.core.js deleted file mode 100644 index ed4fd37741..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/effects.core.js +++ /dev/null @@ -1,509 +0,0 @@ -/* - * jQuery UI Effects 1.5.3 - * - * Copyright (c) 2008 Aaron Eisenberger (aaronchi@gmail.com) - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) licenses. - * - * http://docs.jquery.com/UI/Effects/ - */ -;(function($) { - -$.effects = $.effects || {}; //Add the 'effects' scope - -$.extend($.effects, { - save: function(el, set) { - for(var i=0;i'); - var wrapper = el.parent(); - if (el.css('position') == 'static'){ - wrapper.css({position: 'relative'}); - el.css({position: 'relative'}); - } else { - var top = el.css('top'); if(isNaN(parseInt(top))) top = 'auto'; - var left = el.css('left'); if(isNaN(parseInt(left))) left = 'auto'; - wrapper.css({ position: el.css('position'), top: top, left: left, zIndex: el.css('z-index') }).show(); - el.css({position: 'relative', top:0, left:0}); - } - wrapper.css(props); - return wrapper; - }, - removeWrapper: function(el) { - if (el.parent().attr('id') == 'fxWrapper') - return el.parent().replaceWith(el); - return el; - }, - setTransition: function(el, list, factor, val) { - val = val || {}; - $.each(list,function(i, x){ - unit = el.cssUnit(x); - if (unit[0] > 0) val[x] = unit[0] * factor + unit[1]; - }); - return val; - }, - animateClass: function(value, duration, easing, callback) { - - var cb = (typeof easing == "function" ? easing : (callback ? callback : null)); - var ea = (typeof easing == "object" ? easing : null); - - return this.each(function() { - - var offset = {}; var that = $(this); var oldStyleAttr = that.attr("style") || ''; - if(typeof oldStyleAttr == 'object') oldStyleAttr = oldStyleAttr["cssText"]; /* Stupidly in IE, style is a object.. */ - if(value.toggle) { that.hasClass(value.toggle) ? value.remove = value.toggle : value.add = value.toggle; } - - //Let's get a style offset - var oldStyle = $.extend({}, (document.defaultView ? document.defaultView.getComputedStyle(this,null) : this.currentStyle)); - if(value.add) that.addClass(value.add); if(value.remove) that.removeClass(value.remove); - var newStyle = $.extend({}, (document.defaultView ? document.defaultView.getComputedStyle(this,null) : this.currentStyle)); - if(value.add) that.removeClass(value.add); if(value.remove) that.addClass(value.remove); - - // The main function to form the object for animation - for(var n in newStyle) { - if( typeof newStyle[n] != "function" && newStyle[n] /* No functions and null properties */ - && n.indexOf("Moz") == -1 && n.indexOf("length") == -1 /* No mozilla spezific render properties. 
*/ - && newStyle[n] != oldStyle[n] /* Only values that have changed are used for the animation */ - && (n.match(/color/i) || (!n.match(/color/i) && !isNaN(parseInt(newStyle[n],10)))) /* Only things that can be parsed to integers or colors */ - && (oldStyle.position != "static" || (oldStyle.position == "static" && !n.match(/left|top|bottom|right/))) /* No need for positions when dealing with static positions */ - ) offset[n] = newStyle[n]; - } - - that.animate(offset, duration, ea, function() { // Animate the newly constructed offset object - // Change style attribute back to original. For stupid IE, we need to clear the damn object. - if(typeof $(this).attr("style") == 'object') { $(this).attr("style")["cssText"] = ""; $(this).attr("style")["cssText"] = oldStyleAttr; } else $(this).attr("style", oldStyleAttr); - if(value.add) $(this).addClass(value.add); if(value.remove) $(this).removeClass(value.remove); - if(cb) cb.apply(this, arguments); - }); - - }); - } -}); - -//Extend the methods of jQuery -$.fn.extend({ - //Save old methods - _show: $.fn.show, - _hide: $.fn.hide, - __toggle: $.fn.toggle, - _addClass: $.fn.addClass, - _removeClass: $.fn.removeClass, - _toggleClass: $.fn.toggleClass, - // New ec methods - effect: function(fx,o,speed,callback) { - return $.effects[fx] ? $.effects[fx].call(this, {method: fx, options: o || {}, duration: speed, callback: callback }) : null; - }, - show: function() { - if(!arguments[0] || (arguments[0].constructor == Number || /(slow|normal|fast)/.test(arguments[0]))) - return this._show.apply(this, arguments); - else { - var o = arguments[1] || {}; o['mode'] = 'show'; - return this.effect.apply(this, [arguments[0], o, arguments[2] || o.duration, arguments[3] || o.callback]); - } - }, - hide: function() { - if(!arguments[0] || (arguments[0].constructor == Number || /(slow|normal|fast)/.test(arguments[0]))) - return this._hide.apply(this, arguments); - else { - var o = arguments[1] || {}; o['mode'] = 'hide'; - return this.effect.apply(this, [arguments[0], o, arguments[2] || o.duration, arguments[3] || o.callback]); - } - }, - toggle: function(){ - if(!arguments[0] || (arguments[0].constructor == Number || /(slow|normal|fast)/.test(arguments[0])) || (arguments[0].constructor == Function)) - return this.__toggle.apply(this, arguments); - else { - var o = arguments[1] || {}; o['mode'] = 'toggle'; - return this.effect.apply(this, [arguments[0], o, arguments[2] || o.duration, arguments[3] || o.callback]); - } - }, - addClass: function(classNames,speed,easing,callback) { - return speed ? $.effects.animateClass.apply(this, [{ add: classNames },speed,easing,callback]) : this._addClass(classNames); - }, - removeClass: function(classNames,speed,easing,callback) { - return speed ? $.effects.animateClass.apply(this, [{ remove: classNames },speed,easing,callback]) : this._removeClass(classNames); - }, - toggleClass: function(classNames,speed,easing,callback) { - return speed ? 
$.effects.animateClass.apply(this, [{ toggle: classNames },speed,easing,callback]) : this._toggleClass(classNames); - }, - morph: function(remove,add,speed,easing,callback) { - return $.effects.animateClass.apply(this, [{ add: add, remove: remove },speed,easing,callback]); - }, - switchClass: function() { - return this.morph.apply(this, arguments); - }, - // helper functions - cssUnit: function(key) { - var style = this.css(key), val = []; - $.each( ['em','px','%','pt'], function(i, unit){ - if(style.indexOf(unit) > 0) - val = [parseFloat(style), unit]; - }); - return val; - } -}); - -/* - * jQuery Color Animations - * Copyright 2007 John Resig - * Released under the MIT and GPL licenses. - */ - -// We override the animation for all of these color styles -jQuery.each(['backgroundColor', 'borderBottomColor', 'borderLeftColor', 'borderRightColor', 'borderTopColor', 'color', 'outlineColor'], function(i,attr){ - jQuery.fx.step[attr] = function(fx){ - if ( fx.state == 0 ) { - fx.start = getColor( fx.elem, attr ); - fx.end = getRGB( fx.end ); - } - - fx.elem.style[attr] = "rgb(" + [ - Math.max(Math.min( parseInt((fx.pos * (fx.end[0] - fx.start[0])) + fx.start[0]), 255), 0), - Math.max(Math.min( parseInt((fx.pos * (fx.end[1] - fx.start[1])) + fx.start[1]), 255), 0), - Math.max(Math.min( parseInt((fx.pos * (fx.end[2] - fx.start[2])) + fx.start[2]), 255), 0) - ].join(",") + ")"; - } -}); - -// Color Conversion functions from highlightFade -// By Blair Mitchelmore -// http://jquery.offput.ca/highlightFade/ - -// Parse strings looking for color tuples [255,255,255] -function getRGB(color) { - var result; - - // Check if we're already dealing with an array of colors - if ( color && color.constructor == Array && color.length == 3 ) - return color; - - // Look for rgb(num,num,num) - if (result = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(color)) - return [parseInt(result[1]), parseInt(result[2]), parseInt(result[3])]; - - // Look for rgb(num%,num%,num%) - if (result = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(color)) - return [parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55]; - - // Look for #a0b1c2 - if (result = /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(color)) - return [parseInt(result[1],16), parseInt(result[2],16), parseInt(result[3],16)]; - - // Look for #fff - if (result = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(color)) - return [parseInt(result[1]+result[1],16), parseInt(result[2]+result[2],16), parseInt(result[3]+result[3],16)]; - - // Look for rgba(0, 0, 0, 0) == transparent in Safari 3 - if (result = /rgba\(0, 0, 0, 0\)/.exec(color)) - return colors['transparent'] - - // Otherwise, we're most likely dealing with a named color - return colors[jQuery.trim(color).toLowerCase()]; -} - -function getColor(elem, attr) { - var color; - - do { - color = jQuery.curCSS(elem, attr); - - // Keep going until we find an element that has color, or we hit the body - if ( color != '' && color != 'transparent' || jQuery.nodeName(elem, "body") ) - break; - - attr = "backgroundColor"; - } while ( elem = elem.parentNode ); - - return getRGB(color); -}; - -// Some named colors to work with -// From Interface by Stefan Petre -// http://interface.eyecon.ro/ - -var colors = { - aqua:[0,255,255], - azure:[240,255,255], - beige:[245,245,220], - black:[0,0,0], - blue:[0,0,255], - brown:[165,42,42], - cyan:[0,255,255], - darkblue:[0,0,139], - darkcyan:[0,139,139], - 
darkgrey:[169,169,169], - darkgreen:[0,100,0], - darkkhaki:[189,183,107], - darkmagenta:[139,0,139], - darkolivegreen:[85,107,47], - darkorange:[255,140,0], - darkorchid:[153,50,204], - darkred:[139,0,0], - darksalmon:[233,150,122], - darkviolet:[148,0,211], - fuchsia:[255,0,255], - gold:[255,215,0], - green:[0,128,0], - indigo:[75,0,130], - khaki:[240,230,140], - lightblue:[173,216,230], - lightcyan:[224,255,255], - lightgreen:[144,238,144], - lightgrey:[211,211,211], - lightpink:[255,182,193], - lightyellow:[255,255,224], - lime:[0,255,0], - magenta:[255,0,255], - maroon:[128,0,0], - navy:[0,0,128], - olive:[128,128,0], - orange:[255,165,0], - pink:[255,192,203], - purple:[128,0,128], - violet:[128,0,128], - red:[255,0,0], - silver:[192,192,192], - white:[255,255,255], - yellow:[255,255,0], - transparent: [255,255,255] -}; - -/* - * jQuery Easing v1.3 - http://gsgd.co.uk/sandbox/jquery/easing/ - * - * Uses the built in easing capabilities added In jQuery 1.1 - * to offer multiple easing options - * - * TERMS OF USE - jQuery Easing - * - * Open source under the BSD License. - * - * Copyright © 2008 George McGinley Smith - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, - * are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, this list of - * conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, this list - * of conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of the author nor the names of contributors may be used to endorse - * or promote products derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY - * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE - * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - * OF THE POSSIBILITY OF SUCH DAMAGE. 
- * -*/ - -// t: current time, b: begInnIng value, c: change In value, d: duration -jQuery.easing['jswing'] = jQuery.easing['swing']; - -jQuery.extend( jQuery.easing, -{ - def: 'easeOutQuad', - swing: function (x, t, b, c, d) { - //alert(jQuery.easing.default); - return jQuery.easing[jQuery.easing.def](x, t, b, c, d); - }, - easeInQuad: function (x, t, b, c, d) { - return c*(t/=d)*t + b; - }, - easeOutQuad: function (x, t, b, c, d) { - return -c *(t/=d)*(t-2) + b; - }, - easeInOutQuad: function (x, t, b, c, d) { - if ((t/=d/2) < 1) return c/2*t*t + b; - return -c/2 * ((--t)*(t-2) - 1) + b; - }, - easeInCubic: function (x, t, b, c, d) { - return c*(t/=d)*t*t + b; - }, - easeOutCubic: function (x, t, b, c, d) { - return c*((t=t/d-1)*t*t + 1) + b; - }, - easeInOutCubic: function (x, t, b, c, d) { - if ((t/=d/2) < 1) return c/2*t*t*t + b; - return c/2*((t-=2)*t*t + 2) + b; - }, - easeInQuart: function (x, t, b, c, d) { - return c*(t/=d)*t*t*t + b; - }, - easeOutQuart: function (x, t, b, c, d) { - return -c * ((t=t/d-1)*t*t*t - 1) + b; - }, - easeInOutQuart: function (x, t, b, c, d) { - if ((t/=d/2) < 1) return c/2*t*t*t*t + b; - return -c/2 * ((t-=2)*t*t*t - 2) + b; - }, - easeInQuint: function (x, t, b, c, d) { - return c*(t/=d)*t*t*t*t + b; - }, - easeOutQuint: function (x, t, b, c, d) { - return c*((t=t/d-1)*t*t*t*t + 1) + b; - }, - easeInOutQuint: function (x, t, b, c, d) { - if ((t/=d/2) < 1) return c/2*t*t*t*t*t + b; - return c/2*((t-=2)*t*t*t*t + 2) + b; - }, - easeInSine: function (x, t, b, c, d) { - return -c * Math.cos(t/d * (Math.PI/2)) + c + b; - }, - easeOutSine: function (x, t, b, c, d) { - return c * Math.sin(t/d * (Math.PI/2)) + b; - }, - easeInOutSine: function (x, t, b, c, d) { - return -c/2 * (Math.cos(Math.PI*t/d) - 1) + b; - }, - easeInExpo: function (x, t, b, c, d) { - return (t==0) ? b : c * Math.pow(2, 10 * (t/d - 1)) + b; - }, - easeOutExpo: function (x, t, b, c, d) { - return (t==d) ? 
b+c : c * (-Math.pow(2, -10 * t/d) + 1) + b; - }, - easeInOutExpo: function (x, t, b, c, d) { - if (t==0) return b; - if (t==d) return b+c; - if ((t/=d/2) < 1) return c/2 * Math.pow(2, 10 * (t - 1)) + b; - return c/2 * (-Math.pow(2, -10 * --t) + 2) + b; - }, - easeInCirc: function (x, t, b, c, d) { - return -c * (Math.sqrt(1 - (t/=d)*t) - 1) + b; - }, - easeOutCirc: function (x, t, b, c, d) { - return c * Math.sqrt(1 - (t=t/d-1)*t) + b; - }, - easeInOutCirc: function (x, t, b, c, d) { - if ((t/=d/2) < 1) return -c/2 * (Math.sqrt(1 - t*t) - 1) + b; - return c/2 * (Math.sqrt(1 - (t-=2)*t) + 1) + b; - }, - easeInElastic: function (x, t, b, c, d) { - var s=1.70158;var p=0;var a=c; - if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; - if (a < Math.abs(c)) { a=c; var s=p/4; } - else var s = p/(2*Math.PI) * Math.asin (c/a); - return -(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b; - }, - easeOutElastic: function (x, t, b, c, d) { - var s=1.70158;var p=0;var a=c; - if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; - if (a < Math.abs(c)) { a=c; var s=p/4; } - else var s = p/(2*Math.PI) * Math.asin (c/a); - return a*Math.pow(2,-10*t) * Math.sin( (t*d-s)*(2*Math.PI)/p ) + c + b; - }, - easeInOutElastic: function (x, t, b, c, d) { - var s=1.70158;var p=0;var a=c; - if (t==0) return b; if ((t/=d/2)==2) return b+c; if (!p) p=d*(.3*1.5); - if (a < Math.abs(c)) { a=c; var s=p/4; } - else var s = p/(2*Math.PI) * Math.asin (c/a); - if (t < 1) return -.5*(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b; - return a*Math.pow(2,-10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )*.5 + c + b; - }, - easeInBack: function (x, t, b, c, d, s) { - if (s == undefined) s = 1.70158; - return c*(t/=d)*t*((s+1)*t - s) + b; - }, - easeOutBack: function (x, t, b, c, d, s) { - if (s == undefined) s = 1.70158; - return c*((t=t/d-1)*t*((s+1)*t + s) + 1) + b; - }, - easeInOutBack: function (x, t, b, c, d, s) { - if (s == undefined) s = 1.70158; - if ((t/=d/2) < 1) return c/2*(t*t*(((s*=(1.525))+1)*t - s)) + b; - return c/2*((t-=2)*t*(((s*=(1.525))+1)*t + s) + 2) + b; - }, - easeInBounce: function (x, t, b, c, d) { - return c - jQuery.easing.easeOutBounce (x, d-t, 0, c, d) + b; - }, - easeOutBounce: function (x, t, b, c, d) { - if ((t/=d) < (1/2.75)) { - return c*(7.5625*t*t) + b; - } else if (t < (2/2.75)) { - return c*(7.5625*(t-=(1.5/2.75))*t + .75) + b; - } else if (t < (2.5/2.75)) { - return c*(7.5625*(t-=(2.25/2.75))*t + .9375) + b; - } else { - return c*(7.5625*(t-=(2.625/2.75))*t + .984375) + b; - } - }, - easeInOutBounce: function (x, t, b, c, d) { - if (t < d/2) return jQuery.easing.easeInBounce (x, t*2, 0, c, d) * .5 + b; - return jQuery.easing.easeOutBounce (x, t*2-d, 0, c, d) * .5 + c*.5 + b; - } -}); - -/* - * - * TERMS OF USE - EASING EQUATIONS - * - * Open source under the BSD License. - * - * Copyright © 2001 Robert Penner - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, - * are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, this list of - * conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, this list - * of conditions and the following disclaimer in the documentation and/or other materials - * provided with the distribution. 
- * - * Neither the name of the author nor the names of contributors may be used to endorse - * or promote products derived from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY - * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE - * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - * OF THE POSSIBILITY OF SUCH DAMAGE. - * - */ - -})(jQuery); \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/effects.highlight.js b/akka-docs-dev/_sphinx/themes/akka/static/effects.highlight.js deleted file mode 100644 index c9c332522b..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/effects.highlight.js +++ /dev/null @@ -1,48 +0,0 @@ -/* - * jQuery UI Effects Highlight @VERSION - * - * Copyright (c) 2008 Aaron Eisenberger (aaronchi@gmail.com) - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) licenses. - * - * http://docs.jquery.com/UI/Effects/Highlight - * - * Depends: - * effects.core.js - */ -(function($) { - -$.effects.highlight = function(o) { - - return this.queue(function() { - - // Create element - var el = $(this), props = ['backgroundImage','backgroundColor','opacity']; - - // Set options - var mode = $.effects.setMode(el, o.options.mode || 'show'); // Set Mode - var color = o.options.color || "#ffff99"; // Default highlight color - var oldColor = "#f2f2eb"; - - // Adjust - $.effects.save(el, props); el.show(); // Save & Show - el.css({backgroundImage: 'none', backgroundColor: color}); // Shift - - // Animation - var animation = {backgroundColor: oldColor }; - if (mode == "hide") animation['opacity'] = 0; - - // Animate - el.animate(animation, { queue: false, duration: o.duration, easing: o.options.easing, complete: function() { - if(mode == "hide") el.hide(); - $.effects.restore(el, props); - if (mode == "show" && jQuery.browser.msie) this.style.removeAttribute('filter'); - if(o.callback) o.callback.apply(this, arguments); - el.dequeue(); - }}); - - }); - -}; - -})(jQuery); \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/ga.js b/akka-docs-dev/_sphinx/themes/akka/static/ga.js deleted file mode 100644 index 730ea9160c..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/ga.js +++ /dev/null @@ -1,43 +0,0 @@ -// check to see if this document is on the akka.io server. If so, google analytics and marketo -if (/akka\.io/.test(document.domain)) { - var _gaq = _gaq || []; - _gaq.push(['_setAccount', 'UA-21117439-1']); - _gaq.push(['_setDomainName', 'akka.io']); - _gaq.push(['_trackPageview']); - - (function() { - var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; - ga.src = ('https:' == document.location.protocol ? 
'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; - var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); - })(); - - (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ - (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), - m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) - })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); - ga('create', 'UA-23127719-1', 'typesafe.com', {'allowLinker': true, 'name': 'tsTracker'}); - ga('tsTracker.require', 'linker'); - ga('tsTracker.linker:autoLink', ['typesafe.com','playframework.com','scala-lang.org','scaladays.org','spray.io','akka.io','scala-sbt.org','scala-ide.org']); - ga('tsTracker.send', 'pageview'); - - (function() { - var didInit = false; - function initMunchkin() { - if(didInit === false) { - didInit = true; - Munchkin.init('558-NCX-702'); - } - } - var s = document.createElement('script'); - s.type = 'text/javascript'; - s.async = true; - s.src = '//munchkin.marketo.net/munchkin.js'; - s.onreadystatechange = function() { - if (this.readyState == 'complete' || this.readyState == 'loaded') { - initMunchkin(); - } - }; - s.onload = initMunchkin; - document.getElementsByTagName('head')[0].appendChild(s); - })(); -} \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/highlightCode.js b/akka-docs-dev/_sphinx/themes/akka/static/highlightCode.js deleted file mode 100644 index 401b9ec471..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/highlightCode.js +++ /dev/null @@ -1,13 +0,0 @@ -jQuery(document).ready(function($) { - if (typeof disableStyleCode != "undefined") { - return; - } - var a = false; - $("pre").each(function() { - if (!$(this).hasClass("prettyprint")) { - $(this).addClass("prettyprint lang-scala linenums"); - a = true - } - }); - if (a) { prettyPrint() } -}); diff --git a/akka-docs-dev/_sphinx/themes/akka/static/jquery.js b/akka-docs-dev/_sphinx/themes/akka/static/jquery.js deleted file mode 100644 index ee0233703d..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/jquery.js +++ /dev/null @@ -1,4 +0,0 @@ -/*! 
jQuery v1.7.1 jquery.com | jquery.org/license */ -(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cv(a){if(!ck[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){cl||(cl=c.createElement("iframe"),cl.frameBorder=cl.width=cl.height=0),b.appendChild(cl);if(!cm||!cl.createElement)cm=(cl.contentWindow||cl.contentDocument).document,cm.write((c.compatMode==="CSS1Compat"?"":"")+""),cm.close();d=cm.createElement(a),cm.body.appendChild(d),e=f.css(d,"display"),b.removeChild(cl)}ck[a]=e}return ck[a]}function cu(a,b){var c={};f.each(cq.concat.apply([],cq.slice(0,b)),function(){c[this]=a});return c}function ct(){cr=b}function cs(){setTimeout(ct,0);return cr=f.now()}function cj(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ci(){try{return new a.XMLHttpRequest}catch(b){}}function cc(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g0){if(c!=="border")for(;g=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?parseFloat(d):j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) ([\w.]+)/,u=/(mozilla)(?:.*? 
rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.1",length:0,size:function(){return this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a&&typeof a=="object"&&"setInterval"in a},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return 
d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c
a",d=q.getElementsByTagName("*"),e=q.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=q.getElementsByTagName("input")[0],b={leadingWhitespace:q.firstChild.nodeType===3,tbody:!q.getElementsByTagName("tbody").length,htmlSerialize:!!q.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:q.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0},i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete q.test}catch(s){b.deleteExpando=!1}!q.addEventListener&&q.attachEvent&&q.fireEvent&&(q.attachEvent("onclick",function(){b.noCloneEvent=!1}),q.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),q.appendChild(i),k=c.createDocumentFragment(),k.appendChild(q.lastChild),b.checkClone=k.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,k.removeChild(i),k.appendChild(q),q.innerHTML="",a.getComputedStyle&&(j=c.createElement("div"),j.style.width="0",j.style.marginRight="0",q.style.width="2px",q.appendChild(j),b.reliableMarginRight=(parseInt((a.getComputedStyle(j,null)||{marginRight:0}).marginRight,10)||0)===0);if(q.attachEvent)for(o in{submit:1,change:1,focusin:1})n="on"+o,p=n in q,p||(q.setAttribute(n,"return;"),p=typeof q[n]=="function"),b[o+"Bubbles"]=p;k.removeChild(q),k=g=h=j=q=i=null,f(function(){var a,d,e,g,h,i,j,k,m,n,o,r=c.getElementsByTagName("body")[0];!r||(j=1,k="position:absolute;top:0;left:0;width:1px;height:1px;margin:0;",m="visibility:hidden;border:0;",n="style='"+k+"border:5px solid #000;padding:0;'",o="
"+""+"
",a=c.createElement("div"),a.style.cssText=m+"width:0;height:0;position:static;top:0;margin-top:"+j+"px",r.insertBefore(a,r.firstChild),q=c.createElement("div"),a.appendChild(q),q.innerHTML="
t
",l=q.getElementsByTagName("td"),p=l[0].offsetHeight===0,l[0].style.display="",l[1].style.display="none",b.reliableHiddenOffsets=p&&l[0].offsetHeight===0,q.innerHTML="",q.style.width=q.style.paddingLeft="1px",f.boxModel=b.boxModel=q.offsetWidth===2,typeof q.style.zoom!="undefined"&&(q.style.display="inline",q.style.zoom=1,b.inlineBlockNeedsLayout=q.offsetWidth===2,q.style.display="",q.innerHTML="
",b.shrinkWrapBlocks=q.offsetWidth!==2),q.style.cssText=k+m,q.innerHTML=o,d=q.firstChild,e=d.firstChild,h=d.nextSibling.firstChild.firstChild,i={doesNotAddBorder:e.offsetTop!==5,doesAddBorderForTableAndCells:h.offsetTop===5},e.style.position="fixed",e.style.top="20px",i.fixedPosition=e.offsetTop===20||e.offsetTop===15,e.style.position=e.style.top="",d.style.overflow="hidden",d.style.position="relative",i.subtractsBorderForOverflowNotVisible=e.offsetTop===-5,i.doesNotIncludeMarginInBodyOffset=r.offsetTop!==j,r.removeChild(a),q=a=null,f.extend(b,i))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return a==null?"":a+""})),c=f.valHooks[this.nodeName.toLowerCase()]||f.valHooks[this.type];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.nodeName.toLowerCase()]||f.valHooks[g.type];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;h=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/\bhover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return 
b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")}; -f.event={add:function(a,c,d,e,g){var h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;le&&i.push({elem:this,matches:d.slice(e)});for(j=0;j0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e":function(a,b){var c,d=typeof 
b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return bc[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var 
e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="

";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="
";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h0)for(h=g;h=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling(a.parentNode.firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var V="abbr|article|aside|audio|canvas|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/",""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div
","
"]),f.fn.extend({text:function(a){if(f.isFunction(a))return this.each(function(b){var c=f(this);c.text(a.call(this,b,c.text()))});if(typeof a!="object"&&a!==b)return this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a));return f.text(this)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return this},empty:function() -{for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){if(a===b)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1>");try{for(var c=0,d=this.length;c1&&l0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||!bc.test("<"+a.nodeName)?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g;b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);var h=[],i;for(var j=0,k;(k=a[j])!=null;j++){typeof k=="number"&&(k+="");if(!k)continue;if(typeof k=="string")if(!_.test(k))k=b.createTextNode(k);else{k=k.replace(Y,"<$1>");var l=(Z.exec(k)||["",""])[1].toLowerCase(),m=bg[l]||bg._default,n=m[0],o=b.createElement("div");b===c?bh.appendChild(o):U(b).appendChild(o),o.innerHTML=m[1]+k+m[2];while(n--)o=o.lastChild;if(!f.support.tbody){var 
p=$.test(k),q=l==="table"&&!p?o.firstChild&&o.firstChild.childNodes:m[1]===""&&!p?o.childNodes:[];for(i=q.length-1;i>=0;--i)f.nodeName(q[i],"tbody")&&!q[i].childNodes.length&&q[i].parentNode.removeChild(q[i])}!f.support.leadingWhitespace&&X.test(k)&&o.insertBefore(b.createTextNode(X.exec(k)[0]),o.firstChild),k=o.childNodes}var r;if(!f.support.appendChecked)if(k[0]&&typeof (r=k.length)=="number")for(i=0;i=0)return b+"px"}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return br.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bq,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bq.test(g)?g.replace(bq,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){var c;f.swap(a,{display:"inline-block"},function(){b?c=bz(a,"margin-right","marginRight"):c=a.style.marginRight});return c}})}),c.defaultView&&c.defaultView.getComputedStyle&&(bA=function(a,b){var c,d,e;b=b.replace(bs,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b)));return c}),c.documentElement.currentStyle&&(bB=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f===null&&g&&(e=g[b])&&(f=e),!bt.test(f)&&bu.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f||0,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),bz=bA||bB,f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)});var bD=/%20/g,bE=/\[\]$/,bF=/\r?\n/g,bG=/#.*$/,bH=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,bI=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bJ=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bK=/^(?:GET|HEAD)$/,bL=/^\/\//,bM=/\?/,bN=/)<[^<]*)*<\/script>/gi,bO=/^(?:select|textarea)/i,bP=/\s+/,bQ=/([?&])_=[^&]*/,bR=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bS=f.fn.load,bT={},bU={},bV,bW,bX=["*/"]+["*"];try{bV=e.href}catch(bY){bV=c.createElement("a"),bV.href="",bV=bV.href}bW=bR.exec(bV.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bS)return bS.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("
").append(c.replace(bN,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bO.test(this.nodeName)||bI.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bF,"\r\n")}}):{name:b.name,value:c.replace(bF,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b_(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b_(a,b);return a},ajaxSettings:{url:bV,isLocal:bJ.test(bW[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bX},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bZ(bT),ajaxTransport:bZ(bU),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?cb(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else try{r=cc(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bH.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bG,"").replace(bL,bW[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bP),d.crossDomain==null&&(r=bR.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bW[1]&&r[2]==bW[2]&&(r[3]||(r[1]==="http:"?80:443))==(bW[3]||(bW[1]==="http:"?80:443)))),d.data&&d.processData&&typeof 
d.data!="string"&&(d.data=f.param(d.data,d.traditional)),b$(bT,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bK.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bM.test(d.url)?"&":"?")+d.data,delete d.data),k=d.url;if(d.cache===!1){var x=f.now(),y=d.url.replace(bQ,"$1_="+x);d.url=y+(y===d.url?(bM.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bX+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=b$(bU,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)ca(g,a[g],c,e);return d.join("&").replace(bD,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cd=f.now(),ce=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cd++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=b.contentType==="application/x-www-form-urlencoded"&&typeof b.data=="string";if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(ce.test(b.url)||e&&ce.test(b.data))){var g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(ce,l),b.url===j&&(e&&(k=k.replace(ce,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var cf=a.ActiveXObject?function(){for(var a in ch)ch[a](0,1)}:!1,cg=0,ch;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ci()||cj()}:ci,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in 
a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,cf&&delete ch[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n),m.text=h.responseText;try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cg,cf&&(ch||(ch={},f(a).unload(cf)),ch[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var ck={},cl,cm,cn=/^(?:toggle|show|hide)$/,co=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,cp,cq=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cr;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(cu("show",3),a,b,c);for(var g=0,h=this.length;g=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var a,b=f.timers,c=0;for(;c-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each(["Left","Top"],function(a,c){var d="scroll"+c;f.fn[d]=function(c){var e,g;if(c===b){e=this[0];if(!e)return null;g=cy(e);return g?"pageXOffset"in g?g[a?"pageYOffset":"pageXOffset"]:f.support.boxModel&&g.document.documentElement[d]||g.document.body[d]:e[d]}return this.each(function(){g=cy(this),g?g.scrollTo(a?f(g).scrollLeft():c,a?c:f(g).scrollTop()):this[d]=c})}}),f.each(["Height","Width"],function(a,c){var d=c.toLowerCase();f.fn["inner"+c]=function(){var a=this[0];return 
a?a.style?parseFloat(f.css(a,d,"padding")):this[d]():null},f.fn["outer"+c]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,d,a?"margin":"border")):this[d]():null},f.fn[d]=function(a){var e=this[0];if(!e)return a==null?null:this;if(f.isFunction(a))return this.each(function(b){var c=f(this);c[d](a.call(this,b,c[d]()))});if(f.isWindow(e)){var g=e.document.documentElement["client"+c],h=e.document.body;return e.document.compatMode==="CSS1Compat"&&g||h&&h["client"+c]||g}if(e.nodeType===9)return Math.max(e.documentElement["client"+c],e.body["scroll"+c],e.documentElement["scroll"+c],e.body["offset"+c],e.documentElement["offset"+c]);if(a===b){var i=f.css(e,d),j=parseFloat(i);return f.isNumeric(j)?j:i}return this.css(d,typeof a=="string"?a:a+"px")}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/logo-small.png b/akka-docs-dev/_sphinx/themes/akka/static/logo-small.png deleted file mode 100644 index 2ad5096d3c..0000000000 Binary files a/akka-docs-dev/_sphinx/themes/akka/static/logo-small.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/themes/akka/static/pdf-icon.png b/akka-docs-dev/_sphinx/themes/akka/static/pdf-icon.png deleted file mode 100644 index 203ecad073..0000000000 Binary files a/akka-docs-dev/_sphinx/themes/akka/static/pdf-icon.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/themes/akka/static/pdf-java-icon.png b/akka-docs-dev/_sphinx/themes/akka/static/pdf-java-icon.png deleted file mode 100644 index b9923de062..0000000000 Binary files a/akka-docs-dev/_sphinx/themes/akka/static/pdf-java-icon.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/themes/akka/static/pdf-scala-icon.png b/akka-docs-dev/_sphinx/themes/akka/static/pdf-scala-icon.png deleted file mode 100644 index 1210d021ad..0000000000 Binary files a/akka-docs-dev/_sphinx/themes/akka/static/pdf-scala-icon.png and /dev/null differ diff --git a/akka-docs-dev/_sphinx/themes/akka/static/prettify.css b/akka-docs-dev/_sphinx/themes/akka/static/prettify.css deleted file mode 100644 index b47210fb35..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/prettify.css +++ /dev/null @@ -1,43 +0,0 @@ -.com { color: #93a1a1; } -.lit { color: #195f91; } -.pun, .opn, .clo { color: #595050; } -.fun { color: #dc322f; } -.str, .atv { color: #83b925; } -.kwd, .tag { color: #30a628; } -.typ, .atn, .dec, .var { color: #008fa9; } -.pln { color: #595050;/*color: #93a1a1;*/ } -pre.prettyprint { - background: #EFF2F5; - padding: 9px; - border: 1px solid rgba(0,0,0,.2); - -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.1); - -moz-box-shadow: 0 1px 2px rgba(0,0,0,.1); - box-shadow: 0 1px 2px rgba(0,0,0,.1); -} - -/* Specify class=linenums on a pre to get line numbering */ -ol.linenums { margin: 0 0 0 40px; } /* IE indents via margin-left */ -ol.linenums li { color: rgba(0,0,0,.35); line-height: 20px; } -/* Alternate shading for lines */ -li.L1, li.L3, li.L5, li.L7, li.L9 { } - -/* -$base03: #002b36; -$base02: #073642; -$base01: #586e75; -$base00: #657b83; -$base0: #839496; -$base1: #93a1a1; -$base2: #eee8d5; -$base3: #fdf6e3; -$yellow: #b58900; -$orange: #cb4b16; -$red: #dc322f; -$magenta: #d33682; -$violet: #6c71c4; -$blue: #268bd2; -$cyan: #2aa198; -$green: #859900; -*/ - -/*.pln{color:#000}@media 
screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{*//*padding:2px;border:1px solid #888*//*}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee}*/ \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/themes/akka/static/prettify.js b/akka-docs-dev/_sphinx/themes/akka/static/prettify.js deleted file mode 100644 index eef5ad7e6a..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/prettify.js +++ /dev/null @@ -1,28 +0,0 @@ -var q=null;window.PR_SHOULD_USE_CONTINUATION=!0; -(function(){function L(a){function m(a){var f=a.charCodeAt(0);if(f!==92)return f;var b=a.charAt(1);return(f=r[b])?f:"0"<=b&&b<="7"?parseInt(a.substring(1),8):b==="u"||b==="x"?parseInt(a.substring(2),16):a.charCodeAt(1)}function e(a){if(a<32)return(a<16?"\\x0":"\\x")+a.toString(16);a=String.fromCharCode(a);if(a==="\\"||a==="-"||a==="["||a==="]")a="\\"+a;return a}function h(a){for(var f=a.substring(1,a.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),a= -[],b=[],o=f[0]==="^",c=o?1:0,i=f.length;c122||(d<65||j>90||b.push([Math.max(65,j)|32,Math.min(d,90)|32]),d<97||j>122||b.push([Math.max(97,j)&-33,Math.min(d,122)&-33]))}}b.sort(function(a,f){return a[0]-f[0]||f[1]-a[1]});f=[];j=[NaN,NaN];for(c=0;ci[0]&&(i[1]+1>i[0]&&b.push("-"),b.push(e(i[1])));b.push("]");return b.join("")}function y(a){for(var f=a.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),b=f.length,d=[],c=0,i=0;c=2&&a==="["?f[c]=h(j):a!=="\\"&&(f[c]=j.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return f.join("")}for(var t=0,s=!1,l=!1,p=0,d=a.length;p=5&&"lang-"===b.substring(0,5))&&!(o&&typeof o[1]==="string"))c=!1,b="src";c||(r[f]=b)}i=d;d+=f.length;if(c){c=o[1];var j=f.indexOf(c),k=j+c.length;o[2]&&(k=f.length-o[2].length,j=k-c.length);b=b.substring(5);B(l+i,f.substring(0,j),e,p);B(l+i+j,c,C(b,c),p);B(l+i+k,f.substring(k),e,p)}else p.push(l+i,b)}a.e=p}var h={},y;(function(){for(var e=a.concat(m), -l=[],p={},d=0,g=e.length;d=0;)h[n.charAt(k)]=r;r=r[1];n=""+r;p.hasOwnProperty(n)||(l.push(r),p[n]=q)}l.push(/[\S\s]/);y=L(l)})();var t=m.length;return e}function u(a){var m=[],e=[];a.tripleQuotedStrings?m.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?m.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/, -q,"'\"`"]):m.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&e.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var 
[Deleted minified assets: google-code-prettify (prettify.js) and a customized Bootstrap 2.x stylesheet (grid, typography, forms, tables, glyphicon sprites, dropdowns, buttons, navbar, breadcrumbs, pagination, modals, tooltips, popovers, labels, progress bars).]
transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} -.progress-success .bar{background-color:#5eb95e;background-image:-moz-linear-gradient(top, #62c462, #57a957);background-image:-ms-linear-gradient(top, #62c462, #57a957);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#62c462), to(#57a957));background-image:-webkit-linear-gradient(top, #62c462, #57a957);background-image:-o-linear-gradient(top, #62c462, #57a957);background-image:linear-gradient(top, #62c462, #57a957);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#62c462', endColorstr='#57a957', GradientType=0);} -.progress-success.progress-striped .bar{background-color:#62c462;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-ms-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} -.progress-info .bar{background-color:#4bb1cf;background-image:-moz-linear-gradient(top, #5bc0de, #339bb9);background-image:-ms-linear-gradient(top, #5bc0de, #339bb9);background-image:-webkit-gradient(linear, 0 0, 0 100%, from(#5bc0de), to(#339bb9));background-image:-webkit-linear-gradient(top, #5bc0de, #339bb9);background-image:-o-linear-gradient(top, #5bc0de, #339bb9);background-image:linear-gradient(top, #5bc0de, #339bb9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#5bc0de', endColorstr='#339bb9', GradientType=0);} -.progress-info.progress-striped .bar{background-color:#5bc0de;background-image:-webkit-gradient(linear, 0 100%, 100% 0, color-stop(0.25, rgba(255, 255, 255, 0.15)), color-stop(0.25, transparent), color-stop(0.5, transparent), color-stop(0.5, rgba(255, 255, 255, 0.15)), color-stop(0.75, rgba(255, 255, 255, 0.15)), color-stop(0.75, transparent), to(transparent));background-image:-webkit-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-moz-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, 
rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-ms-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:-o-linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-image:linear-gradient(-45deg, rgba(255, 255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);} -.accordion{margin-bottom:18px;} -.accordion-group{margin-bottom:2px;border:1px solid #e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;} -.accordion-heading{border-bottom:0;} -.accordion-heading .accordion-toggle{display:block;padding:8px 15px;} -.accordion-inner{padding:9px 15px;border-top:1px solid #e5e5e5;} -.carousel{position:relative;margin-bottom:18px;line-height:1;} -.carousel-inner{overflow:hidden;width:100%;position:relative;} -.carousel .item{display:none;position:relative;-webkit-transition:0.6s ease-in-out left;-moz-transition:0.6s ease-in-out left;-ms-transition:0.6s ease-in-out left;-o-transition:0.6s ease-in-out left;transition:0.6s ease-in-out left;} -.carousel .item>img{display:block;line-height:1;} -.carousel .active,.carousel .next,.carousel .prev{display:block;} -.carousel .active{left:0;} -.carousel .next,.carousel .prev{position:absolute;top:0;width:100%;} -.carousel .next{left:100%;} -.carousel .prev{left:-100%;} -.carousel .next.left,.carousel .prev.right{left:0;} -.carousel .active.left{left:-100%;} -.carousel .active.right{left:100%;} -.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#ffffff;text-align:center;background:#222222;border:3px solid #ffffff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:0.5;filter:alpha(opacity=50);}.carousel-control.right{left:auto;right:15px;} -.carousel-control:hover{color:#ffffff;text-decoration:none;opacity:0.9;filter:alpha(opacity=90);} -.carousel-caption{position:absolute;left:0;right:0;bottom:0;padding:10px 15px 5px;background:#333333;background:rgba(0, 0, 0, 0.75);} -.carousel-caption h4,.carousel-caption p{color:#ffffff;} -.hero-unit{padding:60px;margin-bottom:30px;background-color:#f5f5f5;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;} -.hero-unit p{font-size:18px;font-weight:200;line-height:27px;} -.pull-right{float:right;} -.pull-left{float:left;} -.hide{display:none;} -.show{display:block;} -.invisible{visibility:hidden;} diff --git a/akka-docs-dev/_sphinx/themes/akka/static/toc.js b/akka-docs-dev/_sphinx/themes/akka/static/toc.js deleted file mode 100644 index 98f18fd518..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/static/toc.js +++ /dev/null @@ -1,131 +0,0 @@ -/*! - * samaxesJS JavaScript Library - * jQuery TOC Plugin v1.1.3 - * http://code.google.com/p/samaxesjs/ - * - * Copyright (c) 2011 samaxes.com - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function($) { - - /* - * The TOC plugin dynamically builds a table of contents from the headings in - * a document and prepends legal-style section numbers to each of the headings. - */ - $.fn.toc = function(options) { - var opts = $.extend({}, $.fn.toc.defaults, options); - var toc = this.append('<ul/>').children('ul'); - var headers = {h1: 0, h2: 0, h3: 0, h4: 0, h5: 0, h6: 0}; - var index = 0; - var indexes = {h1: 0, h2: 0, h3: 0, h4: 0, h5: 0, h6: 0}; - for (var i = 1; i <= 6; i++) { - indexes['h' + i] = (opts.exclude.match(new RegExp('h' + i, 'i')) === null && $('h' + i).length > 0) ? ++index : 0; - } - - return this.each(function() { - $(opts.context + ' :header').not(opts.exclude).each(function() { - var $this = $(this); - for (var i = 6; i >= 1; i--) { - if ($this.is('h' + i)) { - if (opts.numerate) { - checkContainer(headers['h' + i], toc); - updateNumeration(headers, 'h' + i); - if (opts.autoId && !$this.attr('id')) { - $this.attr('id', generateId($this.text())); - } - $this.text(addNumeration(headers, 'h' + i, $this.text())); - } - if (opts.autoId && !$this.attr('id')) { - $this.attr('id', generateId($this.text())); - } - appendToTOC(toc, indexes['h' + i], $this.attr('id'), $this.text()); - } - } - }); - }); - }; - - /* - * Checks if the last node is an 'ul' element. - * If not, a new one is created. - */ - function checkContainer(header, toc) { - if (header === 0 && toc.find(':last').length !== 0 && !toc.find(':last').is('ul')) { - toc.find('li:last').append('<ul/>'); - } - }; - - /* - * Updates headers numeration. - */ - function updateNumeration(headers, header) { - $.each(headers, function(i, val) { - if (i === header) { - ++headers[i]; - } else if (i > header) { - headers[i] = 0; - } - }); - }; - - /* - * Generate an anchor id from a string by replacing unwanted characters. - */ - function generateId(text) { - return text.replace(/[ <#\/\\?&.,():;]/g, '_'); - }; - - /* - * Prepends the numeration to a heading. - */ - function addNumeration(headers, header, text) { - var numeration = ''; - - $.each(headers, function(i, val) { - if (i <= header && headers[i] > 0) { - numeration += headers[i] + '.'; - } - }); - - return numeration + ' ' + text; - }; - - /* - * Appends a new node to the TOC. - */ - function appendToTOC(toc, index, id, text) { - var parent = toc; - - for (var i = 1; i < index; i++) { - if (parent.find('> li:last > ul').length === 0) { - parent.append('<li><ul/></li>'); - } - parent = parent.find('> li:last > ul:first'); - } - - if (id === '') { - parent.append('<li>' + text + '</li>'); - } else { - parent.append('<li><a href="#' + id + '">' + text + '</a></li>'); - } - }; - - $.fn.toc.defaults = { - exclude: 'h1, h5, h6', - context: '', - autoId: true, - numerate: false - }; -})(jQuery);
diff --git a/akka-docs-dev/_sphinx/themes/akka/static/watermark.png b/akka-docs-dev/_sphinx/themes/akka/static/watermark.png deleted file mode 100644 index dfbc5d5e2c..0000000000 Binary files a/akka-docs-dev/_sphinx/themes/akka/static/watermark.png and /dev/null differ
diff --git a/akka-docs-dev/_sphinx/themes/akka/theme.conf b/akka-docs-dev/_sphinx/themes/akka/theme.conf deleted file mode 100644 index 2f5cc641a1..0000000000 --- a/akka-docs-dev/_sphinx/themes/akka/theme.conf +++ /dev/null @@ -1,6 +0,0 @@ -[theme] -inherit = basic -stylesheet = style.css - -[options] -full_logo = false \ No newline at end of file
diff --git a/akka-docs-dev/rst/conf.py b/akka-docs-dev/rst/conf.py deleted file mode 100644 index 593c9d421c..0000000000 --- a/akka-docs-dev/rst/conf.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Akka documentation build configuration file. -# - -import sys, os - -# -- General configuration ----------------------------------------------------- - -sys.path.append(os.path.abspath('../_sphinx/exts')) -extensions = ['sphinx.ext.todo', 'includecode', 'includecode2'] - -templates_path = ['_templates'] -source_suffix = '.rst' -master_doc = 'index' -exclude_patterns = ['_build', 'pending', 'disabled'] - -project = u'Akka' -copyright = u'2011-2014, Typesafe Inc' -version = '@version@' -release = '@version@' - -pygments_style = 'simple' -highlight_language = 'scala' -add_function_parentheses = False -show_authors = True - -# -- Options for HTML output --------------------------------------------------- - -html_theme = 'akka' -html_theme_path = ['../_sphinx/themes'] -html_favicon = '../_sphinx/static/favicon.ico' - -html_title = 'Akka Documentation' -html_logo = '../_sphinx/static/logo.png' -#html_favicon = None - -html_static_path = ['../_sphinx/static'] - -html_last_updated_fmt = '%b %d, %Y' -#html_sidebars = {} -#html_additional_pages = {} -html_domain_indices = False -html_use_index = False -html_show_sourcelink = False -html_show_sphinx = False -html_show_copyright = True -htmlhelp_basename = 'Akkadoc' -html_use_smartypants = False -html_add_permalinks = '' - -html_context = { - 'include_analytics': 'online' in tags -} - -# -- Options for EPUB output --------------------------------------------------- -epub_author = "Typesafe Inc" -epub_language = "en" -epub_publisher = epub_author -epub_identifier = "http://doc.akka.io/docs/akka/snapshot/" -epub_scheme = "URL" -epub_cover = ("../_sphinx/static/akka.png", "") - -# -- Options for LaTeX output -------------------------------------------------- - -def setup(app): - from sphinx.util.texescape import tex_replacements - tex_replacements.append((u'⇒', ur'\(\Rightarrow\)')) - -latex_paper_size = 'a4' -latex_font_size = '10pt' - -latex_documents = [ - ('java', 'AkkaStreamAndHTTPJava.tex', u' Akka Stream and HTTP Experimental Java Documentation', - u'Typesafe Inc', 'manual'), - ('scala', 'AkkaStreamAndHTTPScala.tex', u' Akka Stream and HTTP Experimental Scala Documentation', - u'Typesafe Inc', 'manual'), -] - -latex_elements = { - 'classoptions': ',oneside,openany', - 'babel': '\\usepackage[english]{babel}', - 'fontpkg': '\\PassOptionsToPackage{warn}{textcomp} \\usepackage{times}', - 'preamble': '\\definecolor{VerbatimColor}{rgb}{0.935,0.935,0.935}' - } - -# latex_logo = '_sphinx/static/akka.png' diff --git a/akka-docs-dev/rst/index.rst b/akka-docs-dev/rst/index.rst deleted file mode 100644
index 300ed680fc..0000000000 --- a/akka-docs-dev/rst/index.rst +++ /dev/null @@ -1,8 +0,0 @@ -Contents -======== - -.. toctree:: - :maxdepth: 1 - - java - scala diff --git a/akka-docs-dev/rst/java.rst b/akka-docs-dev/rst/java.rst deleted file mode 100644 index c608687e28..0000000000 --- a/akka-docs-dev/rst/java.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _stream-java-api: - -Java Documentation -================== - -.. toctree:: - :maxdepth: 3 - - java/stream-index - java/http/index diff --git a/akka-docs-dev/rst/java/code/docs/MigrationsJava.java b/akka-docs-dev/rst/java/code/docs/MigrationsJava.java deleted file mode 100644 index 7e86a92eea..0000000000 --- a/akka-docs-dev/rst/java/code/docs/MigrationsJava.java +++ /dev/null @@ -1,249 +0,0 @@ -package docs; - -import akka.actor.ActorSystem; -import akka.actor.Cancellable; -import akka.http.javadsl.model.Uri; -import akka.dispatch.Futures; -import akka.japi.function.Creator; -import akka.japi.Pair; -import akka.japi.function.Function; -import akka.stream.*; -import akka.stream.javadsl.*; -import akka.stream.testkit.TestPublisher; -import akka.stream.testkit.TestSubscriber; -import akka.util.ByteString; -import scala.Option; -import scala.concurrent.Future; -import scala.concurrent.duration.FiniteDuration; -import scala.concurrent.Promise; -import scala.runtime.BoxedUnit; - -import org.reactivestreams.Publisher; -import org.reactivestreams.Subscriber; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.concurrent.TimeUnit; -import java.nio.charset.Charset; - -public class MigrationsJava { - - // This is compile-only code, no need for actually running anything. - public static ActorMaterializer mat = null; - public static ActorSystem sys = null; - - public static class SomeInputStream extends InputStream { - public SomeInputStream() {} - @Override public int read() throws IOException { return 0; } - } - - public static class SomeOutputStream extends OutputStream { - @Override public void write(int b) throws IOException { return; } - } - - public static void main(String[] args) { - - Outlet outlet = null; - - Outlet outlet1 = null; - Outlet outlet2 = null; - - Inlet inlet = null; - - Inlet inlet1 = null; - Inlet inlet2 = null; - - Flow flow = Flow.of(Integer.class); - Flow flow1 = Flow.of(Integer.class); - Flow flow2 = Flow.of(Integer.class); - - Promise> promise = null; - - - { - Graph, BoxedUnit> graphSource = null; - Graph, BoxedUnit> graphSink = null; - Graph, BoxedUnit> graphFlow = null; - - //#flow-wrap - Source source = Source.fromGraph(graphSource); - Sink sink = Sink.fromGraph(graphSink); - Flow aflow = Flow.fromGraph(graphFlow); - Flow.fromSinkAndSource(Sink.head(), Source.single(0)); - Flow.fromSinkAndSourceMat(Sink.head(), Source.single(0), Keep.left()); - //#flow-wrap - - Graph, BoxedUnit> bidiGraph = null; - - //#bidi-wrap - BidiFlow bidiFlow = - BidiFlow.fromGraph(bidiGraph); - BidiFlow.fromFlows(flow1, flow2); - BidiFlow.fromFlowsMat(flow1, flow2, Keep.both()); - //#bidi-wrap - - } - - { - //#graph-create - GraphDSL.create(builder -> { - //... - return ClosedShape.getInstance(); - }); - - GraphDSL.create(builder -> { - //... - return new FlowShape<>(inlet, outlet); - }); - //#graph-create - } - - { - //#graph-create-2 - GraphDSL.create(builder -> { - //... - return SourceShape.of(outlet); - }); - - GraphDSL.create(builder -> { - //... - return SinkShape.of(inlet); - }); - - GraphDSL.create(builder -> { - //... 
- return FlowShape.of(inlet, outlet); - }); - - GraphDSL.create(builder -> { - //... - return BidiShape.of(inlet1, outlet1, inlet2, outlet2); - }); - //#graph-create-2 - } - - { - //#graph-builder - GraphDSL.create(builder -> { - builder.from(outlet).toInlet(inlet); - builder.from(outlet).via(builder.add(flow)).toInlet(inlet); - builder.from(builder.add(Source.single(0))).to(builder.add(Sink.head())); - //... - return ClosedShape.getInstance(); - }); - //#graph-builder - } - - //#source-creators - Source>> src = Source.maybe(); - // Complete the promise with an empty option to emulate the old lazyEmpty - promise.trySuccess(scala.Option.empty()); - - final Source ticks = Source.tick( - FiniteDuration.create(0, TimeUnit.MILLISECONDS), - FiniteDuration.create(200, TimeUnit.MILLISECONDS), - "tick"); - - final Source pubSource = - Source.fromPublisher(TestPublisher.manualProbe(true, sys)); - - final Source futSource = - Source.fromFuture(Futures.successful(42)); - - final Source> subSource = - Source.asSubscriber(); - //#source-creators - - //#sink-creators - final Sink subSink = - Sink.fromSubscriber(TestSubscriber.manualProbe(sys)); - //#sink-creators - - //#sink-as-publisher - final Sink> pubSink = - Sink.asPublisher(false); - - final Sink> pubSinkFanout = - Sink.asPublisher(true); - //#sink-as-publisher - - //#empty-flow - Flow emptyFlow = Flow.create(); - // or - Flow emptyFlow2 = Flow.of(Integer.class); - //#empty-flow - - //#flatMapConcat - Flow.>create(). - flatMapConcat(new Function, Source>(){ - @Override public Source apply(Source param) throws Exception { - return param; - } - }); - //#flatMapConcat - - Uri uri = null; - //#raw-query - final akka.japi.Option theRawQueryString = uri.rawQueryString(); - //#raw-query - - //#query-param - final akka.japi.Option aQueryParam = uri.query().get("a"); - //#query-param - - //#file-source-sink - final Source> fileSrc = - FileIO.fromFile(new File(".")); - - final Source> otherFileSrc = - FileIO.fromFile(new File("."), 1024); - - final Sink> fileSink = - FileIO.toFile(new File(".")); - //#file-source-sink - - //#input-output-stream-source-sink - final Source> inputStreamSrc = - StreamConverters.fromInputStream(new Creator(){ - public InputStream create() { - return new SomeInputStream(); - } - }); - - final Source> otherInputStreamSrc = - StreamConverters.fromInputStream(new Creator(){ - public InputStream create() { - return new SomeInputStream(); - } - }, 1024); - - final Sink> outputStreamSink = - StreamConverters.fromOutputStream(new Creator(){ - public OutputStream create() { - return new SomeOutputStream(); - } - }); - //#input-output-stream-source-sink - - - //#output-input-stream-source-sink - final FiniteDuration timeout = FiniteDuration.Zero(); - - final Source outputStreamSrc = - StreamConverters.asOutputStream(); - - final Source otherOutputStreamSrc = - StreamConverters.asOutputStream(timeout); - - final Sink someInputStreamSink = - StreamConverters.asInputStream(); - - final Sink someOtherInputStreamSink = - StreamConverters.asInputStream(timeout); - //#output-input-stream-source-sink - - } - -} diff --git a/akka-docs-dev/rst/java/migration-guide-1.0-2.x-java.rst b/akka-docs-dev/rst/java/migration-guide-1.0-2.x-java.rst deleted file mode 100644 index 31fa3bec0e..0000000000 --- a/akka-docs-dev/rst/java/migration-guide-1.0-2.x-java.rst +++ /dev/null @@ -1,715 +0,0 @@ -.. 
_migration-2.0-java: - -########################## -Migration Guide 1.0 to 2.x -########################## - -The 2.0 release contains some structural changes that require some -simple, mechanical source-level changes in client code. While these are detailed below, -there is another change that may have an impact on the runtime behavior of your streams -and which therefore is listed first. - -Operator Fusion is on by default -================================ - -Akka Streams 2.0 contains an initial version of stream operator fusion support. This means that -the processing steps of a flow or stream graph can be executed within the same Actor and has three -consequences: - - * starting up a stream may take longer than before due to executing the fusion algorithm - * passing elements from one processing stage to the next is a lot faster between fused - stages due to avoiding the asynchronous messaging overhead - * fused stream processing stages do no longer run in parallel to each other, meaning that - only up to one CPU core is used for each fused part - -The first point can be countered by pre-fusing and then reusing a stream blueprint, see ``akka.stream.Fusing``. -In order to balance the effects of the second and third bullet points you will have to insert asynchronous -boundaries manually into your flows and graphs by way of adding ``Attributes.asyncBoundary`` to pieces that -shall communicate with the rest of the graph in an asynchronous fashion. - -.. warning:: - - Without fusing (i.e. up to version 2.0-M2) each stream processing stage had an implicit input buffer - that holds a few elements for efficiency reasons. If your flow graphs contain cycles then these buffers - may have been crucial in order to avoid deadlocks. With fusing these implicit buffers are no longer - there, data elements are passed without buffering between fused stages. In those cases where buffering - is needed in order to allow the stream to run at all, you will have to insert explicit buffers with the - ``.buffer()`` combinator—typically a buffer of size 2 is enough to allow a feedback loop to function. - -The new fusing behavior can be disabled by setting the configuration parameter ``akka.stream.materializer.auto-fusing=off``. -In that case you can still manually fuse those graphs which shall run on less Actors. Fusable elements are - - * all GraphStages (this includes all built-in junctions apart from ``groupBy``) - * all Stages (this includes all built-in linear operators) - * TCP connections - -Introduced proper named constructor methods instead of ``wrap()`` -================================================================= - -There were several, unrelated uses of ``wrap()`` which made it hard to find and hard to understand the intention of -the call. Therefore these use-cases now have methods with different names, helping Java 8 type inference (by reducing -the number of overloads) and finding relevant methods in the documentation. - -Creating a Flow from other stages ---------------------------------- - -It was possible to create a ``Flow`` from a graph with the correct shape (``FlowShape``) using ``wrap()``. Now this -must be done with the more descriptive method ``Flow.fromGraph()``. - -It was possible to create a ``Flow`` from a ``Source`` and a ``Sink`` using ``wrap()``. Now this functionality can -be accessed trough the more descriptive methods ``Flow.fromSinkAndSource`` and ``Flow.fromSinkAndSourceMat``. 
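
A minimal compile-only Java sketch of the constructors named above (an illustration only: it assumes the 2.0-era Java DSL in which materialized values are ``BoxedUnit`` and ``Sink.head()`` materializes a Scala ``Future``; ``graphFlow`` is just a placeholder for any graph with a ``FlowShape``):

::

    import akka.stream.FlowShape;
    import akka.stream.Graph;
    import akka.stream.javadsl.Flow;
    import akka.stream.javadsl.Keep;
    import akka.stream.javadsl.Sink;
    import akka.stream.javadsl.Source;
    import scala.runtime.BoxedUnit;

    public class FlowFromGraphSketch {
      public static void main(String[] args) {
        // Placeholder: any Graph with a FlowShape will do; a plain identity Flow is used here
        Graph<FlowShape<Integer, Integer>, BoxedUnit> graphFlow = Flow.of(Integer.class);

        // Flow.wrap(graph) becomes the more descriptive Flow.fromGraph(graph)
        Flow<Integer, Integer, BoxedUnit> flow = Flow.fromGraph(graphFlow);

        // Flow.wrap(sink, source, combine) becomes fromSinkAndSource / fromSinkAndSourceMat
        Flow.fromSinkAndSource(Sink.<Integer>head(), Source.single(0));
        Flow.fromSinkAndSourceMat(Sink.<Integer>head(), Source.single(0), Keep.left());
      }
    }
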
- - -Creating a BidiFlow from other stages -------------------------------------- - -It was possible to create a ``BidiFlow`` from a graph with the correct shape (``BidiShape``) using ``wrap()``. Now this -must be done with the more descriptive method ``BidiFlow.fromGraph()``. - -It was possible to create a ``BidiFlow`` from two ``Flow`` s using ``wrap()``. Now this functionality can -be accessed trough the more descriptive methods ``BidiFlow.fromFlows`` and ``BidiFlow.fromFlowsMat``. - -Update procedure ----------------- - -1. Replace all uses of ``Flow.wrap`` when it converts a ``Graph`` to a ``Flow`` with ``Flow.fromGraph`` -2. Replace all uses of ``Flow.wrap`` when it converts a ``Source`` and ``Sink`` to a ``Flow`` with - ``Flow.fromSinkAndSource`` or ``Flow.fromSinkAndSourceMat`` -3. Replace all uses of ``BidiFlow.wrap`` when it converts a ``Graph`` to a ``BidiFlow`` with ``BidiFlow.fromGraph`` -4. Replace all uses of ``BidiFlow.wrap`` when it converts two ``Flow`` s to a ``BidiFlow`` with - ``BidiFlow.fromFlows`` or ``BidiFlow.fromFlowsMat`` -5. Replace all uses of ``BidiFlow.apply()`` (Scala DSL) or ``BidiFlow.create()`` (Java DSL) when it converts two - functions to a ``BidiFlow`` with ``BidiFlow.fromFunctions`` - -Example -^^^^^^^ - -:: - - Graph, BoxedUnit> graphSource = ...; - // This no longer works! - Source source = Source.wrap(graphSource); - - Graph, BoxedUnit> graphSink = ...; - // This no longer works! - Sink sink = Sink.wrap(graphSink); - - Graph, BoxedUnit> graphFlow = ...; - // This no longer works! - Flow flow = Flow.wrap(graphFlow); - - // This no longer works! - Flow.wrap(Sink.head(), Source.single(0), Keep.left()); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#flow-wrap - -and - -:: - - Graph, BoxedUnit> bidiGraph = ...; - // This no longer works! - BidiFlow bidiFlow = BidiFlow.wrap(bidiGraph); - - // This no longer works! - BidiFlow.wrap(flow1, flow2, Keep.both()); - - -Should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#bidi-wrap - - -Renamed ``inlet()`` and ``outlet()`` to ``in()`` and ``out()`` in ``SourceShape``, ``SinkShape`` and ``FlowShape`` -================================================================================================================== - -The input and output ports of these shapes where called ``inlet()`` and ``outlet()`` compared to other shapes that -consistently used ``in()`` and ``out()``. Now all :class:`Shape` s use ``in()`` and ``out()``. - -Update procedure ----------------- - -Change all references to ``inlet()`` to ``in()`` and all references to ``outlet()`` to ``out()`` when referring to the ports -of :class:`FlowShape`, :class:`SourceShape` and :class:`SinkShape`. - - -FlowGraph class and builder methods have been renamed -===================================================== - -Due to incorrect overlap with the :class:`Flow` concept we renamed the :class:`FlowGraph` class to :class:`GraphDSL`. -There is now only one graph creation method called ``create`` which is analogous to the old ``partial`` method. For -closed graphs now it is explicitly required to return ``ClosedShape`` at the end of the builder block. - -Update procedure ----------------- - -1. Search and replace all occurrences of ``FlowGraph`` with ``GraphDSL``. -2. Replace all occurrences of ``GraphDSL.partial()`` or ``GraphDSL.closed()`` with ``GraphDSL.create()``. -3. Add ``ClosedShape`` as a return value of the builder block if it was ``FlowGraph.closed()`` before. -4. 
Wrap the closed graph with ``RunnableGraph.fromGraph`` if it was ``FlowGraph.closed()`` before. - -Example -^^^^^^^ - -:: - - // This no longer works! - FlowGraph.factory().closed(builder -> { - //... - }); - - // This no longer works! - FlowGraph.factory().partial(builder -> { - //... - return new FlowShape<>(inlet, outlet); - }); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#graph-create - -Methods that create Source, Sink, Flow from Graphs have been removed -==================================================================== - -Previously there were convenience methods available on ``Sink``, ``Source``, ``Flow`` an ``BidiFlow`` to create -these DSL elements from a graph builder directly. Now this requires two explicit steps to reduce the number of overloaded -methods (helps Java 8 type inference) and also reduces the ways how these elements can be created. There is only one -graph creation method to learn (``GraphDSL.create``) and then there is only one conversion method to use ``fromGraph()``. - -This means that the following methods have been removed: - - ``adapt()`` method on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (both DSLs) - - ``apply()`` overloads providing a graph ``Builder`` on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (Scala DSL) - - ``create()`` overloads providing a graph ``Builder`` on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (Java DSL) - -Update procedure ----------------- - -Everywhere where ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` is created from a graph using a builder have to -be replaced with two steps - -1. Create a ``Graph`` with the correct ``Shape`` using ``GraphDSL.create`` (e.g.. for ``Source`` it means first - creating a ``Graph`` with ``SourceShape``) -2. Create the required DSL element by calling ``fromGraph()`` on the required DSL element (e.g. ``Source.fromGraph``) - passing the graph created in the previous step - -Example -^^^^^^^ - -:: - - // This no longer works! - Source.factory().create(builder -> { - //... - return outlet; - }); - - // This no longer works! - Sink.factory().create(builder -> { - //... - return inlet; - }); - - // This no longer works! - Flow.factory().create(builder -> { - //... - return new Pair<>(inlet, outlet); - }); - - // This no longer works! - BidiFlow.factory().create(builder -> { - //... - return new BidiShape<>(inlet1, outlet1, inlet2, outlet2); - }); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#graph-create-2 - -Some graph Builder methods have been removed -============================================ - -Due to the high number of overloads Java 8 type inference suffered, and it was also hard to figure out which time -to use which method. Therefore various redundant methods have been removed. As a consequence, every ``Sink``, ``Source`` -and ``Flow`` needs to be explicitly added via ``builder.add()``. - -Update procedure ----------------- - -1. All uses of ``builder.edge(outlet,inlet)`` should be replaced by the alternative ``builder.from(outlet).toInlet(inlet)`` -3. All uses of ``builder.source`` should be replaced by ``builder.from(builder.add(source))`` -4. All uses of ``builder.flow`` should be replaced by ``builder.….via(builder.add(flow))`` -5. 
All uses of ``builder.sink`` should be replaced by ``builder.….to(builder.add(sink)))`` - -:: - - FlowGraph.factory().closed(builder -> { - // These no longer work - builder.edge(outlet, inlet); - builder.flow(outlet, flow, inlet); - builder.source(Source.single(0)); - builder.sink(Sink.head()); - //... - }); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#graph-builder - -Source constructor name changes -=============================== - -``Source.lazyEmpty`` has been replaced by ``Source.maybe`` which returns a ``Promise`` that can be completed by one or -zero elements by providing an ``Option``. This is different from ``lazyEmpty`` which only allowed completion to be -sent, but no elements. - -The ``from()`` overload on ``Source`` has been refactored to separate methods to reduce the number of overloads and -make source creation more discoverable. - -``Source.subscriber`` has been renamed to ``Source.asSubscriber``. - -Update procedure ----------------- - -1. All uses of ``Source.lazyEmpty`` should be replaced by ``Source.maybe`` and the returned ``Promise`` completed with - a ``None`` (an empty ``Option``) -2. Replace all uses of ``Source.from(delay,interval,tick)`` with the method ``Source.tick(delay,interval,tick)`` -3. Replace all uses of ``Source.from(publisher)`` with the method ``Source.fromPublisher(publisher)`` -4. Replace all uses of ``Source.from(future)`` with the method ``Source.fromFuture(future))`` -5. Replace all uses of ``Source.subscriber`` with the method ``Source.asSubscriber`` - -Example -^^^^^^^ - -:: - - // This no longer works! - Source> src = Source.lazyEmpty(); - //... - promise.trySuccess(BoxedUnit.UNIT); - - // This no longer works! - final Source ticks = Source.from( - FiniteDuration.create(0, TimeUnit.MILLISECONDS), - FiniteDuration.create(200, TimeUnit.MILLISECONDS), - "tick"); - - // This no longer works! - final Source pubSource = - Source.from(TestPublisher.manualProbe(true, sys)); - - // This no longer works! - final Source futSource = - Source.from(Futures.successful(42)); - - // This no longer works! - final Source> subSource = - Source.subscriber(); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#source-creators - -Sink constructor name changes -============================= - -``Sink.create(subscriber)`` has been renamed to ``Sink.fromSubscriber(subscriber)`` to reduce the number of overloads and -make sink creation more discoverable. - -Update procedure ----------------- - -1. Replace all uses of ``Sink.create(subscriber)`` with the method ``Sink.fromSubscriber(subscriber)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - final Sink subSink = - Sink.create(TestSubscriber.manualProbe(sys)); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#sink-creators - -``Flow.empty()`` have been removed -================================== - -The ``empty()`` method has been removed since it behaves exactly the same as ``create()``, creating a ``Flow`` with no -transformations added yet. - -Update procedure ----------------- - -1. Replace all uses of ``Flow.empty()`` with ``Flow.create``. - -:: - - // This no longer works! - Flow emptyFlow = Flow.empty(); - -should be replaced by - -.. 
includecode:: code/docs/MigrationsJava.java#empty-flow - -``flatten(FlattenStrategy)`` has been replaced by named counterparts -==================================================================== - -To simplify type inference in Java 8 and to make the method more discoverable, ``flatten(FlattenStrategy.concat)`` -has been removed and replaced with the alternative method ``flatMapConcat(f)``. - -Update procedure ----------------- - -1. Replace all occurrences of ``flatten(FlattenStrategy.concat)`` with ``flatMapConcat(identity)`` -2. Consider replacing ``map(f).flatMapConcat(identity)`` with ``flatMapConcat(f)`` - -Example -^^^^^^^ - -:: - - Flow.<Source<Integer, BoxedUnit>>create().flatten(FlattenStrategy.concat()); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#flatMapConcat - -`Sink.fanoutPublisher() and Sink.publisher() are now a single method` -===================================================================== - -It was a common user mistake to use ``Sink.publisher`` and get into trouble since it would only support -a single ``Subscriber``, and the discoverability of the appropriate fix was non-obvious (Sink.fanoutPublisher). -To make the decision whether to support fanout or not an active one, the aforementioned methods have been -replaced with a single method: ``Sink.asPublisher(fanout: Boolean)``. - -Update procedure ----------------- - -1. Replace all occurrences of ``Sink.publisher`` with ``Sink.asPublisher(false)`` -2. Replace all occurrences of ``Sink.fanoutPublisher`` with ``Sink.asPublisher(true)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - final Sink<Integer, Publisher<Integer>> pubSink = - Sink.publisher(); - - // This no longer works! - final Sink<Integer, Publisher<Integer>> pubSink = - Sink.fanoutPublisher(2, 8); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#sink-as-publisher - -FlexiMerge and FlexiRoute have been replaced by GraphStage -=========================================================== - -The ``FlexiMerge`` and ``FlexiRoute`` DSLs have been removed since they provided an abstraction that was too limiting -and a better abstraction has been created which is called ``GraphStage``. ``GraphStage`` can express fan-in and -fan-out stages, but many other constructs as well with possibly multiple input and output ports (e.g. a ``BidiStage``). - -This new abstraction provides a more uniform way to create custom stream processing stages of arbitrary ``Shape``. In -fact, all of the built-in fan-in and fan-out stages are now implemented in terms of ``GraphStage``. - -Update procedure ----------------- - -*There is no simple update procedure. The affected stages must be ported to the new ``GraphStage`` DSL manually. Please -read the* ``GraphStage`` *documentation (TODO) for details.* - -GroupBy, SplitWhen and SplitAfter now return SubFlow or SubSource -================================================================= - -Previously the ``groupBy``, ``splitWhen``, and ``splitAfter`` combinators -returned a type that included a :class:`Source` within its elements. -Transforming these substreams was only possible by nesting the respective -combinators inside a ``map`` of the outer stream. This has been made more -convenient and also safer by dropping down into transforming the substreams -instead: the return type is now a :class:`SubSource` (for sources) or a -:class:`SubFlow` (for flows) that does not implement the :class:`Graph` -interface and therefore only represents an unfinished intermediate builder -step.
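
As a rough orientation before the update procedure below, a minimal Java sketch of lifting a substream transformation up one level (illustrative only; it assumes the 2.0-era Java DSL where ``groupBy`` takes a maximum-substream count and the resulting ``SubFlow`` is folded back with ``concatSubstreams()``):

::

    import akka.stream.javadsl.Flow;

    public class GroupByLiftingSketch {
      public static void main(String[] args) {
        Flow.of(Integer.class)
            .groupBy(2, i -> i % 2)   // the first parameter caps the number of open substreams
            .map(i -> i + 3)          // now applied to every substream directly
            .concatSubstreams();      // folds the substreams back into a single Flow
      }
    }
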
- -Update Procedure ----------------- - -The transformations that were done on the substreams need to be lifted up one -level. This only works for cases where the processing topology is homogenous -for all substreams. - -Example -^^^^^^^ - -:: - - Flow. create() - // This no longer works! - .groupBy(i -> i % 2) - // This no longer works! - .map(pair -> pair.second().map(i -> i + 3)) - // This no longer works! - .flatten(FlattenStrategy.concat()) - -This is implemented now as - -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/MigrationsJava.java#group-flatten - -Example 2 -^^^^^^^^^ - -:: - - Flow. create() - // This no longer works! - .groupBy(i -> i) - // This no longer works! - .map(pair -> - pair.second().runFold(new Pair<>(pair.first(), 0), - (pair, word) -> new Pair<>(word, pair.second() + 1))) - // This no longer works! - .mapAsyncUnordered(4, i -> i) - -This is implemented now as - -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/MigrationsJava.java#group-fold - -Semantic change in ``isHoldingUpstream`` in the DetachedStage DSL -================================================================= - -The ``isHoldingUpstream`` method used to return true if the upstream port was in holding state and a completion arrived -(inside the ``onUpstreamFinished`` callback). Now it returns ``false`` when the upstream is completed. - -Update procedure ----------------- - -1. Those stages that relied on the previous behavior need to introduce an extra ``Boolean`` field with initial value - ``false`` -2. This field must be set on every call to ``holdUpstream()`` (and variants). -3. In completion, instead of calling ``isHoldingUpstream`` read this variable instead. - -See the example in the AsyncStage migration section for an example of this procedure. - -StatefulStage has been replaced by GraphStage -============================================= - -The :class:`StatefulStage` class had some flaws and limitations, most notably around completion handling which -caused subtle bugs. The new :class:`GraphStage` (:ref:`graphstage-java`) solves these issues and should be used -instead. - -Update procedure ----------------- - -There is no mechanical update procedure available. Please consult the :class:`GraphStage` documentation -(:ref:`graphstage-java`). - - -AsyncStage has been replaced by GraphStage -========================================== - -Due to its complexity and inflexibility ``AsyncStage`` have been removed in favor of ``GraphStage``. Existing -``AsyncStage`` implementations can be ported in a mostly mechanical way. - -Update procedure ----------------- - -1. The subclass of ``AsyncStage`` should be replaced by ``GraphStage`` -2. The new subclass must define an ``in`` and ``out`` port (``Inlet`` and ``Outlet`` instance) and override the ``shape`` - method returning a ``FlowShape`` -3. An instance of ``GraphStageLogic`` must be returned by overriding ``createLogic()``. The original processing logic and - state will be encapsulated in this ``GraphStageLogic`` -4. Using ``setHandler(port, handler)`` and ``InHandler`` instance should be set on ``in`` and an ``OutHandler`` should - be set on ``out`` -5. ``onPush``, ``onUpstreamFinished`` and ``onUpstreamFailed`` are now available in the ``InHandler`` subclass created - by the user -6. ``onPull`` and ``onDownstreamFinished`` are now available in the ``OutHandler`` subclass created by the user -7. the callbacks above no longer take an extra `ctxt` context parameter. -8. 
``onPull`` only signals the stage, the actual element can be obtained by calling ``grab(in)`` -9. ``ctx.push(elem)`` is now ``push(out, elem)`` -10. ``ctx.pull()`` is now ``pull(in)`` -11. ``ctx.finish()`` is now ``completeStage()`` -12. ``ctx.pushAndFinish(elem)`` is now simply two calls: ``push(out, elem); completeStage()`` -13. ``ctx.fail(cause)`` is now ``failStage(cause)`` -14. ``ctx.isFinishing()`` is now ``isClosed(in)`` -15. ``ctx.absorbTermination()`` can be replaced with ``if (isAvailable(shape.outlet)) `` -16. ``ctx.pushAndPull(elem)`` can be replaced with ``push(out, elem); pull(in)`` -17. ``ctx.holdUpstreamAndPush`` and ``context.holdDownstreamAndPull`` can be replaced by simply ``push(elem)`` and - ``pull()`` respectively -18. The following calls should be removed: ``ctx.ignore()``, ``ctx.holdUpstream()`` and ``ctx.holdDownstream()``. -19. ``ctx.isHoldingUpstream()`` can be replaced with ``isAvailable(out)`` -20. ``ctx.isHoldingDowntream()`` can be replaced with ``!(isClosed(in) || hasBeenPulled(in))`` -21. ``ctx.getAsyncCallback()`` is now ``getAsyncCallback(callback)`` which now takes a callback as a parameter. This - would correspond to the ``onAsyncInput()`` callback in the original ``AsyncStage`` - -We show the necessary steps in terms of an example ``AsyncStage`` - -Example -^^^^^^^ - -TODO - -Akka HTTP: Uri parsing mode relaxed-with-raw-query replaced with rawQueryString -=============================================================================== - -Previously Akka HTTP allowed to configure the parsing mode of an Uri's Query part (``?a=b&c=d``) to ``relaxed-with-raw-query`` -which is useful when Uris are not formatted using the usual "key/value pairs" syntax. - -Instead of exposing it as an option for the parser, this is now available as the ``Option rawQueryString()`` -/ ``Option queryString()`` methods on on ``model.Uri``. - -For parsing the Query part use ``Query query(Charset charset, Uri.ParsingMode mode)``. - -Update procedure ----------------- -1. If the ``uri-parsing-mode`` was set to ``relaxed-with-raw-query``, remove it -2. In places where the query string was accessed in ``relaxed-with-raw-query`` mode, use the ``rawQueryString``/``queryString`` methods instead -3. In places where the parsed query parts (such as ``parameter``) were used, invoke parsing directly using ``uri.query().get("a")`` - -Example -^^^^^^^ - -:: - - // config, no longer works - akka.http.parsing.uri-parsing-mode = relaxed-with-raw-query - -should be replaced by: - -.. includecode:: code/docs/MigrationsJava.java#raw-query - -And use of query parameters from ``Uri`` that looked like this: - -:: - - // This no longer works! - uri.parameter("name"); - -should be replaced by: - -.. includecode:: code/docs/MigrationsJava.java#query-param - -SynchronousFileSource and SynchronousFileSink -============================================= - -Both have been replaced by ``FileIO.toFile(…)`` and ``FileIO.fromFile(…)`` due to discoverability issues -paired with names which leaked internal implementation details. - -Update procedure ----------------- - -Replace ``SynchronousFileSource.create(`` with ``FileIO.fromFile(`` - -Replace ``SynchronousFileSink.create(`` with ``FileIO.toFile(`` - -Replace ``SynchronousFileSink.appendTo(f)`` with ``FileIO.toFile(f, true)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - final Source> src = - SynchronousFileSource.create(new File(".")); - - // This no longer works! 
- final Source> src = - SynchronousFileSource.create(new File("."), 1024); - - // This no longer works! - final Sink> sink = - `SynchronousFileSink.appendTo(new File(".")); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#file-source-sink - -InputStreamSource and OutputStreamSink -====================================== - -Both have been replaced by ``StreamConverters.fromInputStream(…)`` and ``StreamConverters.fromOutputStream(…)`` due to discoverability issues. - -Update procedure ----------------- - -Replace ``InputStreamSource.create(`` with ``StreamConverters.fromInputStream(`` - -Replace ``OutputStreamSink.create(`` with ``StreamConverters.fromOutputStream(`` - -Example -^^^^^^^ - -:: - - // This no longer works! - final Source> inputStreamSrc = - InputStreamSource.create(new Creator(){ - public InputStream create() { - return new SomeInputStream(); - } - }); - - // This no longer works! - final Source> otherInputStreamSrc = - InputStreamSource.create(new Creator(){ - public InputStream create() { - return new SomeInputStream(); - } - }, 1024); - - // This no longer works! - final Sink> outputStreamSink = - OutputStreamSink.create(new Creator(){ - public OutputStream create() { - return new SomeOutputStream(); - } - }) - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#input-output-stream-source-sink - - -OutputStreamSource and InputStreamSink -====================================== - -Both have been replaced by ``StreamConverters.asOutputStream(…)`` and ``StreamConverters.asInputStream(…)`` due to discoverability issues. - -Update procedure ----------------- - -Replace ``OutputStreamSource.create(`` with ``StreamConverters.asOutputStream(`` - -Replace ``InputStreamSink.create(`` with ``StreamConverters.asInputStream(`` - -Example -^^^^^^^ - -:: - - // This no longer works! - final Source outputStreamSrc = - OutputStreamSource.create(); - - // This no longer works! - final Source otherOutputStreamSrc = - OutputStreamSource.create(timeout); - - // This no longer works! - final Sink someInputStreamSink = - InputStreamSink.create(); - - // This no longer works! - final Sink someOtherInputStreamSink = - InputStreamSink.create(timeout); - -should be replaced by - -.. includecode:: code/docs/MigrationsJava.java#output-input-stream-source-sink diff --git a/akka-docs-dev/rst/scala.rst b/akka-docs-dev/rst/scala.rst deleted file mode 100644 index 78cc44e68e..0000000000 --- a/akka-docs-dev/rst/scala.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _stream-scala-api: - -Scala Documentation -=================== - -.. 
toctree:: - :maxdepth: 3 - - scala/stream-index - scala/http/index diff --git a/akka-docs-dev/rst/scala/code/docs/MigrationsScala.scala b/akka-docs-dev/rst/scala/code/docs/MigrationsScala.scala deleted file mode 100644 index 37357c098b..0000000000 --- a/akka-docs-dev/rst/scala/code/docs/MigrationsScala.scala +++ /dev/null @@ -1,281 +0,0 @@ -package docs - -import java.io.{ InputStream, File } - -import akka.http.scaladsl.model.Uri -import akka.stream.scaladsl._ -import akka.stream._ -import akka.stream.stage.{ OutHandler, InHandler, GraphStageLogic, GraphStage } -import akka.stream.testkit.{ AkkaSpec, TestPublisher, TestSubscriber } - -import scala.concurrent.{ Future, ExecutionContext, Promise } -import scala.concurrent.duration._ -import scala.util.{ Failure, Random, Success, Try } - -class MigrationsScala extends AkkaSpec { - - "Examples in migration guide" must { - "compile" in { - val flow1 = Flow[Int] - val flow2 = Flow[Int] - - def inlet: Inlet[Int] = ??? - def outlet: Outlet[Int] = ??? - - def inlet1: Inlet[Int] = ??? - def outlet1: Outlet[Int] = ??? - def inlet2: Inlet[Int] = ??? - def outlet2: Outlet[Int] = ??? - - lazy val dontExecuteMe = { - //#flow-wrap - val graphSource: Graph[SourceShape[Int], Unit] = ??? - val source: Source[Int, Unit] = Source.fromGraph(graphSource) - - val graphSink: Graph[SinkShape[Int], Unit] = ??? - val sink: Sink[Int, Unit] = Sink.fromGraph(graphSink) - - val graphFlow: Graph[FlowShape[Int, Int], Unit] = ??? - val flow: Flow[Int, Int, Unit] = Flow.fromGraph(graphFlow) - - Flow.fromSinkAndSource(Sink.head[Int], Source.single(0)) - //#flow-wrap - - //#bidiflow-wrap - val bidiGraph: Graph[BidiShape[Int, Int, Int, Int], Unit] = ??? - val bidi: BidiFlow[Int, Int, Int, Int, Unit] = BidiFlow.fromGraph(bidiGraph) - - BidiFlow.fromFlows(flow1, flow2) - - BidiFlow.fromFunctions((x: Int) => x + 1, (y: Int) => y * 3) - //#bidiflow-wrap - - //#graph-create - // Replaces GraphDSL.closed() - GraphDSL.create() { builder => - //... - ClosedShape - } - - // Replaces GraphDSL.partial() - GraphDSL.create() { builder => - //... - FlowShape(inlet, outlet) - } - //#graph-create - - //#graph-create-2 - Source.fromGraph( - GraphDSL.create() { builder => - //... - SourceShape(outlet) - }) - - Sink.fromGraph( - GraphDSL.create() { builder => - //... - SinkShape(inlet) - }) - - Flow.fromGraph( - GraphDSL.create() { builder => - //... - FlowShape(inlet, outlet) - }) - - BidiFlow.fromGraph( - GraphDSL.create() { builder => - //... - BidiShape(inlet1, outlet1, inlet2, outlet2) - }) - //#graph-create-2 - - //#graph-edges - RunnableGraph.fromGraph( - GraphDSL.create() { implicit builder => - import GraphDSL.Implicits._ - outlet ~> inlet - outlet ~> flow ~> inlet - //... - ClosedShape - }) - //#graph-edges - - val promise = Promise[Unit]() - - //#source-creators - val src: Source[Int, Promise[Option[Int]]] = Source.maybe[Int] - //... 
- // This finishes the stream without emitting anything, just like Source.lazyEmpty did - promise.trySuccess(Some(())) - - val ticks = Source.tick(1.second, 3.seconds, "tick") - - val pubSource = Source.fromPublisher(TestPublisher.manualProbe[Int]()) - - val itSource = Source.fromIterator(() => Iterator.continually(Random.nextGaussian)) - - val futSource = Source.fromFuture(Future.successful(42)) - - val subSource = Source.asSubscriber - //#source-creators - - //#sink-creators - val subSink = Sink.fromSubscriber(TestSubscriber.manualProbe[Int]()) - //#sink-creators - - //#sink-as-publisher - val pubSink = Sink.asPublisher(fanout = false) - - val pubSinkFanout = Sink.asPublisher(fanout = true) - //#sink-as-publisher - - //#flatMapConcat - Flow[Source[Int, Any]].flatMapConcat(identity) - //#flatMapConcat - - //#group-flatten - Flow[Int] - .groupBy(2, _ % 2) // the first parameter sets max number of substreams - .map(_ + 3) - .concatSubstreams - //#group-flatten - - val MaxDistinctWords = 1000 - //#group-fold - Flow[String] - .groupBy(MaxDistinctWords, identity) - .fold(("", 0))((pair, word) => (word, pair._2 + 1)) - .mergeSubstreams - //#group-fold - - //#port-async - class MapAsyncOne[In, Out](f: In ⇒ Future[Out])(implicit ec: ExecutionContext) - extends GraphStage[FlowShape[In, Out]] { - val in: Inlet[In] = Inlet("MapAsyncOne.in") - val out: Outlet[Out] = Outlet("MapAsyncOne.out") - override val shape: FlowShape[In, Out] = FlowShape(in, out) - - // The actual logic is encapsulated in a GraphStageLogic now - override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = - new GraphStageLogic(shape) { - - // All of the state *must* be encapsulated in the GraphStageLogic, - // not in the GraphStage - private var elemInFlight: Out = _ - - val callback = getAsyncCallback(onAsyncInput) - var holdingUpstream = false - - // All upstream related events now are handled in an InHandler instance - setHandler(in, new InHandler { - // No context or element parameter for onPush - override def onPush(): Unit = { - // The element is not passed as an argument but needs to be dequeued explicitly - val elem = grab(in) - val future = f(elem) - future.onComplete(callback.invoke) - // ctx.holdUpstream is no longer needed, but we need to track the state - holdingUpstream = true - } - - // No context parameter - override def onUpstreamFinish(): Unit = { - if (holdingUpstream) absorbTermination() - else completeStage() // ctx.finish turns into completeStage() - } - }) - - setHandler(out, new OutHandler { - override def onPull(): Unit = { - if (elemInFlight != null) { - val e = elemInFlight - elemInFlight = null.asInstanceOf[Out] - pushIt(e) - } // holdDownstream is no longer needed - } - }) - - // absorbTermination turns into the code below. - // This emulates the behavior of the AsyncStage stage. 
- private def absorbTermination(): Unit = - if (isAvailable(shape.out)) getHandler(out).onPull() - - // The line below emulates the behavior of the AsyncStage holdingDownstream - private def holdingDownstream(): Boolean = - !(isClosed(in) || hasBeenPulled(in)) - - // Any method can be used as a callback, we chose the previous name for - // easier comparison with the original code - private def onAsyncInput(input: Try[Out]) = - input match { - case Failure(ex) ⇒ failStage(ex) - case Success(e) if holdingDownstream() ⇒ pushIt(e) - case Success(e) ⇒ - elemInFlight = e - // ctx.ignore is no longer needed - } - - private def pushIt(elem: Out): Unit = { - // ctx.isFinishing turns into isClosed(in) - if (isClosed(in)) { - // pushAndFinish is now two actions - push(out, elem) - completeStage() - } else { - // pushAndPull is now two actions - push(out, elem) - pull(in) - holdingUpstream = false - } - } - } - - } - - //#port-async - - val uri: Uri = ??? - //#raw-query - val queryPart: Option[String] = uri.rawQueryString - //#raw-query - - //#query-param - val param: Option[String] = uri.query().get("a") - //#query-param - - //#file-source-sink - val fileSrc = FileIO.fromFile(new File(".")) - - val otherFileSrc = FileIO.fromFile(new File("."), 1024) - - val someFileSink = FileIO.toFile(new File(".")) - //#file-source-sink - - class SomeInputStream extends java.io.InputStream { override def read(): Int = 0 } - class SomeOutputStream extends java.io.OutputStream { override def write(b: Int): Unit = () } - - //#input-output-stream-source-sink - val inputStreamSrc = StreamConverters.fromInputStream(() => new SomeInputStream()) - - val otherInputStreamSrc = StreamConverters.fromInputStream(() => new SomeInputStream()) - - val someOutputStreamSink = StreamConverters.fromOutputStream(() => new SomeOutputStream()) - //#input-output-stream-source-sink - - //#output-input-stream-source-sink - val timeout: FiniteDuration = 0.seconds - - val outputStreamSrc = StreamConverters.asOutputStream() - - val otherOutputStreamSrc = StreamConverters.asOutputStream(timeout) - - val someInputStreamSink = StreamConverters.asInputStream() - - val someOtherInputStreamSink = StreamConverters.asInputStream(timeout) - //#output-input-stream-source-sink - } - } - } - -} diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala.orig b/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala.orig deleted file mode 100644 index cab7a27ca1..0000000000 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala.orig +++ /dev/null @@ -1,407 +0,0 @@ -/* - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package docs.http.scaladsl -<<<<<<< 53710dc764ea110a746112f2bd6010494fa1f9ac - -import akka.actor.{ ActorRef, ActorSystem } -import akka.event.LoggingAdapter -import akka.http.scaladsl.Http -import akka.http.scaladsl.Http.ServerBinding -import akka.http.scaladsl.model._ -import akka.stream.ActorMaterializer -import akka.stream.scaladsl.{ Flow, Sink } -import akka.stream.stage.{ Context, PushStage } -import akka.testkit.TestActors -import org.scalatest.{ Matchers, WordSpec } -import scala.language.postfixOps - -import scala.concurrent.{ ExecutionContext, Future } -======= -/* -// FIXME, uncomment this! 
- -import scala.concurrent.Future -import org.scalatest.{ WordSpec, Matchers } -import akka.actor.ActorSystem ->>>>>>> =htc,doc #18535 improved docs on spray-json usage - -class HttpServerExampleSpec extends WordSpec with Matchers { - - // never actually called - val log: LoggingAdapter = null - - def compileOnlySpec(body: => Unit) = () - - "binding-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.stream.ActorMaterializer - import akka.stream.scaladsl._ - - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - val serverSource: Source[Http.IncomingConnection, Future[Http.ServerBinding]] = - Http().bind(interface = "localhost", port = 8080) - val bindingFuture: Future[Http.ServerBinding] = - serverSource.to(Sink.foreach { connection => // foreach materializes the source - println("Accepted new connection from " + connection.remoteAddress) - // ... and then actually handle the connection - }).run() - } - - "binding-failure-high-level-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.http.scaladsl.server.Directives._ - import akka.stream.ActorMaterializer - - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - val handler = get { - complete("Hello world!") - } - - // let's say the OS won't allow us to bind to 80. - val (host, port) = ("localhost", 80) - val bindingFuture: Future[ServerBinding] = - Http().bindAndHandle(handler, host, port) - - bindingFuture onFailure { - case ex: Exception => - log.error(ex, "Failed to bind to {}:{}!", host, port) - } - - } - - // mock values: - val handleConnections: Sink[Http.IncomingConnection, Future[Http.ServerBinding]] = - Sink.ignore.mapMaterializedValue(_ => Future.failed(new Exception(""))) - - "binding-failure-handling" in compileOnlySpec { - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - // let's say the OS won't allow us to bind to 80. - val (host, port) = ("localhost", 80) - val serverSource = Http().bind(host, port) - - val bindingFuture: Future[ServerBinding] = serverSource - .to(handleConnections) // Sink[Http.IncomingConnection, _] - .run() - - bindingFuture onFailure { - case ex: Exception => - log.error(ex, "Failed to bind to {}:{}!", host, port) - } - } - - object MyExampleMonitoringActor { - def props = TestActors.echoActorProps - } - - "incoming-connections-source-failure-handling" in compileOnlySpec { - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - import Http._ - val (host, port) = ("localhost", 8080) - val serverSource = Http().bind(host, port) - - val failureMonitor: ActorRef = system.actorOf(MyExampleMonitoringActor.props) - - val reactToTopLevelFailures = Flow[IncomingConnection] - .transform { () => - new PushStage[IncomingConnection, IncomingConnection] { - override def onPush(elem: IncomingConnection, ctx: Context[IncomingConnection]) = - ctx.push(elem) - - override def onUpstreamFailure(cause: Throwable, ctx: Context[IncomingConnection]) = { - failureMonitor ! 
cause - super.onUpstreamFailure(cause, ctx) - } - } - } - - serverSource - .via(reactToTopLevelFailures) - .to(handleConnections) // Sink[Http.IncomingConnection, _] - .run() - } - - "connection-stream-failure-handling" in compileOnlySpec { - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - val (host, port) = ("localhost", 8080) - val serverSource = Http().bind(host, port) - - val reactToConnectionFailure = Flow[HttpRequest] - .transform { () => - new PushStage[HttpRequest, HttpRequest] { - override def onPush(elem: HttpRequest, ctx: Context[HttpRequest]) = - ctx.push(elem) - - override def onUpstreamFailure(cause: Throwable, ctx: Context[HttpRequest]) = { - // handle the failure somehow - super.onUpstreamFailure(cause, ctx) - } - } - } - - val httpEcho = Flow[HttpRequest] - .via(reactToConnectionFailure) - .map { request => - // simple text "echo" response: - HttpResponse(entity = HttpEntity(ContentTypes.`text/plain`, request.entity.dataBytes)) - } - - serverSource - .runForeach { con => - con.handleWith(httpEcho) - } - } - - "full-server-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.http.scaladsl.model.HttpMethods._ - import akka.http.scaladsl.model._ - import akka.stream.ActorMaterializer - import akka.stream.scaladsl.Sink - - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - - val serverSource = Http().bind(interface = "localhost", port = 8080) - - val requestHandler: HttpRequest => HttpResponse = { - case HttpRequest(GET, Uri.Path("/"), _, _, _) => - HttpResponse(entity = HttpEntity(MediaTypes.`text/html`, - "Hello world!")) - - case HttpRequest(GET, Uri.Path("/ping"), _, _, _) => - HttpResponse(entity = "PONG!") - - case HttpRequest(GET, Uri.Path("/crash"), _, _, _) => - sys.error("BOOM!") - - case _: HttpRequest => - HttpResponse(404, entity = "Unknown resource!") - } - - val bindingFuture: Future[Http.ServerBinding] = - serverSource.to(Sink.foreach { connection => - println("Accepted new connection from " + connection.remoteAddress) - - connection handleWithSyncHandler requestHandler - // this is equivalent to - // connection handleWith { Flow[HttpRequest] map requestHandler } - }).run() - } - - "low-level-server-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.http.scaladsl.model.HttpMethods._ - import akka.http.scaladsl.model._ - import akka.stream.ActorMaterializer - - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - - val requestHandler: HttpRequest => HttpResponse = { - case HttpRequest(GET, Uri.Path("/"), _, _, _) => - HttpResponse(entity = HttpEntity(MediaTypes.`text/html`, - "Hello world!")) - - case HttpRequest(GET, Uri.Path("/ping"), _, _, _) => - HttpResponse(entity = "PONG!") - - case HttpRequest(GET, Uri.Path("/crash"), _, _, _) => - sys.error("BOOM!") - - case _: HttpRequest => - HttpResponse(404, entity = "Unknown resource!") - } - - Http().bindAndHandleSync(requestHandler, "localhost", 8080) - } - - // format: OFF - - "high-level-server-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._ - import akka.http.scaladsl.server.Directives._ - import akka.stream.ActorMaterializer - - implicit val system = ActorSystem() - implicit val materializer = ActorMaterializer() - - val route = - get { - pathSingleSlash { - complete { - - Hello world! 
- - } - } ~ - path("ping") { - complete("PONG!") - } ~ - path("crash") { - sys.error("BOOM!") - } - } - - // `route` will be implicitly converted to `Flow` using `RouteResult.route2HandlerFlow` - Http().bindAndHandle(route, "localhost", 8080) - } - - "minimal-routing-example" in compileOnlySpec { - import akka.http.scaladsl.Http - import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._ - import akka.http.scaladsl.server.Directives._ - import akka.stream.ActorMaterializer - - object Main extends App { - implicit val system = ActorSystem("my-system") - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - val route = - path("hello") { - get { - complete { -

        Say hello to akka-http

        - } - } - } - - val bindingFuture = Http().bindAndHandle(route, "localhost", 8080) - - println(s"Server online at http://localhost:8080/\nPress RETURN to stop...") - Console.readLine() // for the future transformations - bindingFuture - .flatMap(_.unbind()) // trigger unbinding from the port - .onComplete(_ ⇒ system.shutdown()) // and shutdown when done - } - } - - "long-routing-example" in compileOnlySpec { - //#long-routing-example - import akka.actor.ActorRef - import akka.http.scaladsl.coding.Deflate - import akka.http.scaladsl.marshalling.ToResponseMarshaller - import akka.http.scaladsl.model.StatusCodes.MovedPermanently - import akka.http.scaladsl.server.Directives._ - // TODO: these explicit imports are only needed in complex cases, like below; Also, not needed on Scala 2.11 - import akka.http.scaladsl.server.directives.ParameterDirectives.ParamMagnet - import akka.http.scaladsl.server.directives.FormFieldDirectives.FieldMagnet - import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller - import akka.pattern.ask - import akka.util.Timeout - - // types used by the API routes - type Money = Double // only for demo purposes, don't try this at home! - type TransactionResult = String - case class User(name: String) - case class Order(email: String, amount: Money) - case class Update(order: Order) - case class OrderItem(i: Int, os: Option[String], s: String) - - // marshalling would usually be derived automatically using libraries - implicit val orderUM: FromRequestUnmarshaller[Order] = ??? - implicit val orderM: ToResponseMarshaller[Order] = ??? - implicit val orderSeqM: ToResponseMarshaller[Seq[Order]] = ??? - implicit val timeout: Timeout = ??? // for actor asks - implicit val ec: ExecutionContext = ??? - implicit val mat: ActorMaterializer = ??? - implicit val sys: ActorSystem = ??? - - // backend entry points - def myAuthenticator: Authenticator[User] = ??? - def retrieveOrdersFromDB: Seq[Order] = ??? - def myDbActor: ActorRef = ??? - def processOrderRequest(id: Int, complete: Order => Unit): Unit = ??? - - val route = { - path("orders") { - authenticateBasic(realm = "admin area", myAuthenticator) { user => - get { - encodeResponseWith(Deflate) { - complete { - // marshal custom object with in-scope marshaller - retrieveOrdersFromDB - } - } - } ~ - post { - // decompress gzipped or deflated requests if required - decodeRequest { - // unmarshal with in-scope unmarshaller - entity(as[Order]) { order => - complete { - // ... write order to DB - "Order received" - } - } - } - } - } - } ~ - // extract URI path element as Int - pathPrefix("order" / IntNumber) { orderId => - pathEnd { - (put | parameter('method ! "put")) { - // form extraction from multipart or www-url-encoded forms - formFields('email, 'total.as[Money]).as(Order) { order => - complete { - // complete with serialized Future result - (myDbActor ? Update(order)).mapTo[TransactionResult] - } - } - } ~ - get { - // debugging helper - logRequest("GET-ORDER") { - // use in-scope marshaller to create completer function - completeWith(instanceOf[Order]) { completer => - // custom - processOrderRequest(orderId, completer) - } - } - } - } ~ - path("items") { - get { - // parameters to case class extraction - parameters('size.as[Int], 'color ?, 'dangerous ? "no") - .as(OrderItem) { orderItem => - // ... 
route using case class instance created from - // required and optional query parameters - complete("") // hide - } - } - } - } ~ - pathPrefix("documentation") { - // optionally compresses the response with Gzip or Deflate - // if the client accepts compressed responses - encodeResponse { - // serve up static content from a JAR resource - getFromResourceDirectory("docs") - } - } ~ - path("oldApi" / Rest) { pathRest => - redirect("http://oldapi.example.com/" + pathRest, MovedPermanently) - } - } - } -} diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala b/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala deleted file mode 100644 index 4824c46880..0000000000 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package docs.http.scaladsl.server -package directives - -import akka.http.scaladsl.server.Route -import akka.http.scaladsl.model._ - -class FormFieldDirectivesExamplesSpec extends RoutingSpec { - "formFields" in { - val route = - formFields('color, 'age.as[Int]) { (color, age) => - complete(s"The color is '$color' and the age ten years ago was ${age - 10}") - } - - // tests: - Post("/", FormData("color" -> "blue", "age" -> "68")) ~> route ~> check { - responseAs[String] shouldEqual "The color is 'blue' and the age ten years ago was 58" - } - - Get("/") ~> Route.seal(route) ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[String] shouldEqual "Request is missing required form field 'color'" - } - } - "formField" in { - val route = - formField('color) { color => - complete(s"The color is '$color'") - } ~ - formField('id.as[Int]) { id => - complete(s"The id is '$id'") - } - - // tests: - Post("/", FormData("color" -> "blue")) ~> route ~> check { - responseAs[String] shouldEqual "The color is 'blue'" - } - - Get("/") ~> Route.seal(route) ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[String] shouldEqual "Request is missing required form field 'color'" - } - } - -} diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessagesForProtocol.rst b/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessagesForProtocol.rst deleted file mode 100644 index 254761dfc2..0000000000 --- a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessagesForProtocol.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. _-handleWebsocketMessagesForProtocol-: - -handleWebsocketMessagesForProtocol -================================== - -Signature ---------- - -.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebsocketDirectives.scala - :snippet: handleWebsocketMessagesForProtocol - -Description ------------ -Handles Websocket requests with the given handler if the given subprotocol is offered in the ``Sec-Websocket-Protocol`` -header of the request and rejects other requests with an ``ExpectedWebsocketRequestRejection`` or an -``UnsupportedWebsocketSubprotocolRejection``. - -The directive first checks if the request was a valid Websocket handshake request and if the request offers the passed -subprotocol name. If yes, the directive completes the request with the passed handler. 
Otherwise, the request is -either rejected with an ``ExpectedWebsocketRequestRejection`` or an ``UnsupportedWebsocketSubprotocolRejection``. - -To support several subprotocols, for example at the same path, several instances of ``handleWebsocketMessagesForProtocol`` can -be chained using ``~`` as you can see in the below example. - -For more information about the Websocket support, see :ref:`server-side-websocket-support-scala`. - -Example -------- - -.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala - :snippet: handle-multiple-protocols diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst b/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst deleted file mode 100644 index 82920ed198..0000000000 --- a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _WebsocketDirectives: - -WebsocketDirectives -=================== - -.. toctree:: - :maxdepth: 1 - - handleWebsocketMessages - handleWebsocketMessagesForProtocol \ No newline at end of file diff --git a/akka-docs-dev/rst/scala/migration-guide-1.0-2.x-scala.rst b/akka-docs-dev/rst/scala/migration-guide-1.0-2.x-scala.rst deleted file mode 100644 index 59f615fafd..0000000000 --- a/akka-docs-dev/rst/scala/migration-guide-1.0-2.x-scala.rst +++ /dev/null @@ -1,739 +0,0 @@ -.. _migration-2.0-scala: - -########################## -Migration Guide 1.0 to 2.x -########################## - -The 2.0 release contains some structural changes that require some -simple, mechanical source-level changes in client code. While these are detailed below, -there is another change that may have an impact on the runtime behavior of your streams -and which therefore is listed first. - -Operator Fusion is on by default -================================ - -Akka Streams 2.0 contains an initial version of stream operator fusion support. This means that -the processing steps of a flow or stream graph can be executed within the same Actor and has three -consequences: - - * starting up a stream may take longer than before due to executing the fusion algorithm - * passing elements from one processing stage to the next is a lot faster between fused - stages due to avoiding the asynchronous messaging overhead - * fused stream processing stages do no longer run in parallel to each other, meaning that - only up to one CPU core is used for each fused part - -The first point can be countered by pre-fusing and then reusing a stream blueprint, see ``akka.stream.Fusing``. -In order to balance the effects of the second and third bullet points you will have to insert asynchronous -boundaries manually into your flows and graphs by way of adding ``Attributes.asyncBoundary`` to pieces that -shall communicate with the rest of the graph in an asynchronous fashion. - -.. warning:: - - Without fusing (i.e. up to version 2.0-M2) each stream processing stage had an implicit input buffer - that holds a few elements for efficiency reasons. If your flow graphs contain cycles then these buffers - may have been crucial in order to avoid deadlocks. With fusing these implicit buffers are no longer - there, data elements are passed without buffering between fused stages. In those cases where buffering - is needed in order to allow the stream to run at all, you will have to insert explicit buffers with the - ``.buffer()`` combinator—typically a buffer of size 2 is enough to allow a feedback loop to function. 
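As a minimal sketch of both tools (the flow itself is made up here; only the ``buffer`` and ``Attributes.asyncBoundary`` usage matters)::

    import akka.stream.{ Attributes, OverflowStrategy }
    import akka.stream.scaladsl.Source

    // an explicit buffer keeps a feedback loop runnable now that the implicit
    // per-stage buffers are gone; the attribute marks this section to run
    // across an asynchronous boundary from the rest of the graph
    val asyncSection = Source(1 to 100)
      .buffer(2, OverflowStrategy.backpressure)
      .map(_ * 2)
      .withAttributes(Attributes.asyncBoundary)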
- -The new fusing behavior can be disabled by setting the configuration parameter ``akka.stream.materializer.auto-fusing=off``. -In that case you can still manually fuse those graphs which shall run on less Actors. Fusable elements are - - * all GraphStages (this includes all built-in junctions apart from ``groupBy``) - * all Stages (this includes all built-in linear operators) - * TCP connections - -Introduced proper named constructor methods instead of ``wrap()`` -================================================================= - -There were several, unrelated uses of ``wrap()`` which made it hard to find and hard to understand the intention of -the call. Therefore these use-cases now have methods with different names, helping Java 8 type inference (by reducing -the number of overloads) and finding relevant methods in the documentation. - -Creating a Flow from other stages ---------------------------------- - -It was possible to create a ``Flow`` from a graph with the correct shape (``FlowShape``) using ``wrap()``. Now this -must be done with the more descriptive method ``Flow.fromGraph()``. - -It was possible to create a ``Flow`` from a ``Source`` and a ``Sink`` using ``wrap()``. Now this functionality can -be accessed trough the more descriptive methods ``Flow.fromSinkAndSource`` and ``Flow.fromSinkAndSourceMat``. - - -Creating a BidiFlow from other stages -------------------------------------- - -It was possible to create a ``BidiFlow`` from a graph with the correct shape (``BidiShape``) using ``wrap()``. Now this -must be done with the more descriptive method ``BidiFlow.fromGraph()``. - -It was possible to create a ``BidiFlow`` from two ``Flow`` s using ``wrap()``. Now this functionality can -be accessed trough the more descriptive methods ``BidiFlow.fromFlows`` and ``BidiFlow.fromFlowsMat``. - -It was possible to create a ``BidiFlow`` from two functions using ``apply()`` (Scala DSL) or ``create()`` (Java DSL). -Now this functionality can be accessed trough the more descriptive method ``BidiFlow.fromFunctions``. - -Update procedure ----------------- - -1. Replace all uses of ``Flow.wrap`` when it converts a ``Graph`` to a ``Flow`` with ``Flow.fromGraph`` -2. Replace all uses of ``Flow.wrap`` when it converts a ``Source`` and ``Sink`` to a ``Flow`` with - ``Flow.fromSinkAndSource`` or ``Flow.fromSinkAndSourceMat`` -3. Replace all uses of ``BidiFlow.wrap`` when it converts a ``Graph`` to a ``BidiFlow`` with ``BidiFlow.fromGraph`` -4. Replace all uses of ``BidiFlow.wrap`` when it converts two ``Flow`` s to a ``BidiFlow`` with - ``BidiFlow.fromFlows`` or ``BidiFlow.fromFlowsMat`` -5. Replace all uses of ``BidiFlow.apply()`` when it converts two - functions to a ``BidiFlow`` with ``BidiFlow.fromFunctions`` - -Example -^^^^^^^ - -:: - - val graphSource: Graph[SourceShape[Int], Unit] = ??? - // This no longer works! - val source: Source[Int, Unit] = Source.wrap(graphSource) - - val graphSink: Graph[SinkShape[Int], Unit] = ??? - // This no longer works! - val sink: Sink[Int, Unit] = Sink.wrap(graphSink) - - val graphFlow: Graph[FlowShape[Int, Int], Unit] = ??? - // This no longer works! - val flow: Flow[Int, Int, Unit] = Flow.wrap(graphFlow) - - // This no longer works - Flow.wrap(Sink.head[Int], Source.single(0))(Keep.left) - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#flow-wrap - -and - -:: - - val bidiGraph: Graph[BidiShape[Int, Int, Int, Int], Unit = ??? - // This no longer works! 
- val bidi: BidiFlow[Int, Int, Int, Int, Unit] = BidiFlow.wrap(bidiGraph) - - // This no longer works! - BidiFlow.wrap(flow1, flow2)(Keep.both) - - // This no longer works! - BidiFlow((x: Int) => x + 1, (y: Int) => y * 3) - - -Should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#bidiflow-wrap - -FlowGraph class and builder methods have been renamed -===================================================== - -Due to incorrect overlap with the :class:`Flow` concept we renamed the :class:`FlowGraph` class to :class:`GraphDSL`. -There is now only one graph creation method called ``create`` which is analogous to the old ``partial`` method. For -closed graphs now it is explicitly required to return ``ClosedShape`` at the end of the builder block. - -Update procedure ----------------- - -1. Search and replace all occurrences of ``FlowGraph`` with ``GraphDSL``. -2. Replace all occurrences of ``GraphDSL.partial()`` or ``GraphDSL.closed()`` with ``GraphDSL.create()``. -3. Add ``ClosedShape`` as a return value of the builder block if it was ``FlowGraph.closed()`` before. -4. Wrap the closed graph with ``RunnableGraph.fromGraph`` if it was ``FlowGraph.closed()`` before. - -Example -^^^^^^^ - -:: - - // This no longer works! - FlowGraph.closed() { builder => - //... - } - - // This no longer works! - FlowGraph.partial() { builder => - //... - FlowShape(inlet, outlet) - } - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#graph-create - -Methods that create Source, Sink, Flow from Graphs have been removed -==================================================================== - -Previously there were convenience methods available on ``Sink``, ``Source``, ``Flow`` an ``BidiFlow`` to create -these DSL elements from a graph builder directly. Now this requires two explicit steps to reduce the number of overloaded -methods (helps Java 8 type inference) and also reduces the ways how these elements can be created. There is only one -graph creation method to learn (``GraphDSL.create``) and then there is only one conversion method to use ``fromGraph()``. - -This means that the following methods have been removed: - - ``adapt()`` method on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (both DSLs) - - ``apply()`` overloads providing a graph ``Builder`` on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (Scala DSL) - - ``create()`` overloads providing a graph ``Builder`` on ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` (Java DSL) - -Update procedure ----------------- - -Everywhere where ``Source``, ``Sink``, ``Flow`` and ``BidiFlow`` is created from a graph using a builder have to -be replaced with two steps - -1. Create a ``Graph`` with the correct ``Shape`` using ``GraphDSL.create`` (e.g.. for ``Source`` it means first - creating a ``Graph`` with ``SourceShape``) -2. Create the required DSL element by calling ``fromGraph()`` on the required DSL element (e.g. ``Source.fromGraph``) - passing the graph created in the previous step - -Example -^^^^^^^ - -:: - - // This no longer works! - Source() { builder => - //... - outlet - } - - // This no longer works! - Sink() { builder => - //... - inlet - } - - // This no longer works! - Flow() { builder => - //... - (inlet, outlet) - } - - // This no longer works! - BidiFlow() { builder => - //... - BidiShape(inlet1, outlet1, inlet2, outlet2) - } - -should be replaced by - -.. 
includecode:: code/docs/MigrationsScala.scala#graph-create-2 - -Several Graph builder methods have been removed -=============================================== - -The ``addEdge`` methods have been removed from the DSL to reduce the ways connections can be made and to reduce the -number of overloads. Now only the ``~>`` notation is available which requires the import of the implicits -``GraphDSL.Implicits._``. - -Update procedure ----------------- - -1. Replace all uses of ``scaladsl.Builder.addEdge(Outlet, Inlet)`` by the graphical DSL ``~>``. -2. Replace all uses of ``scaladsl.Builder.addEdge(Outlet, FlowShape, Inlet)`` by the graphical DSL ``~>``. - methods, or the graphical DSL ``~>``. -3. Import ``FlowGraph.Implicits._`` in the builder block or an enclosing scope. - -Example -^^^^^^^ - -:: - - FlowGraph.closed() { builder => - //... - // This no longer works! - builder.addEdge(outlet, inlet) - // This no longer works! - builder.addEdge(outlet, flow1, inlet) - //... - } - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#graph-edges - -Source constructor name changes -=============================== - -``Source.lazyEmpty`` has been replaced by ``Source.maybe`` which returns a ``Promise`` that can be completed by one or -zero elements by providing an ``Option``. This is different from ``lazyEmpty`` which only allowed completion to be -sent, but no elements. - -The ``apply()`` overload on ``Source`` has been refactored to separate methods to reduce the number of overloads and -make source creation more discoverable. - -``Source.subscriber`` has been renamed to ``Source.asSubscriber``. - -Update procedure ----------------- - -1. All uses of ``Source.lazyEmpty`` should be replaced by ``Source.maybe`` and the returned ``Promise`` completed with - a ``None`` (an empty ``Option``) -2. Replace all uses of ``Source(delay,interval,tick)`` with the method ``Source.tick(delay,interval,tick)`` -3. Replace all uses of ``Source(publisher)`` with the method ``Source.fromPublisher(publisher)`` -4. Replace all uses of ``Source(() => iterator)`` with the method ``Source.fromIterator(() => iterator))`` -5. Replace all uses of ``Source(future)`` with the method ``Source.fromFuture(future))`` -6. Replace all uses of ``Source.subscriber`` with the method ``Source.asSubscriber`` - -Example -^^^^^^^ - -:: - - // This no longer works! - val src: Source[Int, Promise[Unit]] = Source.lazyEmpty[Int] - //... - promise.trySuccess(()) - - // This no longer works! - val ticks = Source(1.second, 3.seconds, "tick") - - // This no longer works! - val pubSource = Source(TestPublisher.manualProbe[Int]()) - - // This no longer works! - val itSource = Source(() => Iterator.continually(Random.nextGaussian)) - - // This no longer works! - val futSource = Source(Future.successful(42)) - - // This no longer works! - val subSource = Source.subscriber - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#source-creators - -Sink constructor name changes -============================= - -``Sink.apply(subscriber)`` has been renamed to ``Sink.fromSubscriber(subscriber)`` to reduce the number of overloads and -make sink creation more discoverable. - -Update procedure ----------------- - -1. Replace all uses of ``Sink(subscriber)`` with the method ``Sink.fromSubscriber(subscriber)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - val subSink = Sink(TestSubscriber.manualProbe[Int]()) - -should be replaced by - -.. 
includecode:: code/docs/MigrationsScala.scala#sink-creators - -``flatten(FlattenStrategy)`` has been replaced by named counterparts -==================================================================== - -To simplify type inference in Java 8 and to make the method more discoverable, ``flatten(FlattenStrategy.concat)`` -has been removed and replaced with the alternative method ``flatten(FlattenStrategy.concat)``. - -Update procedure ----------------- - -1. Replace all occurrences of ``flatten(FlattenStrategy.concat)`` with ``flatMapConcat(identity)`` -2. Consider replacing all occurrences of ``map(f).flatMapConcat(identity)`` with ``flatMapConcat(f)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - Flow[Source[Int, Any]].flatten(FlattenStrategy.concat) - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#flatMapConcat - -`Sink.fanoutPublisher()` and `Sink.publisher()` is now a single method -====================================================================== - -It was a common user mistake to use ``Sink.publisher`` and get into trouble since it would only support -a single ``Subscriber``, and the discoverability of the apprpriate fix was non-obvious (Sink.fanoutPublisher). -To make the decision whether to support fanout or not an active one, the aforementioned methods have been -replaced with a single method: ``Sink.asPublisher(fanout: Boolean)``. - -Update procedure ----------------- - -1. Replace all occurences of ``Sink.publisher`` with ``Sink.asPublisher(false)`` -2. Replace all occurences of ``Sink.fanoutPublisher`` with ``Sink.asPublisher(true)`` - -Example -^^^^^^^ - -:: - - // This no longer works! - val subSink = Sink.publisher - - // This no longer works! - val subSink = Sink.fanoutPublisher(2, 8) - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#sink-as-publisher - -FlexiMerge an FlexiRoute has been replaced by GraphStage -======================================================== - -The ``FlexiMerge`` and ``FlexiRoute`` DSLs have been removed since they provided an abstraction that was too limiting -and a better abstraction have been created which is called ``GraphStage``. ``GraphStage`` can express fan-in and -fan-out stages, but many other constructs as well with possibly multiple input and output ports (e.g. a ``BidiStage``). - -This new abstraction provides a more uniform way to crate custom stream processing stages of arbitrary ``Shape``. In -fact, all of the built-in fan-in and fan-out stages are now implemented in terms of ``GraphStage``. - -Update procedure ----------------- - -*There is no simple update procedure. The affected stages must be ported to the new ``GraphStage`` DSL manually. Please -read the* ``GraphStage`` *documentation (TODO) for details.* - -GroupBy, SplitWhen and SplitAfter now return SubFlow -==================================================== - -Previously the ``groupBy``, ``splitWhen``, and ``splitAfter`` combinators -returned a type that included a :class:`Source` within its elements. -Transforming these substreams was only possible by nesting the respective -combinators inside a ``map`` of the outer stream. This has been made more -convenient and also safer by dropping down into transforming the substreams -instead: the return type is now a :class:`SubFlow` that does not implement the -:class:`Graph` interface and therefore only represents an unfinished -intermediate builder step. The substream mode can be ended by closing the -substreams (i.e. 
attaching a :class:`Sink`) or merging them back together. - -Update Procedure ----------------- - -The transformations that were done on the substreams need to be lifted up one -level. This only works for cases where the processing topology is homogenous -for all substreams. - -Example -^^^^^^^ - -:: - - Flow[Int] - // This no longer works! - .groupBy(_ % 2) - // This no longer works! - .map { - case (key, source) => source.map(_ + 3) - } - // This no longer works! - .flatten(FlattenStrategy.concat) - -This is implemented now as - -.. includecode:: code/docs/MigrationsScala.scala#group-flatten - -Example 2 -^^^^^^^^^ - -:: - - Flow[String] - // This no longer works! - .groupBy(identity) - // This no longer works! - .map { - case (key, source) => source.runFold((key, 0))((pair, word) => (key, pair._2 + 1)) - } - // This no longer works! - .mapAsyncUnordered(4, identity) - -This is implemented now as - -.. includecode:: code/docs/MigrationsScala.scala#group-fold - -Variance of Inlet and Outlet -============================ - -Scala uses *declaration site variance* which was cumbersome in the cases of ``Inlet`` and ``Outlet`` as they are -purely symbolic object containing no fields or methods and which are used both in input and output locations (wiring -an ``Outlet`` into an ``Inlet``; reading in a stage from an ``Inlet``). Because of this reasons all users of these -port abstractions now use *use-site variance* (just like Java variance works). This in general does not affect user -code expect the case of custom shapes, which now require ``@uncheckedVariance`` annotations on their ``Inlet`` and -``Outlet`` members (since these are now invariant, but the Scala compiler does not know that they have no fields or -methods that would violate variance constraints) - -This change does not affect Java DSL users. - -Update procedure ----------------- - -1. All custom shapes must use ``@uncheckedVariance`` on their ``Inlet`` and ``Outlet`` members. - -Renamed ``inlet()`` and ``outlet()`` to ``in()`` and ``out()`` in ``SourceShape``, ``SinkShape`` and ``FlowShape`` -================================================================================================================== - -The input and output ports of these shapes where called ``inlet()`` and ``outlet()`` compared to other shapes that -consistently used ``in()`` and ``out()``. Now all :class:`Shape` s use ``in()`` and ``out()``. - -Update procedure ----------------- - -Change all references to ``inlet()`` to ``in()`` and all references to ``outlet()`` to ``out()`` when referring to the ports -of :class:`FlowShape`, :class:`SourceShape` and :class:`SinkShape`. - -Semantic change in ``isHoldingUpstream`` in the DetachedStage DSL -================================================================= - -The ``isHoldingUpstream`` method used to return true if the upstream port was in holding state and a completion arrived -(inside the ``onUpstreamFinished`` callback). Now it returns ``false`` when the upstream is completed. - -Update procedure ----------------- - -1. Those stages that relied on the previous behavior need to introduce an extra ``Boolean`` field with initial value - ``false`` -2. This field must be set on every call to ``holdUpstream()`` (and variants). -3. In completion, instead of calling ``isHoldingUpstream`` read this variable instead. - -See the example in the AsyncStage migration section for an example of this procedure. 
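Tying back to the ``inlet()``/``outlet()`` renaming covered above, a minimal sketch of the new port accessors (the port names here are only illustrative)::

    import akka.stream.{ FlowShape, Inlet, Outlet }

    val in = Inlet[Int]("example.in")
    val out = Outlet[Int]("example.out")
    val shape = FlowShape(in, out)

    shape.in  // previously shape.inlet()
    shape.out // previously shape.outlet()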
- - -StatefulStage has been replaced by GraphStage -============================================= - -The :class:`StatefulStage` class had some flaws and limitations, most notably around completion handling which -caused subtle bugs. The new :class:`GraphStage` (:ref:`graphstage-java`) solves these issues and should be used -instead. - -Update procedure ----------------- - -There is no mechanical update procedure available. Please consult the :class:`GraphStage` documentation -(:ref:`graphstage-java`). - - -AsyncStage has been replaced by GraphStage -========================================== - -Due to its complexity and inflexibility ``AsyncStage`` have been removed in favor of ``GraphStage``. Existing -``AsyncStage`` implementations can be ported in a mostly mechanical way. - -Update procedure ----------------- - -1. The subclass of ``AsyncStage`` should be replaced by ``GraphStage`` -2. The new subclass must define an ``in`` and ``out`` port (``Inlet`` and ``Outlet`` instance) and override the ``shape`` - method returning a ``FlowShape`` -3. An instance of ``GraphStageLogic`` must be returned by overriding ``createLogic()``. The original processing logic and - state will be encapsulated in this ``GraphStageLogic`` -4. Using ``setHandler(port, handler)`` and ``InHandler`` instance should be set on ``in`` and an ``OutHandler`` should - be set on ``out`` -5. ``onPush``, ``onUpstreamFinished`` and ``onUpstreamFailed`` are now available in the ``InHandler`` subclass created - by the user -6. ``onPull`` and ``onDownstreamFinished`` are now available in the ``OutHandler`` subclass created by the user -7. the callbacks above no longer take an extra `ctxt` context parameter. -8. ``onPull`` only signals the stage, the actual element can be obtained by calling ``grab(in)`` -9. ``ctx.push(elem)`` is now ``push(out, elem)`` -10. ``ctx.pull()`` is now ``pull(in)`` -11. ``ctx.finish()`` is now ``completeStage()`` -12. ``ctx.pushAndFinish(elem)`` is now simply two calls: ``push(out, elem); completeStage()`` -13. ``ctx.fail(cause)`` is now ``failStage(cause)`` -14. ``ctx.isFinishing()`` is now ``isClosed(in)`` -15. ``ctx.absorbTermination()`` can be replaced with ``if (isAvailable(shape.outlet)) `` -16. ``ctx.pushAndPull(elem)`` can be replaced with ``push(out, elem); pull(in)`` -17. ``ctx.holdUpstreamAndPush`` and ``context.holdDownstreamAndPull`` can be replaced by simply ``push(elem)`` and - ``pull()`` respectively -18. The following calls should be removed: ``ctx.ignore()``, ``ctx.holdUpstream()`` and ``ctx.holdDownstream()``. -19. ``ctx.isHoldingUpstream()`` can be replaced with ``isAvailable(out)`` -20. ``ctx.isHoldingDowntream()`` can be replaced with ``!(isClosed(in) || hasBeenPulled(in))`` -21. ``ctx.getAsyncCallback()`` is now ``getAsyncCallback(callback)`` which now takes a callback as a parameter. 
This - would correspond to the ``onAsyncInput()`` callback in the original ``AsyncStage`` - -We show the necessary steps in terms of an example ``AsyncStage`` - -Example -^^^^^^^ - -:: - - class MapAsyncOne[In, Out](f: In ⇒ Future[Out])(implicit ec: ExecutionContext) - extends AsyncStage[In, Out, Try[Out]] { - - private var elemInFlight: Out = _ - - override def onPush(elem: In, ctx: AsyncContext[Out, Try[Out]]) = { - val future = f(elem) - val cb = ctx.getAsyncCallback - future.onComplete(cb.invoke) - ctx.holdUpstream() - } - - override def onPull(ctx: AsyncContext[Out, Try[Out]]) = - if (elemInFlight != null) { - val e = elemInFlight - elemInFlight = null.asInstanceOf[Out] - pushIt(e, ctx) - } else ctx.holdDownstream() - - override def onAsyncInput(input: Try[Out], ctx: AsyncContext[Out, Try[Out]]) = - input match { - case Failure(ex) ⇒ ctx.fail(ex) - case Success(e) if ctx.isHoldingDownstream ⇒ pushIt(e, ctx) - case Success(e) ⇒ - elemInFlight = e - ctx.ignore() - } - - override def onUpstreamFinish(ctx: AsyncContext[Out, Try[Out]]) = - if (ctx.isHoldingUpstream) ctx.absorbTermination() - else ctx.finish() - - private def pushIt(elem: Out, ctx: AsyncContext[Out, Try[Out]]) = - if (ctx.isFinishing) ctx.pushAndFinish(elem) - else ctx.pushAndPull(elem) - } - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#port-async - -Akka HTTP: Uri parsing mode relaxed-with-raw-query replaced with rawQueryString -=============================================================================== - -Previously Akka HTTP allowed to configure the parsing mode of an Uri's Query part (``?a=b&c=d``) to ``relaxed-with-raw-query`` -which is useful when Uris are not formatted using the usual "key/value pairs" syntax. - -Instead of exposing it as an option for the parser, this is now available as the ``rawQueryString(): Option[String]`` -/ ``queryString(): Option[String]`` methods on on ``model.Uri``. - - -For parsing the Query part use ``query(charset: Charset = UTF8, mode: Uri.ParsingMode = Uri.ParsingMode.Relaxed): Query``. - -Update procedure ----------------- -1. If the ``uri-parsing-mode`` was set to ``relaxed-with-raw-query``, remove it -2. In places where the query string was accessed in ``relaxed-with-raw-query`` mode, use the ``rawQueryString``/``queryString`` methods instead -3. In places where the parsed query parts (such as ``parameter``) were used, invoke parsing directly using ``uri.query().get("a")`` - -Example -^^^^^^^ - -:: - - // config, no longer works - akka.http.parsing.uri-parsing-mode = relaxed-with-raw-query - -should be replaced by: - -.. includecode:: code/docs/MigrationsScala.scala#raw-query - -And use of query parameters from ``Uri`` that looked like this: - -:: - - // This no longer works! - uri.parameter("name") - -should be replaced by: - -.. includecode:: code/docs/MigrationsScala.scala#query-param - -SynchronousFileSource and SynchronousFileSink -============================================= - - -``SynchronousFileSource`` and ``SynchronousFileSink`` -have been replaced by ``FileIO.read(…)`` and ``FileIO.write(…)`` due to discoverability issues -paired with names which leaked internal implementation details. - -Update procedure ----------------- - -Replace ``SynchronousFileSource(`` and ``SynchronousFileSource.apply(`` with ``FileIO.fromFile(`` - -Replace ``SynchronousFileSink(`` and ``SynchronousFileSink.apply(`` with ``FileIO.toFile(`` - -Example -^^^^^^^ - -:: - - // This no longer works! 
- val fileSrc = SynchronousFileSource(new File(".")) - - // This no longer works! - val otherFileSrc = SynchronousFileSource(new File("."), 1024) - - // This no longer works! - val someFileSink = SynchronousFileSink(new File(".")) - - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#file-source-sink - -InputStreamSource and OutputStreamSink -====================================== - -Both have been replaced by ``StreamConverters.fromInputStream(…)`` and ``StreamConverters.fromOutputStream(…)`` due to discoverability issues. - -Update procedure ----------------- - -Replace ``InputStreamSource(`` and ``InputStreamSource.apply(`` with ``StreamConverters.fromInputStream(`` -i -Replace ``OutputStreamSink(`` and ``OutputStreamSink.apply(`` with ``StreamConverters.fromOutputStream(`` - -Example -^^^^^^^ - -:: - - // This no longer works! - val inputStreamSrc = InputStreamSource(() => new SomeInputStream()) - - // This no longer works! - val otherInputStreamSrc = InputStreamSource(() => new SomeInputStream(), 1024) - - // This no longer works! - val someOutputStreamSink = OutputStreamSink(() => new SomeOutputStream()) - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#input-output-stream-source-sink - -OutputStreamSource and InputStreamSink -====================================== - -Both have been replaced by ``StreamConverters.asOutputStream(…)`` and ``StreamConverters.asInputStream(…)`` due to discoverability issues. - -Update procedure ----------------- - -Replace ``OutputStreamSource(`` and ``OutputStreamSource.apply(`` with ``StreamConverters.asOutputStream(`` - -Replace ``InputStreamSink(`` and ``InputStreamSink.apply(`` with ``StreamConverters.asInputStream(`` - -Example -^^^^^^^ - -:: - - // This no longer works! - val outputStreamSrc = OutputStreamSource() - - // This no longer works! - val otherOutputStreamSrc = OutputStreamSource(timeout) - - // This no longer works! - val someInputStreamSink = InputStreamSink() - - // This no longer works! - val someOtherInputStreamSink = InputStreamSink(timeout); - -should be replaced by - -.. includecode:: code/docs/MigrationsScala.scala#output-input-stream-source-sink diff --git a/akka-docs-dev/rst/stream-configuration.rst b/akka-docs-dev/rst/stream-configuration.rst deleted file mode 100644 index 520a4c6387..0000000000 --- a/akka-docs-dev/rst/stream-configuration.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _stream-config: - -############# -Configuration -############# - -.. 
literalinclude:: ../../akka-stream/src/main/resources/reference.conf \ No newline at end of file diff --git a/akka-docs-dev/src/test/resources/application.conf b/akka-docs-dev/src/test/resources/application.conf deleted file mode 100644 index dafc521805..0000000000 --- a/akka-docs-dev/src/test/resources/application.conf +++ /dev/null @@ -1 +0,0 @@ -akka.loggers = ["akka.testkit.TestEventListener"] \ No newline at end of file diff --git a/akka-docs-dev/_sphinx/exts/includecode2.py b/akka-docs/_sphinx/exts/includecode2.py similarity index 98% rename from akka-docs-dev/_sphinx/exts/includecode2.py rename to akka-docs/_sphinx/exts/includecode2.py index 9e6a12dc09..37f518fa26 100644 --- a/akka-docs-dev/_sphinx/exts/includecode2.py +++ b/akka-docs/_sphinx/exts/includecode2.py @@ -43,7 +43,7 @@ class IncludeCode2(Directive): encoding = self.options.get('encoding', env.config.source_encoding) codec_info = codecs.lookup(encoding) try: - f = codecs.StreamReaderWriter(open(fn, 'U'), + f = codecs.StreamReaderWriter(open(fn, 'Ub'), codec_info[2], codec_info[3], 'strict') lines = f.readlines() f.close() diff --git a/akka-docs/_sphinx/themes/akka/static/effects.core.js b/akka-docs/_sphinx/themes/akka/static/effects.core.js index 53cb941a35..a4740a5ff8 100644 --- a/akka-docs/_sphinx/themes/akka/static/effects.core.js +++ b/akka-docs/_sphinx/themes/akka/static/effects.core.js @@ -1,7 +1,7 @@ /* * jQuery UI Effects 1.5.3 * - * Copyright (c) 2008 Aaron Eisenberger (aaronchi@gmail.com) + * Copyright (C) 2008-2016 Aaron Eisenberger (aaronchi@gmail.com) * Dual licensed under the MIT (MIT-LICENSE.txt) * and GPL (GPL-LICENSE.txt) licenses. * diff --git a/akka-docs/_sphinx/themes/akka/static/effects.highlight.js b/akka-docs/_sphinx/themes/akka/static/effects.highlight.js index c9c332522b..fc3e90421e 100644 --- a/akka-docs/_sphinx/themes/akka/static/effects.highlight.js +++ b/akka-docs/_sphinx/themes/akka/static/effects.highlight.js @@ -1,7 +1,7 @@ /* * jQuery UI Effects Highlight @VERSION * - * Copyright (c) 2008 Aaron Eisenberger (aaronchi@gmail.com) + * Copyright (C) 2008-2016 Aaron Eisenberger (aaronchi@gmail.com) * Dual licensed under the MIT (MIT-LICENSE.txt) * and GPL (GPL-LICENSE.txt) licenses. * diff --git a/akka-docs/_sphinx/themes/akka/static/toc.js b/akka-docs/_sphinx/themes/akka/static/toc.js index 98f18fd518..b9c3ffa475 100644 --- a/akka-docs/_sphinx/themes/akka/static/toc.js +++ b/akka-docs/_sphinx/themes/akka/static/toc.js @@ -3,7 +3,7 @@ * jQuery TOC Plugin v1.1.3 * http://code.google.com/p/samaxesjs/ * - * Copyright (c) 2011 samaxes.com + * Copyright (C) 2011-2016 samaxes.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-docs/rst/additional/osgi.rst b/akka-docs/rst/additional/osgi.rst index 47c91c6851..62f2057517 100644 --- a/akka-docs/rst/additional/osgi.rst +++ b/akka-docs/rst/additional/osgi.rst @@ -1,12 +1,104 @@ Akka in OSGi ============ +Background +---------- + +OSGi_ is a mature packaging and deployment standard for component-based systems. It +has similar capabilities as Project Jigsaw (originally scheduled for JDK 1.8), but has far stronger facilities to +support legacy Java code. This is to say that while Jigsaw-ready modules require significant changes to most source files +and on occasion to the structure of the overall application, OSGi can be used to modularize almost any Java code as far +back as JDK 1.2, usually with no changes at all to the binaries. + +.. 
_OSGI: http://www.osgi.org/developer + +These legacy capabilities are OSGi's major strength and its major weakness. The creators of OSGi realized early on that +implementors would be unlikely to rush to support OSGi metadata in existing JARs. There were already a handful of new +concepts to learn in the JRE and the added value to teams that were managing well with straight J2EE was not obvious. +Facilities emerged to "wrap" binary JARs so they could be used as bundles, but this functionality was only used in limited +situations. An application of the "80/20 Rule" here would have it that "80% of the complexity is with 20% of the configuration", +but it was enough to give OSGi a reputation that has stuck with it to this day. + +This document aims to provide the productivity basics folks need to use it with Akka, the 20% that users need to get 80% of what they want. +For more information than is provided here, `OSGi In Action`_ is worth exploring. + +.. _OSGi In Action: https://www.manning.com/books/osgi-in-action + +Core Components and Structure of OSGi Applications +-------------------------------------------------- + +The fundamental unit of deployment in OSGi is the ``Bundle``. A bundle is a Java JAR with `additional +entries` in ``MANIFEST.MF`` that minimally expose the name and version +of the bundle and packages for import and export. Since these manifest entries are ignored outside OSGi deployments, +a bundle can interchangeably be used as a JAR in the JRE. + +When a bundle is loaded, a specialized implementation of the Java ``ClassLoader`` is instantiated for each bundle. Each +classloader reads the manifest entries and publishes both capabilities (in the form of the ``Bundle-Exports``) and +requirements (as ``Bundle-Imports``) in a container singleton for discovery by other bundles. The process of matching imports to +exports across bundles through these classloaders is the process of resolution, one of six discrete steps in the lifecycle +FSM of a bundle in an OSGi container: + +1. INSTALLED: A bundle that is installed has been loaded from disk and a classloader instantiated with its capabilities. + Bundles are iteratively installed manually or through container-specific descriptors. For those familiar with legacy packaging + such as EJB, the modular nature of OSGi means that bundles may be used by multiple applications with overlapping dependencies. + By resolving them individually from repositories, these overlaps can be de-duplicated across multiple deployments to + the same container. + +2. RESOLVED: A bundle that has been resolved is one that has had its requirements (imports) satisfied. Resolution does + mean that a bundle can be started. + +3. STARTING: A bundle that is started can be used by other bundles. For an otherwise complete application closure of + resolved bundles, the implication here is they must be started in the order directed by a depth-first search for all to + be started. When a bundle is starting, any exposed lifecycle interfaces in the bundle are called, giving the bundle + the opportunity to start its own service endpoints and threads. + +4. ACTIVE: Once a bundle's lifecycle interfaces return without error, a bundle is marked as active. + +5. STOPPING: A bundle that is stopping is in the process of calling the bundle's stop lifecycle and transitions back to + the RESOLVED state when complete. Any long-running services or threads that were created while STARTING should be shut + down when the bundle's stop lifecycle is called. + +6. 
UNINSTALLED: A bundle can only transition to this state from the INSTALLED state, meaning it cannot be uninstalled + before it is stopped. + +Note the dependency in this FSM on lifecycle interfaces. While there is no requirement that a bundle publishes these +interfaces or accepts such callbacks, the lifecycle interfaces provide the semantics of a ``main()`` method and allow +the bundle to start and stop long-running services such as REST web services, ActorSystems, Clusters, etc. + +Secondly, note that when considering requirements and capabilities, it's a common misconception to equate these with repository +dependencies as might be found in Maven or Ivy. While they provide similar practical functionality, OSGi has several +parallel types of dependency (such as Blueprint Services) that cannot be easily mapped to repository capabilities. In fact, +the core specification leaves these facilities up to the container in use. In turn, some containers have tooling to generate +application load descriptors from repository metadata. + +Notable Behavior Changes +------------------------ + +Combined with understanding the bundle lifecycle, the OSGi developer must pay attention to the sometimes unexpected behaviors +that are introduced. These are generally within the JVM specification, but they can lead to frustration. + +* Bundles should not export overlapping package spaces. It is not uncommon for legacy JVM frameworks to expect plugins + in an application composed of multiple JARs to reside under a single package name. For example, a frontend application + might scan all classes from ``com.example.plugins`` for specific service implementations with that package existing in + several contributed JARs. + + While it is possible to support overlapping packages with complex manifest headers, it's much better to use non-overlapping + package spaces and facilities such as `Akka Cluster`_ + for service discovery. Stylistically, many organizations opt to use the root package path as the name of the bundle + distribution file. + +.. _Akka Cluster: @github@/akka-docs/rst/scala/code/docs/akka/current/common/cluster.html + +* Resources are not shared across bundles unless they are explicitly exported, as with classes. The common + case of this is expecting that ``getClass().getClassLoader().getResources("foo")`` will return all files on the classpath + named ``foo``. The ``getResources()`` method only returns resources from the current classloader, and since there are + separate classloaders for every bundle, resource files such as configurations are no longer searchable in this manner. + Configuring the OSGi Framework ------------------------------ -To use Akka in an OSGi environment, the ``org.osgi.framework.bootdelegation`` -property must be set to always delegate the ``sun.misc`` package to the boot classloader -instead of resolving it through the normal OSGi class space. +To use Akka in an OSGi environment, the container must be configured such that the ``org.osgi.framework.bootdelegation`` +property delegates the ``sun.misc`` package to the boot classloader instead of resolving it through the normal OSGi class space. Activator --------- @@ -17,8 +109,11 @@ to conveniently set up the ActorSystem. .. includecode:: code/docs/osgi/Activator.scala#Activator -The ``ActorSystemActivator`` creates the actor system with a class loader that finds resources -(``reference.conf`` files) and classes from the application bundle and all transitive dependencies. 
+The goal here is to map the OSGi lifecycle more directly to the Akka lifecycle. The ``ActorSystemActivator`` creates +the actor system with a class loader that finds resources (``application.conf`` and ``reference.conf`` files) and classes +from the application bundle and all transitive dependencies. + + The ``ActorSystemActivator`` class is included in the ``akka-osgi`` artifact:: @@ -32,5 +127,7 @@ The ``ActorSystemActivator`` class is included in the ``akka-osgi`` artifact:: Sample ------ -A complete sample project is provided in `akka-sample-osgi-dining-hakkers <@github@/akka-samples/akka-sample-osgi-dining-hakkers>`_. +A complete sample project is provided in `akka-sample-osgi-dining-hakkers`_ + +.. _akka-sample-osgi-dining-hakkers: @github@/akka-samples/akka-sample-osgi-dining-hakkers diff --git a/akka-docs/rst/common/code/docs/circuitbreaker/CircuitBreakerDocSpec.scala b/akka-docs/rst/common/code/docs/circuitbreaker/CircuitBreakerDocSpec.scala index e12fc16d90..b14429ba13 100644 --- a/akka-docs/rst/common/code/docs/circuitbreaker/CircuitBreakerDocSpec.scala +++ b/akka-docs/rst/common/code/docs/circuitbreaker/CircuitBreakerDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.circuitbreaker diff --git a/akka-docs/rst/common/code/docs/circuitbreaker/DangerousJavaActor.java b/akka-docs/rst/common/code/docs/circuitbreaker/DangerousJavaActor.java index 81dede9ea5..fd261278aa 100644 --- a/akka-docs/rst/common/code/docs/circuitbreaker/DangerousJavaActor.java +++ b/akka-docs/rst/common/code/docs/circuitbreaker/DangerousJavaActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.circuitbreaker; diff --git a/akka-docs/rst/common/code/docs/duration/Java.java b/akka-docs/rst/common/code/docs/duration/Java.java index 6c659295c4..d3499fd4d3 100644 --- a/akka-docs/rst/common/code/docs/duration/Java.java +++ b/akka-docs/rst/common/code/docs/duration/Java.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2013-2015 Typesafe Inc. + * Copyright (C) 2013-2016 Typesafe Inc. */ package docs.duration; diff --git a/akka-docs/rst/common/code/docs/duration/Sample.scala b/akka-docs/rst/common/code/docs/duration/Sample.scala index 40b0dcfb93..ecc0b4c18f 100644 --- a/akka-docs/rst/common/code/docs/duration/Sample.scala +++ b/akka-docs/rst/common/code/docs/duration/Sample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2013-2015 Typesafe Inc. + * Copyright (C) 2013-2016 Typesafe Inc. */ package docs.duration diff --git a/akka-docs/rst/conf.py b/akka-docs/rst/conf.py index ec706cd072..b38afeb563 100644 --- a/akka-docs/rst/conf.py +++ b/akka-docs/rst/conf.py @@ -8,7 +8,7 @@ import sys, os # -- General configuration ----------------------------------------------------- sys.path.append(os.path.abspath('../_sphinx/exts')) -extensions = ['sphinx.ext.todo', 'includecode'] +extensions = ['sphinx.ext.todo', 'includecode', 'includecode2'] templates_path = ['_templates'] source_suffix = '.rst' diff --git a/akka-docs/rst/general/code/docs/config/ConfigDoc.java b/akka-docs/rst/general/code/docs/config/ConfigDoc.java index ace5cff38f..c50d29b858 100644 --- a/akka-docs/rst/general/code/docs/config/ConfigDoc.java +++ b/akka-docs/rst/general/code/docs/config/ConfigDoc.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.config; diff --git a/akka-docs/rst/general/code/docs/config/ConfigDocSpec.scala b/akka-docs/rst/general/code/docs/config/ConfigDocSpec.scala index 35f0c7e225..d5ea03afb4 100644 --- a/akka-docs/rst/general/code/docs/config/ConfigDocSpec.scala +++ b/akka-docs/rst/general/code/docs/config/ConfigDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.config diff --git a/akka-docs-dev/rst/stages-overview.rst b/akka-docs/rst/general/stream/stages-overview.rst similarity index 88% rename from akka-docs-dev/rst/stages-overview.rst rename to akka-docs/rst/general/stream/stages-overview.rst index fc9a66b703..5c1a4c39ab 100644 --- a/akka-docs-dev/rst/stages-overview.rst +++ b/akka-docs/rst/general/stream/stages-overview.rst @@ -20,7 +20,7 @@ Custom components are not covered by this table since their semantics are define Simple processing stages ^^^^^^^^^^^^^^^^^^^^^^^^ -These stages are all expressible as a ``PushPullStage``. These stages can transform the rate of incoming elements +These stages are all expressible as a ``GraphStage``. These stages can transform the rate of incoming elements since there are stages that emit multiple elements for a single input (e.g. `mapConcat') or consume multiple elements before emitting one output (e.g. ``filter``). However, these rate transformations are data-driven, i.e. it is the incoming elements that define how the rate is affected. This is in contrast with :ref:`detached-stages-overview` @@ -81,15 +81,17 @@ Backpressure aware stages These stages are all expressible as a ``DetachedStage``. These stages are aware of the backpressure provided by their downstreams and able to adapt their behavior to that signal. -===================== ========================================================================================================================= ============================================================================================================================== ===================================================================================== -Stage Emits when Backpressures when Completes when -===================== ========================================================================================================================= ============================================================================================================================== ===================================================================================== -conflate downstream stops backpressuring and there is a conflated element available never [2]_ upstream completes -expand downstream stops backpressuring downstream backpressures upstream completes -buffer (Backpressure) downstream stops backpressuring and there is a pending element in the buffer buffer is full upstream completes and buffered elements has been drained -buffer (DropX) downstream stops backpressuring and there is a pending element in the buffer never [2]_ upstream completes and buffered elements has been drained -buffer (Fail) downstream stops backpressuring and there is a pending element in the buffer fails the stream instead of backpressuring when buffer is full upstream completes and buffered elements has been drained -===================== ========================================================================================================================= ============================================================================================================================== 
===================================================================================== +===================== ========================================================================================================================= ==================================================================================================================================== ===================================================================================== +Stage Emits when Backpressures when Completes when +===================== ========================================================================================================================= ==================================================================================================================================== ===================================================================================== +conflate downstream stops backpressuring and there is a conflated element available never [2]_ upstream completes +batch downstream stops backpressuring and there is a batched element available batched elements reached the max limit of allowed batched elements & downstream backpressures upstream completes and a "possibly pending" element was drained [3]_ +batchWeighted downstream stops backpressuring and there is a batched element available batched elements reached the max weight limit of allowed batched elements (plus a pending element [3]_ ) & downstream backpressures upstream completes and a "possibly pending" element was drained [3]_ +expand downstream stops backpressuring downstream backpressures upstream completes +buffer (Backpressure) downstream stops backpressuring and there is a pending element in the buffer buffer is full upstream completes and buffered elements has been drained +buffer (DropX) downstream stops backpressuring and there is a pending element in the buffer never [2]_ upstream completes and buffered elements has been drained +buffer (Fail) downstream stops backpressuring and there is a pending element in the buffer fails the stream instead of backpressuring when buffer is full upstream completes and buffered elements has been drained +===================== ========================================================================================================================= ==================================================================================================================================== ===================================================================================== Nesting and flattening stages ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -103,9 +105,9 @@ nested streams and turn them into a stream of elements instead (flattening). Stage Emits when Backpressures when Completes when ===================== ========================================================================================================================================= ============================================================================================================================== ===================================================================================== prefixAndTail the configured number of prefix elements are available. Emits this prefix, and the rest as a substream downstream backpressures or substream backpressures prefix elements has been consumed and substream has been consumed -groupBy an element for which the grouping function returns a group that has not yet been created. 
Emits the new group there is an element pending for a group whose substream backpressures upstream completes [3]_ -splitWhen an element for which the provided predicate is true, opening and emitting a new substream for subsequent elements there is an element pending for the next substream, but the previous is not fully consumed yet, or the substream backpressures upstream completes [3]_ -splitAfter an element passes through. When the provided predicate is true it emitts the element * and opens a new substream for subsequent element there is an element pending for the next substream, but the previous is not fully consumed yet, or the substream backpressures upstream completes [3]_ +groupBy an element for which the grouping function returns a group that has not yet been created. Emits the new group there is an element pending for a group whose substream backpressures upstream completes [4]_ +splitWhen an element for which the provided predicate is true, opening and emitting a new substream for subsequent elements there is an element pending for the next substream, but the previous is not fully consumed yet, or the substream backpressures upstream completes [4]_ +splitAfter an element passes through. When the provided predicate is true it emitts the element * and opens a new substream for subsequent element there is an element pending for the next substream, but the previous is not fully consumed yet, or the substream backpressures upstream completes [4]_ flatMapConcat the current consumed substream has an element available downstream backpressures upstream completes and all consumed substreams complete flatMapMerge one of the currently consumed substreams has an element available downstream backpressures upstream completes and all consumed substreams complete ===================== ========================================================================================================================================= ============================================================================================================================== ===================================================================================== @@ -122,7 +124,7 @@ a single output combining the elements from all of the inputs in different ways. 
Stage Emits when Backpressures when Completes when ===================== ========================================================================================================================= ============================================================================================================================== ===================================================================================== merge one of the inputs has an element available downstream backpressures all upstreams complete (*) -mergeSorted all of the inputs have an element available downstream backpressures all upstreams complete +mergeSorted all of the inputs have an element available downstream backpressures all upstreams complete mergePreferred one of the inputs has an element available, preferring a defined input if multiple have elements available downstream backpressures all upstreams complete (*) zip all of the inputs have an element available downstream backpressures any upstream completes zipWith all of the inputs have an element available downstream backpressures any upstream completes @@ -149,6 +151,20 @@ broadcast all of the outputs stops backpressuring and there is an i balance any of the outputs stops backpressuring; emits the element to the first available output all of the outputs backpressure upstream completes ===================== ========================================================================================================================= ============================================================================================================================== ===================================================================================== +Watching status stages +^^^^^^^^^^^^^^^^^^^^^^ + +Materializes to a Future that will be completed with Done or failed depending whether the upstream of the stage has been completed or failed. +The stage otherwise passes through elements unchanged. + +===================== ======================================================================== ========================================================== ===================================================================================== +Stage Emits when Backpressures when Completes when +===================== ======================================================================== ========================================================== ===================================================================================== +watchTermination input has an element available output backpressures upstream completes +===================== ======================================================================== ========================================================== ===================================================================================== + + .. [1] If a Future fails, the stream also fails (unless a different supervision strategy is applied) .. [2] Except if the encapsulated computation is not fast enough -.. [3] Until the end of stream it is not possible to know whether new substreams will be needed or not +.. [3] Batch & BatchWeighted stages eagerly pulling elements, and this behavior may result in a single pending (i.e. buffered) element which cannot be aggregated to the batched value +.. 
[4] Until the end of stream it is not possible to know whether new substreams will be needed or not diff --git a/akka-docs/rst/general/stream/stream-configuration.rst b/akka-docs/rst/general/stream/stream-configuration.rst new file mode 100644 index 0000000000..5257351bf5 --- /dev/null +++ b/akka-docs/rst/general/stream/stream-configuration.rst @@ -0,0 +1,7 @@ +.. _stream-config: + +############# +Configuration +############# + +.. literalinclude:: ../../../../akka-stream/src/main/resources/reference.conf \ No newline at end of file diff --git a/akka-docs-dev/rst/stream-design.rst b/akka-docs/rst/general/stream/stream-design.rst similarity index 100% rename from akka-docs-dev/rst/stream-design.rst rename to akka-docs/rst/general/stream/stream-design.rst diff --git a/akka-docs-dev/rst/images/akka-http-file-listing.png b/akka-docs/rst/images/akka-http-file-listing.png similarity index 100% rename from akka-docs-dev/rst/images/akka-http-file-listing.png rename to akka-docs/rst/images/akka-http-file-listing.png diff --git a/akka-docs-dev/rst/images/asyncBoundary.png b/akka-docs/rst/images/asyncBoundary.png similarity index 100% rename from akka-docs-dev/rst/images/asyncBoundary.png rename to akka-docs/rst/images/asyncBoundary.png diff --git a/akka-docs-dev/rst/images/compose_attributes.png b/akka-docs/rst/images/compose_attributes.png similarity index 100% rename from akka-docs-dev/rst/images/compose_attributes.png rename to akka-docs/rst/images/compose_attributes.png diff --git a/akka-docs-dev/rst/images/compose_composites.png b/akka-docs/rst/images/compose_composites.png similarity index 100% rename from akka-docs-dev/rst/images/compose_composites.png rename to akka-docs/rst/images/compose_composites.png diff --git a/akka-docs-dev/rst/images/compose_graph.png b/akka-docs/rst/images/compose_graph.png similarity index 100% rename from akka-docs-dev/rst/images/compose_graph.png rename to akka-docs/rst/images/compose_graph.png diff --git a/akka-docs-dev/rst/images/compose_graph_flow.png b/akka-docs/rst/images/compose_graph_flow.png similarity index 100% rename from akka-docs-dev/rst/images/compose_graph_flow.png rename to akka-docs/rst/images/compose_graph_flow.png diff --git a/akka-docs-dev/rst/images/compose_graph_partial.png b/akka-docs/rst/images/compose_graph_partial.png similarity index 100% rename from akka-docs-dev/rst/images/compose_graph_partial.png rename to akka-docs/rst/images/compose_graph_partial.png diff --git a/akka-docs-dev/rst/images/compose_graph_shape.png b/akka-docs/rst/images/compose_graph_shape.png similarity index 100% rename from akka-docs-dev/rst/images/compose_graph_shape.png rename to akka-docs/rst/images/compose_graph_shape.png diff --git a/akka-docs-dev/rst/images/compose_mat.png b/akka-docs/rst/images/compose_mat.png similarity index 100% rename from akka-docs-dev/rst/images/compose_mat.png rename to akka-docs/rst/images/compose_mat.png diff --git a/akka-docs-dev/rst/images/compose_nested_flow.png b/akka-docs/rst/images/compose_nested_flow.png similarity index 100% rename from akka-docs-dev/rst/images/compose_nested_flow.png rename to akka-docs/rst/images/compose_nested_flow.png diff --git a/akka-docs-dev/rst/images/compose_nested_flow_opaque.png b/akka-docs/rst/images/compose_nested_flow_opaque.png similarity index 100% rename from akka-docs-dev/rst/images/compose_nested_flow_opaque.png rename to akka-docs/rst/images/compose_nested_flow_opaque.png diff --git a/akka-docs-dev/rst/images/compose_shapes.png b/akka-docs/rst/images/compose_shapes.png similarity index 
100% rename from akka-docs-dev/rst/images/compose_shapes.png rename to akka-docs/rst/images/compose_shapes.png diff --git a/akka-docs-dev/rst/images/composition.png b/akka-docs/rst/images/composition.png similarity index 100% rename from akka-docs-dev/rst/images/composition.png rename to akka-docs/rst/images/composition.png diff --git a/akka-docs-dev/rst/images/composition.svg b/akka-docs/rst/images/composition.svg similarity index 100% rename from akka-docs-dev/rst/images/composition.svg rename to akka-docs/rst/images/composition.svg diff --git a/akka-docs-dev/rst/images/graph_stage_chain.png b/akka-docs/rst/images/graph_stage_chain.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_chain.png rename to akka-docs/rst/images/graph_stage_chain.png diff --git a/akka-docs-dev/rst/images/graph_stage_chain.svg b/akka-docs/rst/images/graph_stage_chain.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_chain.svg rename to akka-docs/rst/images/graph_stage_chain.svg diff --git a/akka-docs-dev/rst/images/graph_stage_conceptual.png b/akka-docs/rst/images/graph_stage_conceptual.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_conceptual.png rename to akka-docs/rst/images/graph_stage_conceptual.png diff --git a/akka-docs-dev/rst/images/graph_stage_conceptual.svg b/akka-docs/rst/images/graph_stage_conceptual.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_conceptual.svg rename to akka-docs/rst/images/graph_stage_conceptual.svg diff --git a/akka-docs-dev/rst/images/graph_stage_detached_tracks_1.png b/akka-docs/rst/images/graph_stage_detached_tracks_1.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_detached_tracks_1.png rename to akka-docs/rst/images/graph_stage_detached_tracks_1.png diff --git a/akka-docs-dev/rst/images/graph_stage_detached_tracks_1.svg b/akka-docs/rst/images/graph_stage_detached_tracks_1.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_detached_tracks_1.svg rename to akka-docs/rst/images/graph_stage_detached_tracks_1.svg diff --git a/akka-docs-dev/rst/images/graph_stage_detached_tracks_2.png b/akka-docs/rst/images/graph_stage_detached_tracks_2.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_detached_tracks_2.png rename to akka-docs/rst/images/graph_stage_detached_tracks_2.png diff --git a/akka-docs-dev/rst/images/graph_stage_detached_tracks_2.svg b/akka-docs/rst/images/graph_stage_detached_tracks_2.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_detached_tracks_2.svg rename to akka-docs/rst/images/graph_stage_detached_tracks_2.svg diff --git a/akka-docs-dev/rst/images/graph_stage_duplicate.png b/akka-docs/rst/images/graph_stage_duplicate.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_duplicate.png rename to akka-docs/rst/images/graph_stage_duplicate.png diff --git a/akka-docs-dev/rst/images/graph_stage_duplicate.svg b/akka-docs/rst/images/graph_stage_duplicate.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_duplicate.svg rename to akka-docs/rst/images/graph_stage_duplicate.svg diff --git a/akka-docs-dev/rst/images/graph_stage_filter.png b/akka-docs/rst/images/graph_stage_filter.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_filter.png rename to akka-docs/rst/images/graph_stage_filter.png diff --git a/akka-docs-dev/rst/images/graph_stage_filter.svg b/akka-docs/rst/images/graph_stage_filter.svg 
similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_filter.svg rename to akka-docs/rst/images/graph_stage_filter.svg diff --git a/akka-docs-dev/rst/images/graph_stage_map.png b/akka-docs/rst/images/graph_stage_map.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_map.png rename to akka-docs/rst/images/graph_stage_map.png diff --git a/akka-docs-dev/rst/images/graph_stage_map.svg b/akka-docs/rst/images/graph_stage_map.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_map.svg rename to akka-docs/rst/images/graph_stage_map.svg diff --git a/akka-docs-dev/rst/images/graph_stage_tracks_1.png b/akka-docs/rst/images/graph_stage_tracks_1.png similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_tracks_1.png rename to akka-docs/rst/images/graph_stage_tracks_1.png diff --git a/akka-docs-dev/rst/images/graph_stage_tracks_1.svg b/akka-docs/rst/images/graph_stage_tracks_1.svg similarity index 100% rename from akka-docs-dev/rst/images/graph_stage_tracks_1.svg rename to akka-docs/rst/images/graph_stage_tracks_1.svg diff --git a/akka-docs-dev/rst/images/inport_transitions.png b/akka-docs/rst/images/inport_transitions.png similarity index 100% rename from akka-docs-dev/rst/images/inport_transitions.png rename to akka-docs/rst/images/inport_transitions.png diff --git a/akka-docs-dev/rst/images/outport_transitions.png b/akka-docs/rst/images/outport_transitions.png similarity index 100% rename from akka-docs-dev/rst/images/outport_transitions.png rename to akka-docs/rst/images/outport_transitions.png diff --git a/akka-docs-dev/rst/images/port_transitions.svg b/akka-docs/rst/images/port_transitions.svg similarity index 100% rename from akka-docs-dev/rst/images/port_transitions.svg rename to akka-docs/rst/images/port_transitions.svg diff --git a/akka-docs-dev/rst/images/simple-graph-example.png b/akka-docs/rst/images/simple-graph-example.png similarity index 100% rename from akka-docs-dev/rst/images/simple-graph-example.png rename to akka-docs/rst/images/simple-graph-example.png diff --git a/akka-docs-dev/rst/images/stage_chain.png b/akka-docs/rst/images/stage_chain.png similarity index 100% rename from akka-docs-dev/rst/images/stage_chain.png rename to akka-docs/rst/images/stage_chain.png diff --git a/akka-docs-dev/rst/images/stage_conceptual.png b/akka-docs/rst/images/stage_conceptual.png similarity index 100% rename from akka-docs-dev/rst/images/stage_conceptual.png rename to akka-docs/rst/images/stage_conceptual.png diff --git a/akka-docs-dev/rst/images/stage_doubler.png b/akka-docs/rst/images/stage_doubler.png similarity index 100% rename from akka-docs-dev/rst/images/stage_doubler.png rename to akka-docs/rst/images/stage_doubler.png diff --git a/akka-docs-dev/rst/images/stage_filter.png b/akka-docs/rst/images/stage_filter.png similarity index 100% rename from akka-docs-dev/rst/images/stage_filter.png rename to akka-docs/rst/images/stage_filter.png diff --git a/akka-docs-dev/rst/images/stage_map.png b/akka-docs/rst/images/stage_map.png similarity index 100% rename from akka-docs-dev/rst/images/stage_map.png rename to akka-docs/rst/images/stage_map.png diff --git a/akka-docs-dev/rst/images/stage_msc_absorb_1.png b/akka-docs/rst/images/stage_msc_absorb_1.png similarity index 100% rename from akka-docs-dev/rst/images/stage_msc_absorb_1.png rename to akka-docs/rst/images/stage_msc_absorb_1.png diff --git a/akka-docs-dev/rst/images/stage_msc_absorb_2.png b/akka-docs/rst/images/stage_msc_absorb_2.png similarity index 100% rename 
from akka-docs-dev/rst/images/stage_msc_absorb_2.png rename to akka-docs/rst/images/stage_msc_absorb_2.png diff --git a/akka-docs-dev/rst/images/stage_msc_buffer.png b/akka-docs/rst/images/stage_msc_buffer.png similarity index 100% rename from akka-docs-dev/rst/images/stage_msc_buffer.png rename to akka-docs/rst/images/stage_msc_buffer.png diff --git a/akka-docs-dev/rst/images/stage_msc_general.png b/akka-docs/rst/images/stage_msc_general.png similarity index 100% rename from akka-docs-dev/rst/images/stage_msc_general.png rename to akka-docs/rst/images/stage_msc_general.png diff --git a/akka-docs-dev/rst/images/stages.svg b/akka-docs/rst/images/stages.svg similarity index 100% rename from akka-docs-dev/rst/images/stages.svg rename to akka-docs/rst/images/stages.svg diff --git a/akka-docs-dev/rst/images/stages_sequence_charts.svg b/akka-docs/rst/images/stages_sequence_charts.svg similarity index 100% rename from akka-docs-dev/rst/images/stages_sequence_charts.svg rename to akka-docs/rst/images/stages_sequence_charts.svg diff --git a/akka-docs/rst/intro/deployment-scenarios.rst b/akka-docs/rst/intro/deployment-scenarios.rst index fca8c2cd69..f59ef00944 100644 --- a/akka-docs/rst/intro/deployment-scenarios.rst +++ b/akka-docs/rst/intro/deployment-scenarios.rst @@ -59,3 +59,15 @@ Use ``Ctrl-C`` to interrupt and exit the application. On a Windows machine you can also use the ``bin\akka-sample-main-scala.bat`` script. + +In a Docker container +===================== +You can use both Akka remoting and Akka Cluster inside of Docker containers. But note +that you will need to take special care with the network configuration when using Docker, +described here: :ref:`remote-configuration-nat` + +For an example of how to set up a project using Akka Cluster and Docker take a look at the +`"akka-docker-cluster" activator template`__. + +__ https://www.typesafe.com/activator/template/akka-docker-cluster + diff --git a/akka-docs/rst/java.rst b/akka-docs/rst/java.rst index 18d8569efb..af63887894 100644 --- a/akka-docs/rst/java.rst +++ b/akka-docs/rst/java.rst @@ -13,6 +13,8 @@ Java Documentation java/index-futures java/index-network java/index-utilities + java/stream/index + java/http/index java/howto experimental/index-java dev/index diff --git a/akka-docs/rst/java/cluster-sharding.rst b/akka-docs/rst/java/cluster-sharding.rst index 16085429ab..f176be3d02 100644 --- a/akka-docs/rst/java/cluster-sharding.rst +++ b/akka-docs/rst/java/cluster-sharding.rst @@ -214,7 +214,7 @@ If the state of the entities are persistent you may stop entities that are not u reduce memory consumption. This is done by the application specific implementation of the entity actors for example by defining receive timeout (``context.setReceiveTimeout``). If a message is already enqueued to the entity when it stops itself the enqueued message -in the mailbox will be dropped. To support graceful passivation without loosing such +in the mailbox will be dropped. To support graceful passivation without losing such messages the entity actor can send ``ShardRegion.Passivate`` to its parent ``Shard``. The specified wrapped message in ``Passivate`` will be sent back to the entity, which is then supposed to stop itself. Incoming messages will be buffered by the ``Shard`` @@ -357,4 +357,4 @@ a ``ClusterShard.ClusterShardingStats`` containing the identifiers of the shards of entities that are alive in each shard. 
The purpose of these messages is testing and monitoring, they are not provided to give access to -directly sending messages to the individual entities. \ No newline at end of file +directly sending messages to the individual entities. diff --git a/akka-docs/rst/java/cluster-usage.rst b/akka-docs/rst/java/cluster-usage.rst index c3b1f565dc..552223a183 100644 --- a/akka-docs/rst/java/cluster-usage.rst +++ b/akka-docs/rst/java/cluster-usage.rst @@ -35,8 +35,8 @@ settings, but with ``akka.cluster.ClusterActorRefProvider``. The ``akka.cluster.seed-nodes`` should normally also be added to your ``application.conf`` file. .. note:: - If you are using Docker or the nodes for some other reason have separate internal and external ip addresses - you must configure remoting according to :ref:`remote-configuration-nat-java` + If you are running Akka in a Docker container or the nodes for some other reason have separate internal and + external ip addresses you must configure remoting according to :ref:`remote-configuration-nat-java` The seed nodes are configured contact points for initial, automatic, join of the cluster. diff --git a/akka-docs/rst/java/code/docs/actor/ActorDocTest.java b/akka-docs/rst/java/code/docs/actor/ActorDocTest.java new file mode 100644 index 0000000000..016e90f31b --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/ActorDocTest.java @@ -0,0 +1,637 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package docs.actor; + +import akka.actor.*; +import akka.japi.pf.ReceiveBuilder; +import akka.testkit.ErrorFilter; +import akka.testkit.EventFilter; +import akka.testkit.TestEvent; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import scala.PartialFunction; +import scala.runtime.BoxedUnit; +import static docs.actor.Messages.Swap.Swap; +import static akka.japi.Util.immutableSeq; + +import java.util.concurrent.TimeUnit; + +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +//#import-props +import akka.actor.Props; +//#import-props +//#import-actorRef +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +//#import-actorRef +//#import-identify +import akka.actor.ActorIdentity; +import akka.actor.ActorSelection; +import akka.actor.Identify; +//#import-identify +//#import-graceFulStop +import akka.pattern.AskTimeoutException; +import scala.concurrent.Await; +import scala.concurrent.duration.Duration; +import scala.concurrent.Future; +import static akka.pattern.Patterns.gracefulStop; +//#import-graceFulStop + +public class ActorDocTest { + + public static Config config = ConfigFactory.parseString( + "akka {\n" + + " loggers = [\"akka.testkit.TestEventListener\"]\n" + + " loglevel = \"WARNING\"\n" + + " stdout-loglevel = \"WARNING\"\n" + + "}\n" + ); + + static ActorSystem system = null; + + @BeforeClass + public static void beforeClass() { + system = ActorSystem.create("ActorDocTest", config); + } + + @AfterClass + public static void afterClass() throws Exception { + Await.result(system.terminate(), Duration.create("5 seconds")); + } + + static + //#context-actorOf + public class FirstActor extends AbstractActor { + final ActorRef child = context().actorOf(Props.create(MyActor.class), "myChild"); + //#plus-some-behavior + public FirstActor() { + receive(ReceiveBuilder. 
+ matchAny(x -> { + sender().tell(x, self()); + }).build() + ); + } + //#plus-some-behavior + } + //#context-actorOf + + static public abstract class SomeActor extends AbstractActor { + //#receive-constructor + public SomeActor() { + receive(ReceiveBuilder. + //#and-some-behavior + match(String.class, s -> System.out.println(s.toLowerCase())). + //#and-some-behavior + build()); + } + //#receive-constructor + @Override + //#receive + public abstract PartialFunction receive(); + //#receive + } + + static public class ActorWithArgs extends AbstractActor { + private final String args; + + ActorWithArgs(String args) { + this.args = args; + receive(ReceiveBuilder. + matchAny(x -> { }).build() + ); + } + } + + static + //#props-factory + public class DemoActor extends AbstractActor { + /** + * Create Props for an actor of this type. + * @param magicNumber The magic number to be passed to this actor’s constructor. + * @return a Props for creating this actor, which can then be further configured + * (e.g. calling `.withDispatcher()` on it) + */ + static Props props(Integer magicNumber) { + // You need to specify the actual type of the returned actor + // since Java 8 lambdas have some runtime type information erased + return Props.create(DemoActor.class, () -> new DemoActor(magicNumber)); + } + + private final Integer magicNumber; + + DemoActor(Integer magicNumber) { + this.magicNumber = magicNumber; + receive(ReceiveBuilder. + match(Integer.class, i -> { + sender().tell(i + magicNumber, self()); + }).build() + ); + } + } + + //#props-factory + static + //#props-factory + public class SomeOtherActor extends AbstractActor { + // Props(new DemoActor(42)) would not be safe + ActorRef demoActor = context().actorOf(DemoActor.props(42), "demo"); + // ... + //#props-factory + public SomeOtherActor() { + receive(emptyBehavior()); + } + //#props-factory + } + //#props-factory + + public static class Hook extends AbstractActor { + ActorRef target = null; + public Hook() { + receive(emptyBehavior()); + } + //#preStart + @Override + public void preStart() { + target = context().actorOf(Props.create(MyActor.class, "target")); + } + //#preStart + //#postStop + @Override + public void postStop() { + //#clean-up-some-resources + final String message = "stopped"; + //#tell + // don’t forget to think about who is the sender (2nd argument) + target.tell(message, self()); + //#tell + final Object result = ""; + //#forward + target.forward(result, context()); + //#forward + target = null; + //#clean-up-some-resources + } + //#postStop + + // compilation test only + public void compileSelections() { + //#selection-local + // will look up this absolute path + context().actorSelection("/user/serviceA/actor"); + // will look up sibling beneath same supervisor + context().actorSelection("../joe"); + //#selection-local + + //#selection-wildcard + // will look all children to serviceB with names starting with worker + context().actorSelection("/user/serviceB/worker*"); + // will look up all siblings beneath same supervisor + context().actorSelection("../*"); + //#selection-wildcard + + //#selection-remote + context().actorSelection("akka.tcp://app@otherhost:1234/user/serviceB"); + //#selection-remote + } + } + + public static class ReplyException extends AbstractActor { + public ReplyException() { + receive(ReceiveBuilder. 
+ matchAny(x -> { + //#reply-exception + try { + String result = operation(); + sender().tell(result, self()); + } catch (Exception e) { + sender().tell(new akka.actor.Status.Failure(e), self()); + throw e; + } + //#reply-exception + }).build() + ); + } + + private String operation() { + return "Hi"; + } + } + + static + //#gracefulStop-actor + public class Manager extends AbstractActor { + private static enum Shutdown { + Shutdown + } + public static final Shutdown SHUTDOWN = Shutdown.Shutdown; + + private ActorRef worker = + context().watch(context().actorOf(Props.create(Cruncher.class), "worker")); + + public Manager() { + receive(ReceiveBuilder. + matchEquals("job", s -> { + worker.tell("crunch", self()); + }). + matchEquals(SHUTDOWN, x -> { + worker.tell(PoisonPill.getInstance(), self()); + context().become(shuttingDown); + }).build() + ); + } + + public PartialFunction shuttingDown = + ReceiveBuilder. + matchEquals("job", s -> { + sender().tell("service unavailable, shutting down", self()); + }). + match(Terminated.class, t -> t.actor().equals(worker), t -> { + context().stop(self()); + }).build(); + } + //#gracefulStop-actor + + @Test + public void usePatternsGracefulStop() throws Exception { + ActorRef actorRef = system.actorOf(Props.create(Manager.class)); + //#gracefulStop + try { + Future stopped = + gracefulStop(actorRef, Duration.create(5, TimeUnit.SECONDS), Manager.SHUTDOWN); + Await.result(stopped, Duration.create(6, TimeUnit.SECONDS)); + // the actor has been stopped + } catch (AskTimeoutException e) { + // the actor wasn't stopped within 5 seconds + } + //#gracefulStop + } + + + public static class Cruncher extends AbstractActor { + public Cruncher() { + receive(ReceiveBuilder. + matchEquals("crunch", s -> { }).build() + ); + } + } + + static + //#swapper + public class Swapper extends AbstractLoggingActor { + public Swapper() { + receive(ReceiveBuilder. + matchEquals(Swap, s -> { + log().info("Hi"); + context().become(ReceiveBuilder. 
+ matchEquals(Swap, x -> { + log().info("Ho"); + context().unbecome(); // resets the latest 'become' (just for fun) + }).build(), false); // push on top instead of replace + }).build() + ); + } + } + + //#swapper + static + //#swapper + public class SwapperApp { + public static void main(String[] args) { + ActorSystem system = ActorSystem.create("SwapperSystem"); + ActorRef swapper = system.actorOf(Props.create(Swapper.class), "swapper"); + swapper.tell(Swap, ActorRef.noSender()); // logs Hi + swapper.tell(Swap, ActorRef.noSender()); // logs Ho + swapper.tell(Swap, ActorRef.noSender()); // logs Hi + swapper.tell(Swap, ActorRef.noSender()); // logs Ho + swapper.tell(Swap, ActorRef.noSender()); // logs Hi + swapper.tell(Swap, ActorRef.noSender()); // logs Ho + system.terminate(); + } + } + //#swapper + + + @Test + public void creatingActorWithSystemActorOf() { + //#system-actorOf + // ActorSystem is a heavy object: create only one per application + final ActorSystem system = ActorSystem.create("MySystem", config); + final ActorRef myActor = system.actorOf(Props.create(MyActor.class), "myactor"); + //#system-actorOf + try { + new JavaTestKit(system) { + { + myActor.tell("hello", getRef()); + expectMsgEquals("hello"); + } + }; + } finally { + JavaTestKit.shutdownActorSystem(system); + } + } + + @Test + public void creatingPropsConfig() { + //#creating-props + Props props1 = Props.create(MyActor.class); + Props props2 = Props.create(ActorWithArgs.class, + () -> new ActorWithArgs("arg")); // careful, see below + Props props3 = Props.create(ActorWithArgs.class, "arg"); + //#creating-props + + //#creating-props-deprecated + // NOT RECOMMENDED within another actor: + // encourages to close over enclosing class + Props props7 = Props.create(ActorWithArgs.class, + () -> new ActorWithArgs("arg")); + //#creating-props-deprecated + } + + @Test(expected=IllegalArgumentException.class) + public void creatingPropsIllegal() { + //#creating-props-illegal + // This will throw an IllegalArgumentException since some runtime + // type information of the lambda is erased. + // Use Props.create(actorClass, Creator) instead. + Props props = Props.create(() -> new ActorWithArgs("arg")); + //#creating-props-illegal + } + + static + //#receive-timeout + public class ReceiveTimeoutActor extends AbstractActor { + //#receive-timeout + ActorRef target = context().system().deadLetters(); + //#receive-timeout + public ReceiveTimeoutActor() { + // To set an initial delay + context().setReceiveTimeout(Duration.create("10 seconds")); + + receive(ReceiveBuilder. + matchEquals("Hello", s -> { + // To set in a response to a message + context().setReceiveTimeout(Duration.create("1 second")); + //#receive-timeout + target = sender(); + target.tell("Hello world", self()); + //#receive-timeout + }). + match(ReceiveTimeout.class, r -> { + // To turn it off + context().setReceiveTimeout(Duration.Undefined()); + //#receive-timeout + target.tell("timeout", self()); + //#receive-timeout + }).build() + ); + } + } + //#receive-timeout + + @Test + public void using_receiveTimeout() { + final ActorRef myActor = system.actorOf(Props.create(ReceiveTimeoutActor.class)); + new JavaTestKit(system) { + { + myActor.tell("Hello", getRef()); + expectMsgEquals("Hello world"); + expectMsgEquals("timeout"); + } + }; + } + + static + //#hot-swap-actor + public class HotSwapActor extends AbstractActor { + private PartialFunction angry; + private PartialFunction happy; + + public HotSwapActor() { + angry = + ReceiveBuilder. 
+ matchEquals("foo", s -> { + sender().tell("I am already angry?", self()); + }). + matchEquals("bar", s -> { + context().become(happy); + }).build(); + + happy = ReceiveBuilder. + matchEquals("bar", s -> { + sender().tell("I am already happy :-)", self()); + }). + matchEquals("foo", s -> { + context().become(angry); + }).build(); + + receive(ReceiveBuilder. + matchEquals("foo", s -> { + context().become(angry); + }). + matchEquals("bar", s -> { + context().become(happy); + }).build() + ); + } + } + //#hot-swap-actor + + @Test + public void using_hot_swap() { + final ActorRef actor = system.actorOf(Props.create(HotSwapActor.class), "hot"); + new JavaTestKit(system) { + { + actor.tell("foo", getRef()); + actor.tell("foo", getRef()); + expectMsgEquals("I am already angry?"); + actor.tell("bar", getRef()); + actor.tell("bar", getRef()); + expectMsgEquals("I am already happy :-)"); + actor.tell("foo", getRef()); + actor.tell("foo", getRef()); + expectMsgEquals("I am already angry?"); + expectNoMsg(Duration.create(1, TimeUnit.SECONDS)); + } + }; + } + + + static + //#stash + public class ActorWithProtocol extends AbstractActorWithStash { + public ActorWithProtocol() { + receive(ReceiveBuilder. + matchEquals("open", s -> { + context().become(ReceiveBuilder. + matchEquals("write", ws -> { /* do writing */ }). + matchEquals("close", cs -> { + unstashAll(); + context().unbecome(); + }). + matchAny(msg -> stash()).build(), false); + }). + matchAny(msg -> stash()).build() + ); + } + } + //#stash + + @Test + public void using_Stash() { + final ActorRef actor = system.actorOf(Props.create(ActorWithProtocol.class), "stash"); + } + + static + //#watch + public class WatchActor extends AbstractActor { + private final ActorRef child = context().actorOf(Props.empty(), "target"); + private ActorRef lastSender = system.deadLetters(); + + public WatchActor() { + context().watch(child); // <-- this is the only call needed for registration + + receive(ReceiveBuilder. + matchEquals("kill", s -> { + context().stop(child); + lastSender = sender(); + }). + match(Terminated.class, t -> t.actor().equals(child), t -> { + lastSender.tell("finished", self()); + }).build() + ); + } + } + //#watch + + @Test + public void using_watch() { + ActorRef actor = system.actorOf(Props.create(WatchActor.class)); + + new JavaTestKit(system) { + { + actor.tell("kill", getRef()); + expectMsgEquals("finished"); + } + }; + } + + static + //#identify + public class Follower extends AbstractActor { + final Integer identifyId = 1; + + public Follower(){ + ActorSelection selection = context().actorSelection("/user/another"); + selection.tell(new Identify(identifyId), self()); + + receive(ReceiveBuilder. + match(ActorIdentity.class, id -> id.getRef() != null, id -> { + ActorRef ref = id.getRef(); + context().watch(ref); + context().become(active(ref)); + }). + match(ActorIdentity.class, id -> id.getRef() == null, id -> { + context().stop(self()); + }).build() + ); + } + + final PartialFunction active(final ActorRef another) { + return ReceiveBuilder. 
+ match(Terminated.class, t -> t.actor().equals(another), t -> { + context().stop(self()); + }).build(); + } + } + //#identify + + @Test + public void using_Identify() { + ActorRef a = system.actorOf(Props.empty()); + ActorRef b = system.actorOf(Props.create(Follower.class)); + + new JavaTestKit(system) { + { + watch(b); + system.stop(a); + assertEquals(expectMsgClass(Duration.create(2, TimeUnit.SECONDS), Terminated.class).actor(), b); + } + }; + } + + public static class NoReceiveActor extends AbstractActor { + } + + @Test + public void noReceiveActor() { + EventFilter ex1 = new ErrorFilter(ActorInitializationException.class); + EventFilter[] ignoreExceptions = { ex1 }; + try { + system.eventStream().publish(new TestEvent.Mute(immutableSeq(ignoreExceptions))); + new JavaTestKit(system) {{ + final ActorRef victim = new EventFilter(ActorInitializationException.class) { + protected ActorRef run() { + return system.actorOf(Props.create(NoReceiveActor.class), "victim"); + } + }.message("Actor behavior has not been set with receive(...)").occurrences(1).exec(); + + assertEquals(true, victim.isTerminated()); + }}; + } finally { + system.eventStream().publish(new TestEvent.UnMute(immutableSeq(ignoreExceptions))); + } + } + + public static class MultipleReceiveActor extends AbstractActor { + public MultipleReceiveActor() { + receive(ReceiveBuilder. + match(String.class, s1 -> s1.toLowerCase().equals("become"), s1 -> { + sender().tell(s1.toUpperCase(), self()); + receive(ReceiveBuilder. + match(String.class, s2 -> { + sender().tell(s2.toLowerCase(), self()); + }).build() + ); + }). + match(String.class, s1 -> { + sender().tell(s1.toUpperCase(), self()); + }).build() + ); + } + } + + @Test + public void multipleReceiveActor() { + EventFilter ex1 = new ErrorFilter(IllegalActorStateException.class); + EventFilter[] ignoreExceptions = { ex1 }; + try { + system.eventStream().publish(new TestEvent.Mute(immutableSeq(ignoreExceptions))); + new JavaTestKit(system) {{ + new EventFilter(IllegalActorStateException.class) { + protected Boolean run() { + ActorRef victim = system.actorOf(Props.create(MultipleReceiveActor.class), "victim2"); + victim.tell("Foo", getRef()); + expectMsgEquals("FOO"); + victim.tell("bEcoMe", getRef()); + expectMsgEquals("BECOME"); + victim.tell("Foo", getRef()); + // if it's upper case, then the actor was restarted + expectMsgEquals("FOO"); + return true; + } + }.message("Actor behavior has already been set with receive(...), " + + "use context().become(...) to change it later").occurrences(1).exec(); + }}; + } finally { + system.eventStream().publish(new TestEvent.UnMute(immutableSeq(ignoreExceptions))); + } + } + +} diff --git a/akka-docs/rst/java/code/docs/actor/FSMDocTest.java b/akka-docs/rst/java/code/docs/actor/FSMDocTest.java index cc3d7a138f..a624962591 100644 --- a/akka-docs/rst/java/code/docs/actor/FSMDocTest.java +++ b/akka-docs/rst/java/code/docs/actor/FSMDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/FaultHandlingTest.java b/akka-docs/rst/java/code/docs/actor/FaultHandlingTest.java index 9932e78ede..e879d3e85e 100644 --- a/akka-docs/rst/java/code/docs/actor/FaultHandlingTest.java +++ b/akka-docs/rst/java/code/docs/actor/FaultHandlingTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/FaultHandlingTestJava8.java b/akka-docs/rst/java/code/docs/actor/FaultHandlingTestJava8.java new file mode 100644 index 0000000000..d1f653854a --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/FaultHandlingTestJava8.java @@ -0,0 +1,204 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ +package docs.actor; + +//#testkit +import akka.actor.*; + +import static akka.actor.SupervisorStrategy.resume; +import static akka.actor.SupervisorStrategy.restart; +import static akka.actor.SupervisorStrategy.stop; +import static akka.actor.SupervisorStrategy.escalate; +import akka.japi.pf.DeciderBuilder; +import akka.japi.pf.ReceiveBuilder; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import scala.PartialFunction; +import scala.concurrent.Await; +import static akka.pattern.Patterns.ask; +import scala.concurrent.duration.Duration; +import akka.testkit.TestProbe; + +//#testkit +import akka.testkit.ErrorFilter; +import akka.testkit.EventFilter; +import akka.testkit.TestEvent; +import akka.testkit.JavaTestKit; +import static java.util.concurrent.TimeUnit.SECONDS; +import static akka.japi.Util.immutableSeq; +import scala.Option; + +import org.junit.Test; +import org.junit.BeforeClass; +import org.junit.AfterClass; +import scala.runtime.BoxedUnit; + +//#testkit +public class FaultHandlingTestJava8 { +//#testkit + + public static Config config = ConfigFactory.parseString( + "akka {\n" + + " loggers = [\"akka.testkit.TestEventListener\"]\n" + + " loglevel = \"WARNING\"\n" + + " stdout-loglevel = \"WARNING\"\n" + + "}\n"); + + static + //#supervisor + public class Supervisor extends AbstractActor { + + //#strategy + private static SupervisorStrategy strategy = + new OneForOneStrategy(10, Duration.create("1 minute"), DeciderBuilder. + match(ArithmeticException.class, e -> resume()). + match(NullPointerException.class, e -> restart()). + match(IllegalArgumentException.class, e -> stop()). + matchAny(o -> escalate()).build()); + + @Override + public SupervisorStrategy supervisorStrategy() { + return strategy; + } + + //#strategy + + public Supervisor() { + receive(ReceiveBuilder. + match(Props.class, props -> { + sender().tell(context().actorOf(props), self()); + }).build() + ); + } + } + + //#supervisor + + static + //#supervisor2 + public class Supervisor2 extends AbstractActor { + + //#strategy2 + private static SupervisorStrategy strategy = + new OneForOneStrategy(10, Duration.create("1 minute"), DeciderBuilder. + match(ArithmeticException.class, e -> resume()). + match(NullPointerException.class, e -> restart()). + match(IllegalArgumentException.class, e -> stop()). + matchAny(o -> escalate()).build()); + + @Override + public SupervisorStrategy supervisorStrategy() { + return strategy; + } + + //#strategy2 + + public Supervisor2() { + receive(ReceiveBuilder. + match(Props.class, props -> { + sender().tell(context().actorOf(props), self()); + }).build() + ); + } + + @Override + public void preRestart(Throwable cause, Option msg) { + // do not kill all children, which is the default here + } + } + + //#supervisor2 + + static + //#child + public class Child extends AbstractActor { + int state = 0; + + public Child() { + receive(ReceiveBuilder. + match(Exception.class, exception -> { throw exception; }). + match(Integer.class, i -> state = i). 
+ matchEquals("get", s -> sender().tell(state, self())).build() + ); + } + } + + //#child + + //#testkit + static ActorSystem system; + Duration timeout = Duration.create(5, SECONDS); + + @BeforeClass + public static void start() { + system = ActorSystem.create("FaultHandlingTest", config); + } + + @AfterClass + public static void cleanup() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + @Test + public void mustEmploySupervisorStrategy() throws Exception { + // code here + //#testkit + EventFilter ex1 = new ErrorFilter(ArithmeticException.class); + EventFilter ex2 = new ErrorFilter(NullPointerException.class); + EventFilter ex3 = new ErrorFilter(IllegalArgumentException.class); + EventFilter ex4 = new ErrorFilter(Exception.class); + EventFilter[] ignoreExceptions = { ex1, ex2, ex3, ex4 }; + system.eventStream().publish(new TestEvent.Mute(immutableSeq(ignoreExceptions))); + + //#create + Props superprops = Props.create(Supervisor.class); + ActorRef supervisor = system.actorOf(superprops, "supervisor"); + ActorRef child = (ActorRef) Await.result(ask(supervisor, + Props.create(Child.class), 5000), timeout); + //#create + + //#resume + child.tell(42, ActorRef.noSender()); + assert Await.result(ask(child, "get", 5000), timeout).equals(42); + child.tell(new ArithmeticException(), ActorRef.noSender()); + assert Await.result(ask(child, "get", 5000), timeout).equals(42); + //#resume + + //#restart + child.tell(new NullPointerException(), ActorRef.noSender()); + assert Await.result(ask(child, "get", 5000), timeout).equals(0); + //#restart + + //#stop + final TestProbe probe = new TestProbe(system); + probe.watch(child); + child.tell(new IllegalArgumentException(), ActorRef.noSender()); + probe.expectMsgClass(Terminated.class); + //#stop + + //#escalate-kill + child = (ActorRef) Await.result(ask(supervisor, + Props.create(Child.class), 5000), timeout); + probe.watch(child); + assert Await.result(ask(child, "get", 5000), timeout).equals(0); + child.tell(new Exception(), ActorRef.noSender()); + probe.expectMsgClass(Terminated.class); + //#escalate-kill + + //#escalate-restart + superprops = Props.create(Supervisor2.class); + supervisor = system.actorOf(superprops); + child = (ActorRef) Await.result(ask(supervisor, + Props.create(Child.class), 5000), timeout); + child.tell(23, ActorRef.noSender()); + assert Await.result(ask(child, "get", 5000), timeout).equals(23); + child.tell(new Exception(), ActorRef.noSender()); + assert Await.result(ask(child, "get", 5000), timeout).equals(0); + //#escalate-restart + //#testkit + } + +} +//#testkit diff --git a/akka-docs/rst/java/code/docs/actor/FirstUntypedActor.java b/akka-docs/rst/java/code/docs/actor/FirstUntypedActor.java index 09404068b8..bc1163e513 100644 --- a/akka-docs/rst/java/code/docs/actor/FirstUntypedActor.java +++ b/akka-docs/rst/java/code/docs/actor/FirstUntypedActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/ImmutableMessage.java b/akka-docs/rst/java/code/docs/actor/ImmutableMessage.java index 9cbc2f5af9..eee2fa41c8 100644 --- a/akka-docs/rst/java/code/docs/actor/ImmutableMessage.java +++ b/akka-docs/rst/java/code/docs/actor/ImmutableMessage.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/InboxDocTest.java b/akka-docs/rst/java/code/docs/actor/InboxDocTest.java index 29b76214e6..6bfcaf1d57 100644 --- a/akka-docs/rst/java/code/docs/actor/InboxDocTest.java +++ b/akka-docs/rst/java/code/docs/actor/InboxDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/InitializationDocSpecJava.java b/akka-docs/rst/java/code/docs/actor/InitializationDocSpecJava.java index 444d257ce6..424df81f11 100644 --- a/akka-docs/rst/java/code/docs/actor/InitializationDocSpecJava.java +++ b/akka-docs/rst/java/code/docs/actor/InitializationDocSpecJava.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/InitializationDocTest.java b/akka-docs/rst/java/code/docs/actor/InitializationDocTest.java new file mode 100644 index 0000000000..fb3343a2e7 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/InitializationDocTest.java @@ -0,0 +1,67 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package docs.actor; + +import akka.actor.*; +import akka.japi.pf.ReceiveBuilder; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.PartialFunction; +import scala.concurrent.duration.Duration; +import scala.runtime.BoxedUnit; +import scala.concurrent.Await; + +import java.util.concurrent.TimeUnit; + +public class InitializationDocTest { + + static ActorSystem system = null; + + @BeforeClass + public static void beforeClass() { + system = ActorSystem.create("InitializationDocTest"); + } + + @AfterClass + public static void afterClass() throws Exception { + Await.result(system.terminate(), Duration.create("5 seconds")); + } + + public static class MessageInitExample extends AbstractActor { + private String initializeMe = null; + + public MessageInitExample() { + //#messageInit + receive(ReceiveBuilder. + matchEquals("init", m1 -> { + initializeMe = "Up and running"; + context().become(ReceiveBuilder. + matchEquals("U OK?", m2 -> { + sender().tell(initializeMe, self()); + }).build()); + }).build() + //#messageInit + ); + } + } + + @Test + public void testIt() { + + new JavaTestKit(system) {{ + ActorRef testactor = system.actorOf(Props.create(MessageInitExample.class), "testactor"); + String msg = "U OK?"; + + testactor.tell(msg, getRef()); + expectNoMsg(Duration.create(1, TimeUnit.SECONDS)); + + testactor.tell("init", getRef()); + testactor.tell(msg, getRef()); + expectMsgEquals("Up and running"); + }}; + } +} diff --git a/akka-docs/rst/java/code/docs/actor/Messages.java b/akka-docs/rst/java/code/docs/actor/Messages.java new file mode 100644 index 0000000000..1adfbd8fd9 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/Messages.java @@ -0,0 +1,149 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class Messages { + static + //#immutable-message + public class ImmutableMessage { + private final int sequenceNumber; + private final List values; + + public ImmutableMessage(int sequenceNumber, List values) { + this.sequenceNumber = sequenceNumber; + this.values = Collections.unmodifiableList(new ArrayList(values)); + } + + public int getSequenceNumber() { + return sequenceNumber; + } + + public List getValues() { + return values; + } + } + //#immutable-message + + public static class DoIt { + private final ImmutableMessage msg; + + DoIt(ImmutableMessage msg) { + this.msg = msg; + } + + public ImmutableMessage getMsg() { + return msg; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DoIt doIt = (DoIt) o; + + if (!msg.equals(doIt.msg)) return false; + + return true; + } + + @Override + public int hashCode() { + return msg.hashCode(); + } + + @Override + public String toString() { + return "DoIt{" + + "msg=" + msg + + '}'; + } + } + + public static class Message { + final String str; + + Message(String str) { + this.str = str; + } + + public String getStr() { + return str; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Message message = (Message) o; + + if (!str.equals(message.str)) return false; + + return true; + } + + @Override + public int hashCode() { + return str.hashCode(); + } + + @Override + public String toString() { + return "Message{" + + "str='" + str + '\'' + + '}'; + } + } + + public static enum Swap { + Swap + } + + public static class Result { + final String x; + final String s; + + public Result(String x, String s) { + this.x = x; + this.s = s; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((s == null) ? 0 : s.hashCode()); + result = prime * result + ((x == null) ? 0 : x.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Result other = (Result) obj; + if (s == null) { + if (other.s != null) + return false; + } else if (!s.equals(other.s)) + return false; + if (x == null) { + if (other.x != null) + return false; + } else if (!x.equals(other.x)) + return false; + return true; + } + } +} diff --git a/akka-docs/rst/java/code/docs/actor/MyBoundedUntypedActor.java b/akka-docs/rst/java/code/docs/actor/MyBoundedUntypedActor.java index b09f89eebb..a9ea7b3dc9 100644 --- a/akka-docs/rst/java/code/docs/actor/MyBoundedUntypedActor.java +++ b/akka-docs/rst/java/code/docs/actor/MyBoundedUntypedActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/MyJavaActor.java b/akka-docs/rst/java/code/docs/actor/MyJavaActor.java new file mode 100644 index 0000000000..1cda43913f --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/MyJavaActor.java @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor; + +//#imports +import akka.actor.AbstractActor; +import akka.event.Logging; +import akka.event.LoggingAdapter; +import akka.japi.pf.ReceiveBuilder; + +//#imports + +//#my-actor +public class MyJavaActor extends AbstractActor { + private final LoggingAdapter log = Logging.getLogger(context().system(), this); + + public MyJavaActor() { + receive(ReceiveBuilder. + match(String.class, s -> { + log.info("Received String message: {}", s); + //#my-actor + //#reply + sender().tell(s, self()); + //#reply + //#my-actor + }). + matchAny(o -> log.info("received unknown message")).build() + ); + } +} +//#my-actor diff --git a/akka-docs/rst/java/code/docs/actor/MyReceiveTimeoutUntypedActor.java b/akka-docs/rst/java/code/docs/actor/MyReceiveTimeoutUntypedActor.java index 192c0b74b1..39e63df6eb 100644 --- a/akka-docs/rst/java/code/docs/actor/MyReceiveTimeoutUntypedActor.java +++ b/akka-docs/rst/java/code/docs/actor/MyReceiveTimeoutUntypedActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/MyStoppingActor.java b/akka-docs/rst/java/code/docs/actor/MyStoppingActor.java index fcdf0326aa..a7a78674ee 100644 --- a/akka-docs/rst/java/code/docs/actor/MyStoppingActor.java +++ b/akka-docs/rst/java/code/docs/actor/MyStoppingActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/MyUntypedActor.java b/akka-docs/rst/java/code/docs/actor/MyUntypedActor.java index a62ce8605e..004bf71243 100644 --- a/akka-docs/rst/java/code/docs/actor/MyUntypedActor.java +++ b/akka-docs/rst/java/code/docs/actor/MyUntypedActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/SampleActor.java b/akka-docs/rst/java/code/docs/actor/SampleActor.java new file mode 100644 index 0000000000..4dd2d6b574 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/SampleActor.java @@ -0,0 +1,36 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package docs.actor; + +//#sample-actor +import akka.actor.AbstractActor; +import akka.japi.pf.ReceiveBuilder; +import scala.PartialFunction; +import scala.runtime.BoxedUnit; + +public class SampleActor extends AbstractActor { + + private PartialFunction guarded = ReceiveBuilder. + match(String.class, s -> s.contains("guard"), s -> { + sender().tell("contains(guard): " + s, self()); + context().unbecome(); + }).build(); + + public SampleActor() { + receive(ReceiveBuilder. + match(Double.class, d -> { + sender().tell(d.isNaN() ? 0 : d, self()); + }). + match(Integer.class, i -> { + sender().tell(i * 10, self()); + }). + match(String.class, s -> s.startsWith("guard"), s -> { + sender().tell("startsWith(guard): " + s.toUpperCase(), self()); + context().become(guarded, false); + }).build() + ); + } +} +//#sample-actor diff --git a/akka-docs/rst/java/code/docs/actor/SampleActorTest.java b/akka-docs/rst/java/code/docs/actor/SampleActorTest.java new file mode 100644 index 0000000000..d1ea16a1a6 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/SampleActorTest.java @@ -0,0 +1,55 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor; + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class SampleActorTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("SampleActorTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + @Test + public void testSampleActor() + { + new JavaTestKit(system) {{ + final ActorRef subject = system.actorOf(Props.create(SampleActor.class), "sample-actor"); + final ActorRef probeRef = getRef(); + + subject.tell(47.11, probeRef); + subject.tell("and no guard in the beginning", probeRef); + subject.tell("guard is a good thing", probeRef); + subject.tell(47.11, probeRef); + subject.tell(4711, probeRef); + subject.tell("and no guard in the beginning", probeRef); + subject.tell(4711, probeRef); + subject.tell("and an unmatched message", probeRef); + + expectMsgEquals(47.11); + assertTrue(expectMsgClass(String.class).startsWith("startsWith(guard):")); + assertTrue(expectMsgClass(String.class).startsWith("contains(guard):")); + expectMsgEquals(47110); + expectNoMsg(); + }}; + } +} diff --git a/akka-docs/rst/java/code/docs/actor/SchedulerDocTest.java b/akka-docs/rst/java/code/docs/actor/SchedulerDocTest.java index 43e85e5c38..5cdad7acec 100644 --- a/akka-docs/rst/java/code/docs/actor/SchedulerDocTest.java +++ b/akka-docs/rst/java/code/docs/actor/SchedulerDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/TypedActorDocTest.java b/akka-docs/rst/java/code/docs/actor/TypedActorDocTest.java index 96ce22f96f..541de57881 100644 --- a/akka-docs/rst/java/code/docs/actor/TypedActorDocTest.java +++ b/akka-docs/rst/java/code/docs/actor/TypedActorDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/UntypedActorDocTest.java b/akka-docs/rst/java/code/docs/actor/UntypedActorDocTest.java index f36b319da3..0d83ce0988 100644 --- a/akka-docs/rst/java/code/docs/actor/UntypedActorDocTest.java +++ b/akka-docs/rst/java/code/docs/actor/UntypedActorDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/UntypedActorSwapper.java b/akka-docs/rst/java/code/docs/actor/UntypedActorSwapper.java index f9769e1d1f..aacb379df4 100644 --- a/akka-docs/rst/java/code/docs/actor/UntypedActorSwapper.java +++ b/akka-docs/rst/java/code/docs/actor/UntypedActorSwapper.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor; diff --git a/akka-docs/rst/java/code/docs/actor/fsm/Buncher.java b/akka-docs/rst/java/code/docs/actor/fsm/Buncher.java new file mode 100644 index 0000000000..6ce2bdb532 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/fsm/Buncher.java @@ -0,0 +1,136 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor.fsm; + +//#simple-imports +import akka.actor.AbstractFSM; +import akka.actor.ActorRef; +import akka.japi.pf.UnitMatch; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import scala.concurrent.duration.Duration; +//#simple-imports + +import static docs.actor.fsm.Buncher.Data; +import static docs.actor.fsm.Buncher.State.*; +import static docs.actor.fsm.Buncher.State; +import static docs.actor.fsm.Buncher.Uninitialized.*; +import static docs.actor.fsm.Events.*; + +//#simple-fsm +public class Buncher extends AbstractFSM { + { + //#fsm-body + startWith(Idle, Uninitialized); + + //#when-syntax + when(Idle, + matchEvent(SetTarget.class, Uninitialized.class, + (setTarget, uninitialized) -> + stay().using(new Todo(setTarget.getRef(), new LinkedList<>())))); + //#when-syntax + + //#transition-elided + onTransition( + matchState(Active, Idle, () -> { + // reuse this matcher + final UnitMatch m = UnitMatch.create( + matchData(Todo.class, + todo -> todo.getTarget().tell(new Batch(todo.getQueue()), self()))); + m.match(stateData()); + }). + state(Idle, Active, () -> {/* Do something here */})); + //#transition-elided + + when(Active, Duration.create(1, "second"), + matchEvent(Arrays.asList(Flush.class, StateTimeout()), Todo.class, + (event, todo) -> goTo(Idle).using(todo.copy(new LinkedList<>())))); + + //#unhandled-elided + whenUnhandled( + matchEvent(Queue.class, Todo.class, + (queue, todo) -> goTo(Active).using(todo.addElement(queue.getObj()))). + anyEvent((event, state) -> { + log().warning("received unhandled request {} in state {}/{}", + event, stateName(), state); + return stay(); + })); + //#unhandled-elided + + initialize(); + //#fsm-body + } + //#simple-fsm + + static + //#simple-state + // states + enum State { + Idle, Active + } + + //#simple-state + static + //#simple-state + // state data + interface Data { + } + + //#simple-state + static + //#simple-state + enum Uninitialized implements Data { + Uninitialized + } + + //#simple-state + static + //#simple-state + final class Todo implements Data { + private final ActorRef target; + private final List queue; + + public Todo(ActorRef target, List queue) { + this.target = target; + this.queue = queue; + } + + public ActorRef getTarget() { + return target; + } + + public List getQueue() { + return queue; + } + //#boilerplate + + @Override + public String toString() { + return "Todo{" + + "target=" + target + + ", queue=" + queue + + '}'; + } + + public Todo addElement(Object element) { + List nQueue = new LinkedList<>(queue); + nQueue.add(element); + return new Todo(this.target, nQueue); + } + + public Todo copy(List queue) { + return new Todo(this.target, queue); + } + + public Todo copy(ActorRef target) { + return new Todo(target, this.queue); + } + //#boilerplate + } + //#simple-state + //#simple-fsm +} +//#simple-fsm diff --git a/akka-docs/rst/java/code/docs/actor/fsm/BuncherTest.java b/akka-docs/rst/java/code/docs/actor/fsm/BuncherTest.java new file mode 100644 index 0000000000..678b7b8bab --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/fsm/BuncherTest.java @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor.fsm; + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import java.util.LinkedList; + +import docs.actor.fsm.*; +import static docs.actor.fsm.Events.Batch; +import static docs.actor.fsm.Events.Queue; +import static docs.actor.fsm.Events.SetTarget; +import static docs.actor.fsm.Events.Flush.Flush; + +//#test-code +public class BuncherTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("BuncherTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + @Test + public void testBuncherActorBatchesCorrectly() { + new JavaTestKit(system) {{ + final ActorRef buncher = + system.actorOf(Props.create(Buncher.class)); + final ActorRef probe = getRef(); + + buncher.tell(new SetTarget(probe), probe); + buncher.tell(new Queue(42), probe); + buncher.tell(new Queue(43), probe); + LinkedList list1 = new LinkedList<>(); + list1.add(42); + list1.add(43); + expectMsgEquals(new Batch(list1)); + buncher.tell(new Queue(44), probe); + buncher.tell(Flush, probe); + buncher.tell(new Queue(45), probe); + LinkedList list2 = new LinkedList<>(); + list2.add(44); + expectMsgEquals(new Batch(list2)); + LinkedList list3 = new LinkedList<>(); + list3.add(45); + expectMsgEquals(new Batch(list3)); + system.stop(buncher); + }}; + } + + @Test + public void testBuncherActorDoesntBatchUninitialized() { + new JavaTestKit(system) {{ + final ActorRef buncher = + system.actorOf(Props.create(Buncher.class)); + final ActorRef probe = getRef(); + + buncher.tell(new Queue(42), probe); + expectNoMsg(); + system.stop(buncher); + }}; + } +} +//#test-code diff --git a/akka-docs/rst/java/code/docs/actor/fsm/Events.java b/akka-docs/rst/java/code/docs/actor/fsm/Events.java new file mode 100644 index 0000000000..27a8670a40 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/fsm/Events.java @@ -0,0 +1,108 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package docs.actor.fsm; + +import akka.actor.ActorRef; +import java.util.List; + +public class Events { + + static + //#simple-events + public final class SetTarget { + private final ActorRef ref; + + public SetTarget(ActorRef ref) { + this.ref = ref; + } + + public ActorRef getRef() { + return ref; + } + //#boilerplate + + @Override + public String toString() { + return "SetTarget{" + + "ref=" + ref + + '}'; + } + //#boilerplate + } + + //#simple-events + static + //#simple-events + public final class Queue { + private final Object obj; + + public Queue(Object obj) { + this.obj = obj; + } + + public Object getObj() { + return obj; + } + //#boilerplate + + @Override + public String toString() { + return "Queue{" + + "obj=" + obj + + '}'; + } + //#boilerplate + } + + //#simple-events + static + //#simple-events + public final class Batch { + private final List list; + + public Batch(List list) { + this.list = list; + } + + public List getList() { + return list; + } + //#boilerplate + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Batch batch = (Batch) o; + + return list.equals(batch.list); + } + + @Override + public int hashCode() { + return list.hashCode(); + } + + @Override + public String toString() { + final StringBuilder builder = new StringBuilder(); + builder.append( "Batch{list="); + list.stream().forEachOrdered(e -> { builder.append(e); builder.append(","); }); + int len = builder.length(); + builder.replace(len, len, "}"); + return builder.toString(); + } + //#boilerplate + } + + //#simple-events + static + //#simple-events + public enum Flush { + Flush + } + //#simple-events +} diff --git a/akka-docs/rst/java/code/docs/actor/fsm/FSMDocTest.java b/akka-docs/rst/java/code/docs/actor/fsm/FSMDocTest.java new file mode 100644 index 0000000000..155d0c02c1 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/fsm/FSMDocTest.java @@ -0,0 +1,179 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package docs.actor.fsm; + +import akka.actor.*; +import akka.testkit.JavaTestKit; +import org.hamcrest.CoreMatchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.duration.Duration; + +import static org.junit.Assert.*; + +import static docs.actor.fsm.FSMDocTest.StateType.*; +import static docs.actor.fsm.FSMDocTest.Messages.*; +import static java.util.concurrent.TimeUnit.*; + +public class FSMDocTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FSMDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + public static enum StateType { + SomeState, + Processing, + Idle, + Active, + Error + } + + public static enum Messages { + WillDo, + Tick + } + + public static enum Data { + Foo, + Bar + }; + + public static interface X {}; + + public static class DummyFSM extends AbstractFSM { + Integer newData = 42; + //#alt-transition-syntax + public void handler(StateType from, StateType to) { + // handle transition here + } + + //#alt-transition-syntax + { + //#modifier-syntax + when(SomeState, matchAnyEvent((msg, data) -> { + return goTo(Processing).using(newData). 
+ forMax(Duration.create(5, SECONDS)).replying(WillDo); + })); + //#modifier-syntax + + //#NullFunction + when(SomeState, AbstractFSM.NullFunction()); + //#NullFunction + + //#transition-syntax + onTransition( + matchState(Active, Idle, () -> setTimer("timeout", + Tick, Duration.create(1, SECONDS), true)). + state(Active, null, () -> cancelTimer("timeout")). + state(null, Idle, (f, t) -> log().info("entering Idle from " + f))); + //#transition-syntax + + //#alt-transition-syntax + onTransition(this::handler); + //#alt-transition-syntax + + //#stop-syntax + when(Error, matchEventEquals("stop", (event, data) -> { + // do cleanup ... + return stop(); + })); + //#stop-syntax + + //#termination-syntax + onTermination( + matchStop(Normal(), + (state, data) -> {/* Do something here */}). + stop(Shutdown(), + (state, data) -> {/* Do something here */}). + stop(Failure.class, + (reason, state, data) -> {/* Do something here */})); + //#termination-syntax + + //#unhandled-syntax + whenUnhandled( + matchEvent(X.class, (x, data) -> { + log().info("Received unhandled event: " + x); + return stay(); + }). + anyEvent((event, data) -> { + log().warning("Received unknown event: " + event); + return goTo(Error); + })); + } + //#unhandled-syntax + } + + static + //#logging-fsm + public class MyFSM extends AbstractLoggingFSM { + //#body-elided + //#logging-fsm + ActorRef target = null; + //#logging-fsm + @Override + public int logDepth() { return 12; } + { + onTermination( + matchStop(Failure.class, (reason, state, data) -> { + String lastEvents = getLog().mkString("\n\t"); + log().warning("Failure in state " + state + " with data " + data + "\n" + + "Events leading up to this point:\n\t" + lastEvents); + //#logging-fsm + target.tell(reason.cause(), self()); + target.tell(state, self()); + target.tell(data, self()); + target.tell(lastEvents, self()); + //#logging-fsm + }) + ); + //... + //#logging-fsm + startWith(SomeState, Data.Foo); + when(SomeState, matchEvent(ActorRef.class, Data.class, (ref, data) -> { + target = ref; + target.tell("going active", self()); + return goTo(Active); + })); + when(Active, matchEventEquals("stop", (event, data) -> { + target.tell("stopping", self()); + return stop(new Failure("This is not the error you're looking for")); + })); + initialize(); + //#logging-fsm + } + //#body-elided + } + //#logging-fsm + + @Test + public void testLoggingFSM() + { + new JavaTestKit(system) {{ + final ActorRef logger = + system.actorOf(Props.create(MyFSM.class)); + final ActorRef probe = getRef(); + + logger.tell(probe, probe); + expectMsgEquals("going active"); + logger.tell("stop", probe); + expectMsgEquals("stopping"); + expectMsgEquals("This is not the error you're looking for"); + expectMsgEquals(Active); + expectMsgEquals(Data.Foo); + String msg = expectMsgClass(String.class); + assertThat(msg, CoreMatchers.startsWith("LogEntry(SomeState,Foo,Actor[akka://FSMDocTest/system/")); + }}; + } +} diff --git a/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSample.java b/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSample.java index 1a12d268fb..3c7c256a99 100644 --- a/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSample.java +++ b/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.actor.japi; diff --git a/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSampleJava8.java b/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSampleJava8.java new file mode 100644 index 0000000000..1466f506a9 --- /dev/null +++ b/akka-docs/rst/java/code/docs/actor/japi/FaultHandlingDocSampleJava8.java @@ -0,0 +1,470 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ +package docs.actor.japi; + +//#all +//#imports +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import akka.actor.*; +import akka.dispatch.Mapper; +import akka.event.LoggingReceive; +import akka.japi.pf.DeciderBuilder; +import akka.japi.pf.ReceiveBuilder; +import akka.util.Timeout; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import scala.concurrent.duration.Duration; +import scala.PartialFunction; +import scala.runtime.BoxedUnit; + +import static akka.japi.Util.classTag; +import static akka.actor.SupervisorStrategy.resume; +import static akka.actor.SupervisorStrategy.restart; +import static akka.actor.SupervisorStrategy.stop; +import static akka.actor.SupervisorStrategy.escalate; + +import static akka.pattern.Patterns.ask; +import static akka.pattern.Patterns.pipe; + +import static docs.actor.japi.FaultHandlingDocSample.WorkerApi.*; +import static docs.actor.japi.FaultHandlingDocSample.CounterServiceApi.*; +import static docs.actor.japi.FaultHandlingDocSample.CounterApi.*; +import static docs.actor.japi.FaultHandlingDocSample.StorageApi.*; + +//#imports + +public class FaultHandlingDocSampleJava8 { + + /** + * Runs the sample + */ + public static void main(String[] args) { + Config config = ConfigFactory.parseString( + "akka.loglevel = \"DEBUG\"\n" + + "akka.actor.debug {\n" + + " receive = on\n" + + " lifecycle = on\n" + + "}\n"); + + ActorSystem system = ActorSystem.create("FaultToleranceSample", config); + ActorRef worker = system.actorOf(Props.create(Worker.class), "worker"); + ActorRef listener = system.actorOf(Props.create(Listener.class), "listener"); + // start the work and listen on progress + // note that the listener is used as sender of the tell, + // i.e. it will receive replies from the worker + worker.tell(Start, listener); + } + + /** + * Listens on progress from the worker and shuts down the system when enough + * work has been done. + */ + public static class Listener extends AbstractLoggingActor { + + @Override + public void preStart() { + // If we don't get any progress within 15 seconds then the service + // is unavailable + context().setReceiveTimeout(Duration.create("15 seconds")); + } + + public Listener() { + receive(LoggingReceive.create(ReceiveBuilder. + match(Progress.class, progress -> { + log().info("Current progress: {} %", progress.percent); + if (progress.percent >= 100.0) { + log().info("That's all, shutting down"); + context().system().shutdown(); + } + }). 
+ matchEquals(ReceiveTimeout.getInstance(), x -> { + // No progress within 15 seconds, ServiceUnavailable + log().error("Shutting down due to unavailable service"); + context().system().shutdown(); + }).build(), context() + )); + } + } + + //#messages + public interface WorkerApi { + public static final Object Start = "Start"; + public static final Object Do = "Do"; + + public static class Progress { + public final double percent; + + public Progress(double percent) { + this.percent = percent; + } + + public String toString() { + return String.format("%s(%s)", getClass().getSimpleName(), percent); + } + } + } + + //#messages + + /** + * Worker performs some work when it receives the Start message. It will + * continuously notify the sender of the Start message of current Progress. + * The Worker supervise the CounterService. + */ + public static class Worker extends AbstractLoggingActor { + final Timeout askTimeout = new Timeout(Duration.create(5, "seconds")); + + // The sender of the initial Start message will continuously be notified + // about progress + ActorRef progressListener; + final ActorRef counterService = context().actorOf( + Props.create(CounterService.class), "counter"); + final int totalCount = 51; + + // Stop the CounterService child if it throws ServiceUnavailable + private static final SupervisorStrategy strategy = + new OneForOneStrategy(DeciderBuilder. + match(ServiceUnavailable.class, e -> stop()). + matchAny(o -> escalate()).build()); + + @Override + public SupervisorStrategy supervisorStrategy() { + return strategy; + } + + public Worker() { + receive(LoggingReceive.create(ReceiveBuilder. + matchEquals(Start, x -> progressListener == null, x -> { + progressListener = sender(); + context().system().scheduler().schedule( + Duration.Zero(), Duration.create(1, "second"), self(), Do, + context().dispatcher(), null + ); + }). + matchEquals(Do, x -> { + counterService.tell(new Increment(1), self()); + counterService.tell(new Increment(1), self()); + counterService.tell(new Increment(1), self()); + // Send current progress to the initial sender + pipe(ask(counterService, GetCurrentCount, askTimeout) + .mapTo(classTag(CurrentCount.class)) + .map(new Mapper() { + public Progress apply(CurrentCount c) { + return new Progress(100.0 * c.count / totalCount); + } + }, context().dispatcher()), context().dispatcher()) + .to(progressListener); + }).build(), context()) + ); + } + } + + //#messages + public interface CounterServiceApi { + + public static final Object GetCurrentCount = "GetCurrentCount"; + + public static class CurrentCount { + public final String key; + public final long count; + + public CurrentCount(String key, long count) { + this.key = key; + this.count = count; + } + + public String toString() { + return String.format("%s(%s, %s)", getClass().getSimpleName(), key, count); + } + } + + public static class Increment { + public final long n; + + public Increment(long n) { + this.n = n; + } + + public String toString() { + return String.format("%s(%s)", getClass().getSimpleName(), n); + } + } + + public static class ServiceUnavailable extends RuntimeException { + private static final long serialVersionUID = 1L; + public ServiceUnavailable(String msg) { + super(msg); + } + } + + } + + //#messages + + /** + * Adds the value received in Increment message to a persistent counter. + * Replies with CurrentCount when it is asked for CurrentCount. CounterService + * supervise Storage and Counter. 
+ */ + public static class CounterService extends AbstractLoggingActor { + + // Reconnect message + static final Object Reconnect = "Reconnect"; + + private static class SenderMsgPair { + final ActorRef sender; + final Object msg; + + SenderMsgPair(ActorRef sender, Object msg) { + this.msg = msg; + this.sender = sender; + } + } + + final String key = self().path().name(); + ActorRef storage; + ActorRef counter; + final List backlog = new ArrayList<>(); + final int MAX_BACKLOG = 10000; + + // Restart the storage child when StorageException is thrown. + // After 3 restarts within 5 seconds it will be stopped. + private static final SupervisorStrategy strategy = + new OneForOneStrategy(3, Duration.create("5 seconds"), DeciderBuilder. + match(StorageException.class, e -> restart()). + matchAny(o -> escalate()).build()); + + @Override + public SupervisorStrategy supervisorStrategy() { + return strategy; + } + + @Override + public void preStart() { + initStorage(); + } + + /** + * The child storage is restarted in case of failure, but after 3 restarts, + * and still failing it will be stopped. Better to back-off than + * continuously failing. When it has been stopped we will schedule a + * Reconnect after a delay. Watch the child so we receive Terminated message + * when it has been terminated. + */ + void initStorage() { + storage = context().watch(context().actorOf( + Props.create(Storage.class), "storage")); + // Tell the counter, if any, to use the new storage + if (counter != null) + counter.tell(new UseStorage(storage), self()); + // We need the initial value to be able to operate + storage.tell(new Get(key), self()); + } + + public CounterService() { + receive(LoggingReceive.create(ReceiveBuilder. + match(Entry.class, entry -> entry.key.equals(key) && counter == null, entry -> { + // Reply from Storage of the initial value, now we can create the Counter + final long value = entry.value; + counter = context().actorOf(Props.create(Counter.class, key, value)); + // Tell the counter to use current storage + counter.tell(new UseStorage(storage), self()); + // and send the buffered backlog to the counter + for (SenderMsgPair each : backlog) { + counter.tell(each.msg, each.sender); + } + backlog.clear(); + }). + match(Increment.class, increment -> { + forwardOrPlaceInBacklog(increment); + }). + matchEquals(GetCurrentCount, gcc -> { + forwardOrPlaceInBacklog(gcc); + }). + match(Terminated.class, o -> { + // After 3 restarts the storage child is stopped. + // We receive Terminated because we watch the child, see initStorage. + storage = null; + // Tell the counter that there is no storage for the moment + counter.tell(new UseStorage(null), self()); + // Try to re-establish storage after while + context().system().scheduler().scheduleOnce( + Duration.create(10, "seconds"), self(), Reconnect, + context().dispatcher(), null); + }). + matchEquals(Reconnect, o -> { + // Re-establish storage after the scheduled delay + initStorage(); + }).build(), context()) + ); + } + + void forwardOrPlaceInBacklog(Object msg) { + // We need the initial value from storage before we can start delegate to + // the counter. Before that we place the messages in a backlog, to be sent + // to the counter when it is initialized. 
+ if (counter == null) { + if (backlog.size() >= MAX_BACKLOG) + throw new ServiceUnavailable("CounterService not available," + + " lack of initial value"); + backlog.add(new SenderMsgPair(sender(), msg)); + } else { + counter.forward(msg, context()); + } + } + } + + //#messages + public interface CounterApi { + public static class UseStorage { + public final ActorRef storage; + + public UseStorage(ActorRef storage) { + this.storage = storage; + } + + public String toString() { + return String.format("%s(%s)", getClass().getSimpleName(), storage); + } + } + } + + //#messages + + /** + * The in memory count variable that will send current value to the Storage, + * if there is any storage available at the moment. + */ + public static class Counter extends AbstractLoggingActor { + final String key; + long count; + ActorRef storage; + + public Counter(String key, long initialValue) { + this.key = key; + this.count = initialValue; + + receive(LoggingReceive.create(ReceiveBuilder. + match(UseStorage.class, useStorage -> { + storage = useStorage.storage; + storeCount(); + }). + match(Increment.class, increment -> { + count += increment.n; + storeCount(); + }). + matchEquals(GetCurrentCount, gcc -> { + sender().tell(new CurrentCount(key, count), self()); + }).build(), context()) + ); + } + + void storeCount() { + // Delegate dangerous work, to protect our valuable state. + // We can continue without storage. + if (storage != null) { + storage.tell(new Store(new Entry(key, count)), self()); + } + } + } + + //#messages + public interface StorageApi { + + public static class Store { + public final Entry entry; + + public Store(Entry entry) { + this.entry = entry; + } + + public String toString() { + return String.format("%s(%s)", getClass().getSimpleName(), entry); + } + } + + public static class Entry { + public final String key; + public final long value; + + public Entry(String key, long value) { + this.key = key; + this.value = value; + } + + public String toString() { + return String.format("%s(%s, %s)", getClass().getSimpleName(), key, value); + } + } + + public static class Get { + public final String key; + + public Get(String key) { + this.key = key; + } + + public String toString() { + return String.format("%s(%s)", getClass().getSimpleName(), key); + } + } + + public static class StorageException extends RuntimeException { + private static final long serialVersionUID = 1L; + public StorageException(String msg) { + super(msg); + } + } + } + + //#messages + + /** + * Saves key/value pairs to persistent storage when receiving Store message. + * Replies with current value when receiving Get message. Will throw + * StorageException if the underlying data store is out of order. + */ + public static class Storage extends AbstractLoggingActor { + + final DummyDB db = DummyDB.instance; + + public Storage() { + receive(LoggingReceive.create(ReceiveBuilder. + match(Store.class, store -> { + db.save(store.entry.key, store.entry.value); + }). + match(Get.class, get -> { + Long value = db.load(get.key); + sender().tell(new Entry(get.key, value == null ? 
+ Long.valueOf(0L) : value), self()); + }).build(), context()) + ); + } + } + + //#dummydb + public static class DummyDB { + public static final DummyDB instance = new DummyDB(); + private final Map db = new HashMap(); + + private DummyDB() { + } + + public synchronized void save(String key, Long value) throws StorageException { + if (11 <= value && value <= 14) + throw new StorageException("Simulated store failure " + value); + db.put(key, value); + } + + public synchronized Long load(String key) throws StorageException { + return db.get(key); + } + } + //#dummydb +} +//#all diff --git a/akka-docs/rst/java/code/docs/actorlambda/ActorDocTest.java b/akka-docs/rst/java/code/docs/actorlambda/ActorDocTest.java index 926c564973..242c5d56d8 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/ActorDocTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/ActorDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/FaultHandlingTest.java b/akka-docs/rst/java/code/docs/actorlambda/FaultHandlingTest.java index bfd225c297..579c0a61d7 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/FaultHandlingTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/FaultHandlingTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/InitializationDocTest.java b/akka-docs/rst/java/code/docs/actorlambda/InitializationDocTest.java index cefe176db9..236cd0bb42 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/InitializationDocTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/InitializationDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/Messages.java b/akka-docs/rst/java/code/docs/actorlambda/Messages.java index 788c94b383..adb9ac81a4 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/Messages.java +++ b/akka-docs/rst/java/code/docs/actorlambda/Messages.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/MyActor.java b/akka-docs/rst/java/code/docs/actorlambda/MyActor.java index 9dec6d186c..ef2d8d4269 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/MyActor.java +++ b/akka-docs/rst/java/code/docs/actorlambda/MyActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/SampleActor.java b/akka-docs/rst/java/code/docs/actorlambda/SampleActor.java index 6e7e1e362d..22d736c4f2 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/SampleActor.java +++ b/akka-docs/rst/java/code/docs/actorlambda/SampleActor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/SampleActorTest.java b/akka-docs/rst/java/code/docs/actorlambda/SampleActorTest.java index d04b864f03..141225570f 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/SampleActorTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/SampleActorTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda; diff --git a/akka-docs/rst/java/code/docs/actorlambda/fsm/Buncher.java b/akka-docs/rst/java/code/docs/actorlambda/fsm/Buncher.java index 9e34d15cde..4209e9656c 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/fsm/Buncher.java +++ b/akka-docs/rst/java/code/docs/actorlambda/fsm/Buncher.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda.fsm; diff --git a/akka-docs/rst/java/code/docs/actorlambda/fsm/BuncherTest.java b/akka-docs/rst/java/code/docs/actorlambda/fsm/BuncherTest.java index a443c0a6c4..fe52e96796 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/fsm/BuncherTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/fsm/BuncherTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda.fsm; diff --git a/akka-docs/rst/java/code/docs/actorlambda/fsm/Events.java b/akka-docs/rst/java/code/docs/actorlambda/fsm/Events.java index c39bc15bf5..0edd2ea069 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/fsm/Events.java +++ b/akka-docs/rst/java/code/docs/actorlambda/fsm/Events.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda.fsm; diff --git a/akka-docs/rst/java/code/docs/actorlambda/fsm/FSMDocTest.java b/akka-docs/rst/java/code/docs/actorlambda/fsm/FSMDocTest.java index 1fa74e1962..8e5636198c 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/fsm/FSMDocTest.java +++ b/akka-docs/rst/java/code/docs/actorlambda/fsm/FSMDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda.fsm; diff --git a/akka-docs/rst/java/code/docs/actorlambda/japi/FaultHandlingDocSample.java b/akka-docs/rst/java/code/docs/actorlambda/japi/FaultHandlingDocSample.java index a623b9dbae..7d184e121e 100644 --- a/akka-docs/rst/java/code/docs/actorlambda/japi/FaultHandlingDocSample.java +++ b/akka-docs/rst/java/code/docs/actorlambda/japi/FaultHandlingDocSample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actorlambda.japi; diff --git a/akka-docs/rst/java/code/docs/agent/AgentDocJavaSpec.scala b/akka-docs/rst/java/code/docs/agent/AgentDocJavaSpec.scala index 83b67cb885..c2453364b2 100644 --- a/akka-docs/rst/java/code/docs/agent/AgentDocJavaSpec.scala +++ b/akka-docs/rst/java/code/docs/agent/AgentDocJavaSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.agent diff --git a/akka-docs/rst/java/code/docs/agent/AgentDocTest.java b/akka-docs/rst/java/code/docs/agent/AgentDocTest.java index 6274510452..0f5e17872c 100644 --- a/akka-docs/rst/java/code/docs/agent/AgentDocTest.java +++ b/akka-docs/rst/java/code/docs/agent/AgentDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.agent; diff --git a/akka-docs/rst/java/code/docs/cluster/ClusterDocTest.java b/akka-docs/rst/java/code/docs/cluster/ClusterDocTest.java index 75b4162c9b..b74c8eb127 100644 --- a/akka-docs/rst/java/code/docs/cluster/ClusterDocTest.java +++ b/akka-docs/rst/java/code/docs/cluster/ClusterDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.cluster; diff --git a/akka-docs/rst/java/code/docs/ddata/DataBot.java b/akka-docs/rst/java/code/docs/ddata/DataBot.java index ec2388e020..8bb9bdf4d1 100644 --- a/akka-docs/rst/java/code/docs/ddata/DataBot.java +++ b/akka-docs/rst/java/code/docs/ddata/DataBot.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata; diff --git a/akka-docs/rst/java/code/docs/ddata/DistributedDataDocTest.java b/akka-docs/rst/java/code/docs/ddata/DistributedDataDocTest.java index dd07bcdafc..f69c4312f5 100644 --- a/akka-docs/rst/java/code/docs/ddata/DistributedDataDocTest.java +++ b/akka-docs/rst/java/code/docs/ddata/DistributedDataDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata; diff --git a/akka-docs/rst/java/code/docs/ddata/japi/TwoPhaseSet.java b/akka-docs/rst/java/code/docs/ddata/japi/TwoPhaseSet.java index ca99fabd56..ffefa99f71 100644 --- a/akka-docs/rst/java/code/docs/ddata/japi/TwoPhaseSet.java +++ b/akka-docs/rst/java/code/docs/ddata/japi/TwoPhaseSet.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata.japi; diff --git a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer.java b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer.java index c75e0d194d..07261b5c9d 100644 --- a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer.java +++ b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata.japi.protobuf; diff --git a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer2.java b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer2.java index 3497b02ff3..2a3709ead8 100644 --- a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer2.java +++ b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializer2.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata.japi.protobuf; diff --git a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializerWithCompression.java b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializerWithCompression.java index 2927dd3e67..d439fffee4 100644 --- a/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializerWithCompression.java +++ b/akka-docs/rst/java/code/docs/ddata/japi/protobuf/TwoPhaseSetSerializerWithCompression.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.ddata.japi.protobuf; diff --git a/akka-docs/rst/java/code/docs/dispatcher/DispatcherDocTest.java b/akka-docs/rst/java/code/docs/dispatcher/DispatcherDocTest.java index 116788f53e..0ea2b35ffc 100644 --- a/akka-docs/rst/java/code/docs/dispatcher/DispatcherDocTest.java +++ b/akka-docs/rst/java/code/docs/dispatcher/DispatcherDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.dispatcher; diff --git a/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMailbox.java b/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMailbox.java index 71f99f9de6..f98414e77d 100644 --- a/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMailbox.java +++ b/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMailbox.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.dispatcher; diff --git a/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMessageQueueSemantics.java b/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMessageQueueSemantics.java index e5ac36ad9c..a84dd38fab 100644 --- a/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMessageQueueSemantics.java +++ b/akka-docs/rst/java/code/docs/dispatcher/MyUnboundedJMessageQueueSemantics.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.dispatcher; diff --git a/akka-docs/rst/java/code/docs/event/EventBusDocTest.java b/akka-docs/rst/java/code/docs/event/EventBusDocTest.java index 63088fd8ef..2dd4ae911b 100644 --- a/akka-docs/rst/java/code/docs/event/EventBusDocTest.java +++ b/akka-docs/rst/java/code/docs/event/EventBusDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.event; diff --git a/akka-docs/rst/java/code/docs/event/LoggingDocTest.java b/akka-docs/rst/java/code/docs/event/LoggingDocTest.java index c1f5bed2c4..b47638c710 100644 --- a/akka-docs/rst/java/code/docs/event/LoggingDocTest.java +++ b/akka-docs/rst/java/code/docs/event/LoggingDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.event; diff --git a/akka-docs/rst/java/code/docs/extension/ExtensionDocTest.java b/akka-docs/rst/java/code/docs/extension/ExtensionDocTest.java index 0c096c027c..fe46d37d1c 100644 --- a/akka-docs/rst/java/code/docs/extension/ExtensionDocTest.java +++ b/akka-docs/rst/java/code/docs/extension/ExtensionDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.extension; diff --git a/akka-docs/rst/java/code/docs/extension/SettingsExtensionDocTest.java b/akka-docs/rst/java/code/docs/extension/SettingsExtensionDocTest.java index 156261b4bb..058bac5a70 100644 --- a/akka-docs/rst/java/code/docs/extension/SettingsExtensionDocTest.java +++ b/akka-docs/rst/java/code/docs/extension/SettingsExtensionDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.extension; diff --git a/akka-docs/rst/java/code/docs/future/FutureDocTest.java b/akka-docs/rst/java/code/docs/future/FutureDocTest.java index 71157aba09..be0d2b16a5 100644 --- a/akka-docs/rst/java/code/docs/future/FutureDocTest.java +++ b/akka-docs/rst/java/code/docs/future/FutureDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.future; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java b/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java similarity index 78% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java index 6879b1a969..e2a15eee8a 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/HttpClientExampleDocTest.java @@ -1,11 +1,12 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl; import akka.actor.AbstractActor; import akka.actor.ActorSystem; +import akka.http.javadsl.ConnectHttp; import akka.http.javadsl.HostConnectionPool; import akka.japi.Pair; @@ -20,7 +21,10 @@ import akka.http.javadsl.model.*; import akka.http.javadsl.Http; import scala.util.Try; -import static akka.pattern.Patterns.*; +import static akka.http.javadsl.ConnectHttp.toHost; +import static akka.pattern.PatternsCS.*; + +import java.util.concurrent.CompletionStage; @SuppressWarnings("unused") public class HttpClientExampleDocTest { @@ -32,9 +36,9 @@ public class HttpClientExampleDocTest { final ActorSystem system = ActorSystem.create(); final ActorMaterializer materializer = ActorMaterializer.create(system); - final Flow> connectionFlow = - Http.get(system).outgoingConnection("akka.io", 80); - final Future responseFuture = + final Flow> connectionFlow = + Http.get(system).outgoingConnection(toHost("akka.io", 80)); + final CompletionStage responseFuture = Source.single(HttpRequest.create("/")) .via(connectionFlow) .runWith(Sink.head(), materializer); @@ -52,11 +56,11 @@ public class HttpClientExampleDocTest { Pair, Pair, Integer>, HostConnectionPool> poolClientFlow = - Http.get(system).cachedHostConnectionPool("akka.io", 80, materializer); + Http.get(system).cachedHostConnectionPool(toHost("akka.io", 80), materializer); // construct a pool client flow with context type `Integer` - final Future, Integer>> responseFuture = + final CompletionStage, Integer>> responseFuture = Source .single(Pair.create(HttpRequest.create("/"), 42)) .via(poolClientFlow) @@ -70,7 +74,7 @@ public class HttpClientExampleDocTest { final ActorSystem system = ActorSystem.create(); final Materializer materializer = ActorMaterializer.create(system); - final Future responseFuture = + final CompletionStage responseFuture = Http.get(system) .singleRequest(HttpRequest.create("http://akka.io"), materializer); //#single-request-example @@ -90,7 +94,7 @@ public class HttpClientExampleDocTest { }).build()); } - Future fetch(String url) { + CompletionStage fetch(String url) { return http.singleRequest(HttpRequest.create(url), materializer); } } diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/ModelDocTest.java b/akka-docs/rst/java/code/docs/http/javadsl/ModelDocTest.java similarity index 86% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/ModelDocTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/ModelDocTest.java index 56dbff1f71..e2fee1ed78 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/ModelDocTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/ModelDocTest.java @@ -1,16 +1,17 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl; -import akka.japi.Option; import akka.util.ByteString; import org.junit.Test; //#import-model import akka.http.javadsl.model.*; import akka.http.javadsl.model.headers.*; + +import java.util.Optional; //#import-model @SuppressWarnings("unused") @@ -79,12 +80,12 @@ public class ModelDocTest { //#headers // a method that extracts basic HTTP credentials from a request - private Option getCredentialsOfRequest(HttpRequest request) { - Option auth = request.getHeader(Authorization.class); - if (auth.isDefined() && auth.get().credentials() instanceof BasicHttpCredentials) - return Option.some((BasicHttpCredentials) auth.get().credentials()); + private Optional getCredentialsOfRequest(HttpRequest request) { + Optional auth = request.getHeader(Authorization.class); + if (auth.isPresent() && auth.get().credentials() instanceof BasicHttpCredentials) + return Optional.of((BasicHttpCredentials) auth.get().credentials()); else - return Option.none(); + return Optional.empty(); } //#headers } diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java similarity index 96% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java index 705c89ef35..76c4a90753 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/FormFieldRequestValsExampleTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java similarity index 96% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java index 232d6eff2e..2f207c6524 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HeaderRequestValsExampleTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java similarity index 54% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java index 475fa1d8d6..806aa8fc6a 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java @@ -1,39 +1,32 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl.server; //#binding-failure-high-level-example import akka.actor.ActorSystem; -import akka.dispatch.OnFailure; -import akka.http.javadsl.model.ContentTypes; -import akka.http.javadsl.server.*; -import akka.http.javadsl.server.values.Parameters; import akka.http.scaladsl.Http; -import scala.concurrent.Future; import java.io.IOException; +import java.util.concurrent.CompletionStage; -@SuppressWarnings("unchecked") public class HighLevelServerBindFailureExample { public static void main(String[] args) throws IOException { // boot up server using the route as defined below final ActorSystem system = ActorSystem.create(); // HttpApp.bindRoute expects a route being provided by HttpApp.createRoute - Future bindingFuture = + CompletionStage bindingFuture = new HighLevelServerExample().bindRoute("localhost", 8080, system); - bindingFuture.onFailure(new OnFailure() { - @Override - public void onFailure(Throwable failure) throws Throwable { + bindingFuture.exceptionally(failure -> { System.err.println("Something very bad happened! " + failure.getMessage()); - system.shutdown(); - } - }, system.dispatcher()); + system.terminate(); + return null; + }); - system.shutdown(); + system.terminate(); } } //#binding-failure-high-level-example diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java similarity index 96% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java index f59547cb23..9a70e9d918 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HighLevelServerExample.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; @@ -22,7 +22,7 @@ public class HighLevelServerExample extends HttpApp { new HighLevelServerExample().bindRoute("localhost", 8080, system); System.out.println("Type RETURN to exit"); System.in.read(); - system.shutdown(); + system.terminate(); } // A RequestVal is a type-safe representation of some aspect of the request. diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java similarity index 90% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java index 624b34e441..c4a63237c4 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpBasicAuthenticatorExample.java @@ -1,8 +1,9 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl.server; +import java.util.Optional; import akka.http.javadsl.model.HttpRequest; import akka.http.javadsl.model.headers.Host; import akka.http.javadsl.server.Handler1; @@ -13,6 +14,10 @@ import akka.http.javadsl.server.values.BasicCredentials; import akka.http.javadsl.server.values.HttpBasicAuthenticator; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.scaladsl.model.headers.Authorization; + +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; import scala.Option; import scala.concurrent.Future; @@ -27,7 +32,7 @@ public class HttpBasicAuthenticatorExample extends JUnitRouteTest { private final String hardcodedPassword = "correcthorsebatterystaple"; - public Future> authenticate(BasicCredentials credentials) { + public CompletionStage> authenticate(BasicCredentials credentials) { // this is where your actual authentication logic would go if (credentials.available() && // no anonymous access credentials.verify(hardcodedPassword)) { @@ -71,4 +76,4 @@ public class HttpBasicAuthenticatorExample extends JUnitRouteTest { } -} \ No newline at end of file +} diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java similarity index 63% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java index e159b630db..3cabf1bd1d 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/HttpServerExampleDocTest.java @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; +import akka.NotUsed; import akka.actor.ActorSystem; import akka.dispatch.OnFailure; import akka.http.impl.util.Util; @@ -29,13 +30,10 @@ import akka.stream.stage.PushStage; import akka.stream.stage.SyncDirective; import akka.stream.stage.TerminationDirective; import akka.util.ByteString; -import scala.concurrent.Await; -import scala.concurrent.Future; -import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; @SuppressWarnings("unused") @@ -46,20 +44,17 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... 
and then actually handle the connection + } + )).run(materializer); //#binding-example - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void bindingFailureExample() throws Exception { @@ -67,27 +62,21 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 80, materializer); - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... and then actually handle the connection + } + )).run(materializer); - serverBindingFuture.onFailure(new OnFailure() { - @Override - public void onFailure(Throwable failure) throws Throwable { + serverBindingFuture.whenCompleteAsync((binding, failure) -> { // possibly report the failure somewhere... - } }, system.dispatcher()); //#binding-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void connectionSourceFailureExample() throws Exception { @@ -95,10 +84,10 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); - Flow failureDetection = + Flow failureDetection = Flow.of(IncomingConnection.class).transform(() -> new PushStage() { @Override @@ -113,19 +102,16 @@ public class HttpServerExampleDocTest { } }); - Future serverBindingFuture = + CompletionStage serverBindingFuture = serverSource .via(failureDetection) // feed signals through our custom stage - .to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); - // ... and then actually handle the connection - } - })).run(materializer); + .to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); + // ... 
and then actually handle the connection + })) + .run(materializer); //#incoming-connections-source-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void connectionStreamFailureExample() throws Exception { @@ -133,10 +119,10 @@ public class HttpServerExampleDocTest { ActorSystem system = ActorSystem.create(); Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); - Flow failureDetection = + Flow failureDetection = Flow.of(HttpRequest.class).transform(() -> new PushStage() { @Override @@ -151,7 +137,7 @@ public class HttpServerExampleDocTest { } }); - Flow httpEcho = + Flow httpEcho = Flow.of(HttpRequest.class) .via(failureDetection) .map(request -> { @@ -162,14 +148,14 @@ public class HttpServerExampleDocTest { .withEntity(entity); }); - Future serverBindingFuture = - serverSource.to(Sink.foreach(con -> { - System.out.println("Accepted new connection from " + con.remoteAddress()); - con.handleWith(httpEcho, materializer); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(conn -> { + System.out.println("Accepted new connection from " + conn.remoteAddress()); + conn.handleWith(httpEcho, materializer); } )).run(materializer); //#connection-stream-failure-handling - Await.result(serverBindingFuture, new FiniteDuration(3, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); } public static void fullServerExample() throws Exception { @@ -180,7 +166,7 @@ public class HttpServerExampleDocTest { //#full-server-example final Materializer materializer = ActorMaterializer.create(system); - Source> serverSource = + Source> serverSource = Http.get(system).bind("localhost", 8080, materializer); //#request-handler @@ -202,7 +188,7 @@ public class HttpServerExampleDocTest { .withEntity(ContentTypes.TEXT_HTML_UTF8, "Hello world!"); else if (uri.path().equals("/hello")) { - String name = Util.getOrElse(uri.query().get("name"), "Mister X"); + String name = uri.query().get("name").orElse("Mister X"); return HttpResponse.create() @@ -218,25 +204,21 @@ public class HttpServerExampleDocTest { }; //#request-handler - Future serverBindingFuture = - serverSource.to(Sink.foreach( - new Procedure() { - @Override - public void apply(IncomingConnection connection) throws Exception { - System.out.println("Accepted new connection from " + connection.remoteAddress()); + CompletionStage serverBindingFuture = + serverSource.to(Sink.foreach(connection -> { + System.out.println("Accepted new connection from " + connection.remoteAddress()); - connection.handleWithSyncHandler(requestHandler, materializer); - // this is equivalent to - //connection.handleWith(Flow.of(HttpRequest.class).map(requestHandler), materializer); - } + connection.handleWithSyncHandler(requestHandler, materializer); + // this is equivalent to + //connection.handleWith(Flow.of(HttpRequest.class).map(requestHandler), materializer); })).run(materializer); //#full-server-example - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); // will throw if binding fails + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); // will throw if binding fails System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { - system.shutdown(); + system.terminate(); } } public static 
void main(String[] args) throws Exception { diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java similarity index 92% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java index 3152179791..44a1865581 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/OAuth2AuthenticatorExample.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; @@ -16,6 +16,10 @@ import akka.http.javadsl.server.values.OAuth2Authenticator; import akka.http.javadsl.server.values.OAuth2Credentials; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.scaladsl.model.headers.Authorization; + +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; import scala.Option; import scala.concurrent.Future; @@ -31,7 +35,7 @@ public class OAuth2AuthenticatorExample extends JUnitRouteTest { private final String hardcodedToken = "token"; @Override - public Future> authenticate(OAuth2Credentials credentials) { + public CompletionStage> authenticate(OAuth2Credentials credentials) { // this is where your actual authentication logic would go, looking up the user // based on the token or something in that direction if (credentials.available() && // no anonymous access diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java similarity index 97% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java index 313cdf63c0..e720cc621f 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/PathDirectiveExampleTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketCoreExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java similarity index 85% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketCoreExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java index 184668603a..e8232a2aa4 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketCoreExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl.server; @@ -7,8 +7,10 @@ package docs.http.javadsl.server; //#websocket-example-using-core import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; +import akka.NotUsed; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; @@ -29,15 +31,15 @@ import akka.http.javadsl.model.HttpRequest; import akka.http.javadsl.model.HttpResponse; import akka.http.javadsl.model.ws.Message; import akka.http.javadsl.model.ws.TextMessage; -import akka.http.javadsl.model.ws.Websocket; +import akka.http.javadsl.model.ws.WebSocket; -public class WebsocketCoreExample { +public class WebSocketCoreExample { //#websocket-handling public static HttpResponse handleRequest(HttpRequest request) { System.out.println("Handling request to " + request.getUri()); if (request.getUri().path().equals("/greeter")) - return Websocket.handleWebsocketRequestWith(request, greeter()); + return WebSocket.handleWebSocketRequestWith(request, greeter()); else return HttpResponse.create().withStatus(404); } @@ -49,7 +51,7 @@ public class WebsocketCoreExample { try { final Materializer materializer = ActorMaterializer.create(system); - Future serverBindingFuture = + CompletionStage serverBindingFuture = Http.get(system).bindAndHandleSync( new Function() { public HttpResponse apply(HttpRequest request) throws Exception { @@ -58,11 +60,11 @@ public class WebsocketCoreExample { }, "localhost", 8080, materializer); // will throw if binding fails - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { - system.shutdown(); + system.terminate(); } } @@ -71,7 +73,7 @@ public class WebsocketCoreExample { * A handler that treats incoming messages as a name, * and responds with a greeting to that name */ - public static Flow greeter() { + public static Flow greeter() { return Flow.create() .collect(new JavaPartialFunction() { diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketRoutingExample.java b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketRoutingExample.java similarity index 90% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketRoutingExample.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketRoutingExample.java index 343168491a..9890e7114b 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketRoutingExample.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/WebSocketRoutingExample.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl.server; @@ -16,13 +16,13 @@ import akka.http.javadsl.model.ws.TextMessage; import akka.http.javadsl.server.HttpApp; -public class WebsocketRoutingExample extends HttpApp { +public class WebSocketRoutingExample extends HttpApp { //#websocket-route @Override public Route createRoute() { return path("greeter").route( - handleWebsocketMessages(greeter()) + handleWebSocketMessages(greeter()) ); } //#websocket-route diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java similarity index 97% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java index a3c5b2ada5..ef0a86bc7a 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/HostDirectivesExamplesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.http.javadsl.server.directives; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java similarity index 98% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java index eea0be97a2..b2cfcad25b 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/directives/MethodDirectivesExamplesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.http.javadsl.server.directives; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java b/akka-docs/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java similarity index 92% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java index 8f2bb068f5..0295a720bb 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/testkit/MyAppService.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server.testkit; diff --git a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java b/akka-docs/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java similarity index 95% rename from akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java rename to akka-docs/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java index bddd124a38..89708ebf35 100644 --- a/akka-docs-dev/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java +++ b/akka-docs/rst/java/code/docs/http/javadsl/server/testkit/TestkitExampleTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.javadsl.server.testkit; diff --git a/akka-docs/rst/java/code/docs/io/IODocTest.java b/akka-docs/rst/java/code/docs/io/IODocTest.java index 1721e9550e..5aa6e9eba7 100644 --- a/akka-docs/rst/java/code/docs/io/IODocTest.java +++ b/akka-docs/rst/java/code/docs/io/IODocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io; diff --git a/akka-docs/rst/java/code/docs/io/JavaReadBackPressure.java b/akka-docs/rst/java/code/docs/io/JavaReadBackPressure.java index f1ce04de23..c127ffbbab 100644 --- a/akka-docs/rst/java/code/docs/io/JavaReadBackPressure.java +++ b/akka-docs/rst/java/code/docs/io/JavaReadBackPressure.java @@ -13,7 +13,7 @@ import java.util.ArrayList; import java.util.List; /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ public class JavaReadBackPressure { diff --git a/akka-docs/rst/java/code/docs/io/JavaUdpMulticast.java b/akka-docs/rst/java/code/docs/io/JavaUdpMulticast.java index ac7e67136a..c080dea469 100644 --- a/akka-docs/rst/java/code/docs/io/JavaUdpMulticast.java +++ b/akka-docs/rst/java/code/docs/io/JavaUdpMulticast.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io; diff --git a/akka-docs/rst/java/code/docs/io/JavaUdpMulticastTest.java b/akka-docs/rst/java/code/docs/io/JavaUdpMulticastTest.java index 151d2a494a..d79d8ef2be 100644 --- a/akka-docs/rst/java/code/docs/io/JavaUdpMulticastTest.java +++ b/akka-docs/rst/java/code/docs/io/JavaUdpMulticastTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io; diff --git a/akka-docs/rst/java/code/docs/io/UdpConnectedDocTest.java b/akka-docs/rst/java/code/docs/io/UdpConnectedDocTest.java index ec7ad56576..b7b16ffba7 100644 --- a/akka-docs/rst/java/code/docs/io/UdpConnectedDocTest.java +++ b/akka-docs/rst/java/code/docs/io/UdpConnectedDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io; diff --git a/akka-docs/rst/java/code/docs/io/UdpDocTest.java b/akka-docs/rst/java/code/docs/io/UdpDocTest.java index 59ede3ce7a..d994625b07 100644 --- a/akka-docs/rst/java/code/docs/io/UdpDocTest.java +++ b/akka-docs/rst/java/code/docs/io/UdpDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io; diff --git a/akka-docs/rst/java/code/docs/io/japi/EchoHandler.java b/akka-docs/rst/java/code/docs/io/japi/EchoHandler.java index dcb57e362b..1a9b32e2ac 100644 --- a/akka-docs/rst/java/code/docs/io/japi/EchoHandler.java +++ b/akka-docs/rst/java/code/docs/io/japi/EchoHandler.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/io/japi/EchoManager.java b/akka-docs/rst/java/code/docs/io/japi/EchoManager.java index b3963e7a56..b9b460c6a5 100644 --- a/akka-docs/rst/java/code/docs/io/japi/EchoManager.java +++ b/akka-docs/rst/java/code/docs/io/japi/EchoManager.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/io/japi/EchoServer.java b/akka-docs/rst/java/code/docs/io/japi/EchoServer.java index 0f31a87ffe..83befeeedc 100644 --- a/akka-docs/rst/java/code/docs/io/japi/EchoServer.java +++ b/akka-docs/rst/java/code/docs/io/japi/EchoServer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/io/japi/IODocTest.java b/akka-docs/rst/java/code/docs/io/japi/IODocTest.java index 7d92cb4477..fceec37de1 100644 --- a/akka-docs/rst/java/code/docs/io/japi/IODocTest.java +++ b/akka-docs/rst/java/code/docs/io/japi/IODocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/io/japi/Message.java b/akka-docs/rst/java/code/docs/io/japi/Message.java index 4199fe096f..d03ec173fe 100644 --- a/akka-docs/rst/java/code/docs/io/japi/Message.java +++ b/akka-docs/rst/java/code/docs/io/japi/Message.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2013-2015 Typesafe Inc. + * Copyright (C) 2013-2016 Typesafe Inc. */ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/io/japi/SimpleEchoHandler.java b/akka-docs/rst/java/code/docs/io/japi/SimpleEchoHandler.java index 0833869f53..9512818bed 100644 --- a/akka-docs/rst/java/code/docs/io/japi/SimpleEchoHandler.java +++ b/akka-docs/rst/java/code/docs/io/japi/SimpleEchoHandler.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io.japi; diff --git a/akka-docs/rst/java/code/docs/jrouting/ConsistentHashingRouterDocTest.java b/akka-docs/rst/java/code/docs/jrouting/ConsistentHashingRouterDocTest.java index 9d072cd437..0673211239 100644 --- a/akka-docs/rst/java/code/docs/jrouting/ConsistentHashingRouterDocTest.java +++ b/akka-docs/rst/java/code/docs/jrouting/ConsistentHashingRouterDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.jrouting; diff --git a/akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java b/akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java index 88fa331649..d346d70ec5 100644 --- a/akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java +++ b/akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.jrouting; diff --git a/akka-docs/rst/java/code/docs/jrouting/RedundancyGroup.java b/akka-docs/rst/java/code/docs/jrouting/RedundancyGroup.java index 9062e95710..7f24d69419 100644 --- a/akka-docs/rst/java/code/docs/jrouting/RedundancyGroup.java +++ b/akka-docs/rst/java/code/docs/jrouting/RedundancyGroup.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.jrouting; diff --git a/akka-docs/rst/java/code/docs/jrouting/RouterDocTest.java b/akka-docs/rst/java/code/docs/jrouting/RouterDocTest.java index 9f92400105..3a4b5a0cba 100644 --- a/akka-docs/rst/java/code/docs/jrouting/RouterDocTest.java +++ b/akka-docs/rst/java/code/docs/jrouting/RouterDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.jrouting; diff --git a/akka-docs/rst/java/code/docs/pattern/BackoffSupervisorDocTest.java b/akka-docs/rst/java/code/docs/pattern/BackoffSupervisorDocTest.java index 7d09ec18b4..9bdfcbc46f 100644 --- a/akka-docs/rst/java/code/docs/pattern/BackoffSupervisorDocTest.java +++ b/akka-docs/rst/java/code/docs/pattern/BackoffSupervisorDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern; diff --git a/akka-docs/rst/java/code/docs/pattern/JavaTemplate.java b/akka-docs/rst/java/code/docs/pattern/JavaTemplate.java index 0c0e46167a..71b876faaf 100644 --- a/akka-docs/rst/java/code/docs/pattern/JavaTemplate.java +++ b/akka-docs/rst/java/code/docs/pattern/JavaTemplate.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern; diff --git a/akka-docs/rst/java/code/docs/pattern/SchedulerPatternTest.java b/akka-docs/rst/java/code/docs/pattern/SchedulerPatternTest.java index 18586a5880..2a20345b08 100644 --- a/akka-docs/rst/java/code/docs/pattern/SchedulerPatternTest.java +++ b/akka-docs/rst/java/code/docs/pattern/SchedulerPatternTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern; diff --git a/akka-docs/rst/java/code/docs/persistence/LambdaPersistenceDocTest.java b/akka-docs/rst/java/code/docs/persistence/LambdaPersistenceDocTest.java index c130db275c..0f1740f84f 100644 --- a/akka-docs/rst/java/code/docs/persistence/LambdaPersistenceDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/LambdaPersistenceDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/LambdaPersistencePluginDocTest.java b/akka-docs/rst/java/code/docs/persistence/LambdaPersistencePluginDocTest.java index a485947113..5a7a5b8861 100644 --- a/akka-docs/rst/java/code/docs/persistence/LambdaPersistencePluginDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/LambdaPersistencePluginDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java index 506b31ec9c..15eb43e28a 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceEventAdapterDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceEventAdapterDocTest.java index 896fb3278b..526ec7af18 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceEventAdapterDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceEventAdapterDocTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceMultiDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceMultiDocTest.java index 85c6a7d8ec..c9d09d01d0 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceMultiDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceMultiDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java index fb63e2165b..02b0e45dfe 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java index 3add0a05ce..12130eebdd 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceQueryDocTest.java @@ -1,13 +1,15 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; -import static akka.pattern.Patterns.ask; +import static akka.pattern.PatternsCS.ask; import java.util.HashSet; import java.util.Set; import java.util.Iterator; + +import akka.NotUsed; import com.typesafe.config.Config; import akka.actor.*; @@ -40,6 +42,7 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public class PersistenceQueryDocTest { @@ -113,27 +116,27 @@ public class PersistenceQueryDocTest { } @Override - public Source eventsByTag(String tag, long offset) { + public Source eventsByTag(String tag, long offset) { final Props props = MyEventsByTagPublisher.props(tag, offset, refreshInterval); return Source.actorPublisher(props). 
- mapMaterializedValue(m -> BoxedUnit.UNIT); + mapMaterializedValue(m -> NotUsed.getInstance()); } @Override - public Source eventsByPersistenceId(String persistenceId, + public Source eventsByPersistenceId(String persistenceId, long fromSequenceNr, long toSequenceNr) { // implement in a similar way as eventsByTag throw new UnsupportedOperationException("Not implemented yet"); } @Override - public Source allPersistenceIds() { + public Source allPersistenceIds() { // implement in a similar way as eventsByTag throw new UnsupportedOperationException("Not implemented yet"); } @Override - public Source currentPersistenceIds() { + public Source currentPersistenceIds() { // implement in a similar way as eventsByTag throw new UnsupportedOperationException("Not implemented yet"); } @@ -166,25 +169,25 @@ public class PersistenceQueryDocTest { } @Override - public akka.stream.scaladsl.Source eventsByTag( + public akka.stream.scaladsl.Source eventsByTag( String tag, long offset) { return javadslReadJournal.eventsByTag(tag, offset).asScala(); } @Override - public akka.stream.scaladsl.Source eventsByPersistenceId( + public akka.stream.scaladsl.Source eventsByPersistenceId( String persistenceId, long fromSequenceNr, long toSequenceNr) { return javadslReadJournal.eventsByPersistenceId(persistenceId, fromSequenceNr, toSequenceNr).asScala(); } @Override - public akka.stream.scaladsl.Source allPersistenceIds() { + public akka.stream.scaladsl.Source allPersistenceIds() { return javadslReadJournal.allPersistenceIds().asScala(); } @Override - public akka.stream.scaladsl.Source currentPersistenceIds() { + public akka.stream.scaladsl.Source currentPersistenceIds() { return javadslReadJournal.currentPersistenceIds().asScala(); } @@ -209,7 +212,7 @@ public class PersistenceQueryDocTest { "akka.persistence.query.my-read-journal"); // issue query to journal - Source source = + Source source = readJournal.eventsByPersistenceId("user-1337", 0, Long.MAX_VALUE); // materialize stream, consuming events @@ -262,7 +265,7 @@ public class PersistenceQueryDocTest { //#events-by-tag // assuming journal is able to work with numeric offsets we can: - final Source blueThings = + final Source blueThings = readJournal.eventsByTag("blue", 0L); // find top 10 blue things: @@ -276,7 +279,7 @@ public class PersistenceQueryDocTest { }, mat); // start another query, from the known offset - Source blue = readJournal.eventsByTag("blue", 10); + Source blue = readJournal.eventsByTag("blue", 10); //#events-by-tag } @@ -341,7 +344,7 @@ public class PersistenceQueryDocTest { //#projection-into-different-store-simple-classes class ExampleStore { - Future save(Object any) { + CompletionStage save(Object any) { // ... //#projection-into-different-store-simple-classes return null; @@ -377,13 +380,13 @@ public class PersistenceQueryDocTest { this.name = name; } - public Future saveProgress(long offset) { + public CompletionStage saveProgress(long offset) { // ... //#projection-into-different-store return null; //#projection-into-different-store } - public Future latestOffset() { + public CompletionStage latestOffset() { // ... 
//#projection-into-different-store return null; @@ -410,17 +413,13 @@ public class PersistenceQueryDocTest { final Props writerProps = Props.create(TheOneWhoWritesToQueryJournal.class, "bid"); final ActorRef writer = system.actorOf(writerProps, "bid-projection-writer"); - long startFromOffset = Await.result(bidProjection.latestOffset(), timeout.duration()); + long startFromOffset = bidProjection.latestOffset().toCompletableFuture().get(3, TimeUnit.SECONDS); readJournal .eventsByTag("bid", startFromOffset) - .mapAsync(8, envelope -> { - final Future f = ask(writer, envelope.event(), timeout); - return f.map(new Mapper() { - @Override public Long apply(Object in) { - return envelope.offset(); - } - }, system.dispatcher()); + .mapAsync(8, envelope -> { + final CompletionStage f = ask(writer, envelope.event(), timeout); + return f.thenApplyAsync(in -> envelope.offset(), system.dispatcher()); }) .mapAsync(1, offset -> bidProjection.saveProgress(offset)) .runWith(Sink.ignore(), mat); diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceSchemaEvolutionDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceSchemaEvolutionDocTest.java index 16754c6ca9..32eae902fb 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceSchemaEvolutionDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceSchemaEvolutionDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence; diff --git a/akka-docs/rst/java/code/docs/persistence/query/LeveldbPersistenceQueryDocTest.java b/akka-docs/rst/java/code/docs/persistence/query/LeveldbPersistenceQueryDocTest.java index 5b7d14b5c2..94685bb3ff 100644 --- a/akka-docs/rst/java/code/docs/persistence/query/LeveldbPersistenceQueryDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/query/LeveldbPersistenceQueryDocTest.java @@ -1,13 +1,13 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.persistence.query; import java.util.HashSet; import java.util.Set; -import scala.runtime.BoxedUnit; +import akka.NotUsed; import akka.actor.ActorSystem; import akka.persistence.journal.WriteEventAdapter; import akka.persistence.journal.Tagged; @@ -37,7 +37,7 @@ public class LeveldbPersistenceQueryDocTest { PersistenceQuery.get(system).getReadJournalFor(LeveldbReadJournal.class, LeveldbReadJournal.Identifier()); - Source source = + Source source = queries.eventsByPersistenceId("some-persistence-id", 0, Long.MAX_VALUE); //#EventsByPersistenceId } @@ -48,7 +48,7 @@ public class LeveldbPersistenceQueryDocTest { PersistenceQuery.get(system).getReadJournalFor(LeveldbReadJournal.class, LeveldbReadJournal.Identifier()); - Source source = queries.allPersistenceIds(); + Source source = queries.allPersistenceIds(); //#AllPersistenceIds } @@ -58,7 +58,7 @@ public class LeveldbPersistenceQueryDocTest { PersistenceQuery.get(system).getReadJournalFor(LeveldbReadJournal.class, LeveldbReadJournal.Identifier()); - Source source = + Source source = queries.eventsByTag("green", 0); //#EventsByTag } diff --git a/akka-docs/rst/java/code/docs/persistence/query/MyEventsByTagJavaPublisher.java b/akka-docs/rst/java/code/docs/persistence/query/MyEventsByTagJavaPublisher.java index f46e92fd5b..b6cc838997 100644 --- a/akka-docs/rst/java/code/docs/persistence/query/MyEventsByTagJavaPublisher.java +++ b/akka-docs/rst/java/code/docs/persistence/query/MyEventsByTagJavaPublisher.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.persistence.query; diff --git a/akka-docs/rst/java/code/docs/remoting/RemoteDeploymentDocTest.java b/akka-docs/rst/java/code/docs/remoting/RemoteDeploymentDocTest.java index b7a8ae40b1..dec2354a99 100644 --- a/akka-docs/rst/java/code/docs/remoting/RemoteDeploymentDocTest.java +++ b/akka-docs/rst/java/code/docs/remoting/RemoteDeploymentDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.remoting; diff --git a/akka-docs/rst/java/code/docs/serialization/SerializationDocTest.java b/akka-docs/rst/java/code/docs/serialization/SerializationDocTest.java index fbe664d973..98f40f9562 100644 --- a/akka-docs/rst/java/code/docs/serialization/SerializationDocTest.java +++ b/akka-docs/rst/java/code/docs/serialization/SerializationDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.serialization; diff --git a/akka-docs/rst/java/code/docs/stream/ActorPublisherDocTest.java b/akka-docs/rst/java/code/docs/stream/ActorPublisherDocTest.java new file mode 100644 index 0000000000..45031232b4 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/ActorPublisherDocTest.java @@ -0,0 +1,150 @@ +/* + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package docs.stream; + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; +import akka.japi.pf.ReceiveBuilder; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.actor.AbstractActorPublisher; +import akka.stream.actor.ActorPublisherMessage; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +public class ActorPublisherDocTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create("ActorPublisherDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + //#job-manager + public static class JobManagerProtocol { + final public static class Job { + public final String payload; + + public Job(String payload) { + this.payload = payload; + } + + } + + public static class JobAcceptedMessage { + @Override + public String toString() { + return "JobAccepted"; + } + } + public static final JobAcceptedMessage JobAccepted = new JobAcceptedMessage(); + + public static class JobDeniedMessage { + @Override + public String toString() { + return "JobDenied"; + } + } + public static final JobDeniedMessage JobDenied = new JobDeniedMessage(); + } + public static class JobManager extends AbstractActorPublisher { + + public static Props props() { return Props.create(JobManager.class); } + + private final int MAX_BUFFER_SIZE = 100; + private final List buf = new ArrayList<>(); + + public JobManager() { + receive(ReceiveBuilder. + match(JobManagerProtocol.Job.class, job -> buf.size() == MAX_BUFFER_SIZE, job -> { + sender().tell(JobManagerProtocol.JobDenied, self()); + }). + match(JobManagerProtocol.Job.class, job -> { + sender().tell(JobManagerProtocol.JobAccepted, self()); + + if (buf.isEmpty() && totalDemand() > 0) + onNext(job); + else { + buf.add(job); + deliverBuf(); + } + }). + match(ActorPublisherMessage.Request.class, request -> deliverBuf()). + match(ActorPublisherMessage.Cancel.class, cancel -> context().stop(self())). 
+ build()); + } + + void deliverBuf() { + while (totalDemand() > 0) { + /* + * totalDemand is a Long and could be larger than + * what buf.splitAt can accept + */ + if (totalDemand() <= Integer.MAX_VALUE) { + final List took = + buf.subList(0, Math.min(buf.size(), (int) totalDemand())); + took.forEach(this::onNext); + buf.removeAll(took); + break; + } else { + final List took = + buf.subList(0, Math.min(buf.size(), Integer.MAX_VALUE)); + took.forEach(this::onNext); + buf.removeAll(took); + } + } + } + } + //#job-manager + + @Test + public void demonstrateActorPublisherUsage() { + new JavaTestKit(system) { + private final SilenceSystemOut.System System = SilenceSystemOut.get(getTestActor()); + + { + //#actor-publisher-usage + final Source jobManagerSource = + Source.actorPublisher(JobManager.props()); + + final ActorRef ref = jobManagerSource + .map(job -> job.payload.toUpperCase()) + .map(elem -> { + System.out.println(elem); + return elem; + }) + .to(Sink.ignore()) + .run(mat); + + ref.tell(new JobManagerProtocol.Job("a"), ActorRef.noSender()); + ref.tell(new JobManagerProtocol.Job("b"), ActorRef.noSender()); + ref.tell(new JobManagerProtocol.Job("c"), ActorRef.noSender()); + //#actor-publisher-usage + + expectMsgEquals("A"); + expectMsgEquals("B"); + expectMsgEquals("C"); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/ActorSubscriberDocTest.java b/akka-docs/rst/java/code/docs/stream/ActorSubscriberDocTest.java new file mode 100644 index 0000000000..e8751b1538 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/ActorSubscriberDocTest.java @@ -0,0 +1,233 @@ +/* + * Copyright (C) 2015-2016 Typesafe Inc. + */ + +package docs.stream; + +import akka.actor.AbstractActor; +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; +import akka.japi.pf.ReceiveBuilder; +import akka.routing.ActorRefRoutee; +import akka.routing.RoundRobinRoutingLogic; +import akka.routing.Routee; +import akka.routing.Router; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.actor.AbstractActorSubscriber; +import akka.stream.actor.ActorSubscriberMessage; +import akka.stream.actor.MaxInFlightRequestStrategy; +import akka.stream.actor.RequestStrategy; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.*; + +import static org.junit.Assert.assertEquals; + +public class ActorSubscriberDocTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create("ActorSubscriberDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + //#worker-pool + public static class WorkerPoolProtocol { + + public static class Msg { + public final int id; + public final ActorRef replyTo; + + public Msg(int id, ActorRef replyTo) { + this.id = id; + this.replyTo = replyTo; + } + + @Override + public String toString() { + return String.format("Msg(%s, %s)", id, replyTo); + } + } + public static Msg msg(int id, ActorRef replyTo) { + return new Msg(id, replyTo); + } + + + public static class Work { + public final int id; + public Work(int id) { this.id = id; } + + @Override + public String toString() { + return String.format("Work(%s)", id); + } + } + public static Work work(int id) { + return new Work(id); + } + + 
+ public static class Reply { + public final int id; + public Reply(int id) { this.id = id; } + + @Override + public String toString() { + return String.format("Reply(%s)", id); + } + } + public static Reply reply(int id) { + return new Reply(id); + } + + + public static class Done { + public final int id; + public Done(int id) { this.id = id; } + + @Override + public String toString() { + return String.format("Done(%s)", id); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Done done = (Done) o; + + if (id != done.id) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return id; + } + } + public static Done done(int id) { + return new Done(id); + } + + } + + public static class WorkerPool extends AbstractActorSubscriber { + + public static Props props() { return Props.create(WorkerPool.class); } + + final int MAX_QUEUE_SIZE = 10; + final Map queue = new HashMap<>(); + + final Router router; + + @Override + public RequestStrategy requestStrategy() { + return new MaxInFlightRequestStrategy(MAX_QUEUE_SIZE) { + @Override + public int inFlightInternally() { + return queue.size(); + } + }; + } + + public WorkerPool() { + final List routees = new ArrayList<>(); + for (int i = 0; i < 3; i++) + routees.add(new ActorRefRoutee(context().actorOf(Props.create(Worker.class)))); + router = new Router(new RoundRobinRoutingLogic(), routees); + + receive(ReceiveBuilder. + match(ActorSubscriberMessage.OnNext.class, on -> on.element() instanceof WorkerPoolProtocol.Msg, + onNext -> { + WorkerPoolProtocol.Msg msg = (WorkerPoolProtocol.Msg) onNext.element(); + queue.put(msg.id, msg.replyTo); + + if (queue.size() > MAX_QUEUE_SIZE) + throw new RuntimeException("queued too many: " + queue.size()); + + router.route(WorkerPoolProtocol.work(msg.id), self()); + }). + match(WorkerPoolProtocol.Reply.class, reply -> { + int id = reply.id; + queue.get(id).tell(WorkerPoolProtocol.done(id), self()); + queue.remove(id); + }). + build()); + } + } + + static class Worker extends AbstractActor { + public Worker() { + receive(ReceiveBuilder. + match(WorkerPoolProtocol.Work.class, work -> { + // ... 
+ sender().tell(WorkerPoolProtocol.reply(work.id), self()); + }).build()); + } + } + //#worker-pool + + @Test + public void demonstrateActorPublisherUsage() { + new JavaTestKit(system) { + + { + final ActorRef replyTo = getTestActor(); + + //#actor-subscriber-usage + final int N = 117; + final List data = new ArrayList<>(N); + for (int i = 0; i < N; i++) { + data.add(i); + } + + Source.from(data) + .map(i -> WorkerPoolProtocol.msg(i, replyTo)) + .runWith(Sink.actorSubscriber(WorkerPool.props()), mat); + //#actor-subscriber-usage + + List got = Arrays.asList(receiveN(N)); + Collections.sort(got, new Comparator() { + @Override + public int compare(Object o1, Object o2) { + if (o1 instanceof WorkerPoolProtocol.Done && o2 instanceof WorkerPoolProtocol.Done) { + return ((WorkerPoolProtocol.Done) o1).id - ((WorkerPoolProtocol.Done) o2).id; + } else return 0; + } + }); + int i = 0; + for (; i < N; i++) { + assertEquals(String.format("Expected %d, but got %s", i, got.get(i)), WorkerPoolProtocol.done(i), got.get(i)); + } + assertEquals(String.format("Expected 117 messages but got %d", i), i, 117); + } + }; + } + + +} diff --git a/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java new file mode 100644 index 0000000000..ca58f66d2f --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/BidiFlowDocTest.java @@ -0,0 +1,236 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import java.nio.ByteOrder; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import akka.NotUsed; +import akka.stream.javadsl.GraphDSL; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import akka.actor.ActorSystem; +import akka.japi.pf.PFBuilder; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.stage.*; +import akka.testkit.JavaTestKit; +import akka.util.ByteIterator; +import akka.util.ByteString; +import akka.util.ByteStringBuilder; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; +import static org.junit.Assert.assertArrayEquals; + +public class BidiFlowDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + //#codec + static interface Message {} + static class Ping implements Message { + final int id; + public Ping(int id) { this.id = id; } + @Override + public boolean equals(Object o) { + if (o instanceof Ping) { + return ((Ping) o).id == id; + } else return false; + } + @Override + public int hashCode() { + return id; + } + } + static class Pong implements Message { + final int id; + public Pong(int id) { this.id = id; } + @Override + public boolean equals(Object o) { + if (o instanceof Pong) { + return ((Pong) o).id == id; + } else return false; + } + @Override + public int hashCode() { + return id; + } + } + + //#codec-impl + public static ByteString toBytes(Message msg) { + //#implementation-details-elided + if (msg instanceof Ping) { + final int id = ((Ping) msg).id; + return new ByteStringBuilder().putByte((byte) 1) + .putInt(id, ByteOrder.LITTLE_ENDIAN).result(); + } else { + final int id = ((Pong) 
msg).id; + return new ByteStringBuilder().putByte((byte) 2) + .putInt(id, ByteOrder.LITTLE_ENDIAN).result(); + } + //#implementation-details-elided + } + + public static Message fromBytes(ByteString bytes) { + //#implementation-details-elided + final ByteIterator it = bytes.iterator(); + switch(it.getByte()) { + case 1: + return new Ping(it.getInt(ByteOrder.LITTLE_ENDIAN)); + case 2: + return new Pong(it.getInt(ByteOrder.LITTLE_ENDIAN)); + default: + throw new RuntimeException("message format error"); + } + //#implementation-details-elided + } + //#codec-impl + + //#codec + @SuppressWarnings("unused") + //#codec + public final BidiFlow codecVerbose = + BidiFlow.fromGraph(GraphDSL.create(b -> { + final FlowShape top = + b.add(Flow.of(Message.class).map(BidiFlowDocTest::toBytes)); + final FlowShape bottom = + b.add(Flow.of(ByteString.class).map(BidiFlowDocTest::fromBytes)); + return BidiShape.fromFlows(top, bottom); + })); + + public final BidiFlow codec = + BidiFlow.fromFunctions(BidiFlowDocTest::toBytes, BidiFlowDocTest::fromBytes); + //#codec + + //#framing + public static ByteString addLengthHeader(ByteString bytes) { + final int len = bytes.size(); + return new ByteStringBuilder() + .putInt(len, ByteOrder.LITTLE_ENDIAN) + .append(bytes) + .result(); + } + + public static class FrameParser extends PushPullStage { + // this holds the received but not yet parsed bytes + private ByteString stash = ByteString.empty(); + // this holds the current message length or -1 if at a boundary + private int needed = -1; + + @Override + public SyncDirective onPull(Context ctx) { + return run(ctx); + } + + @Override + public SyncDirective onPush(ByteString bytes, Context ctx) { + stash = stash.concat(bytes); + return run(ctx); + } + + @Override + public TerminationDirective onUpstreamFinish(Context ctx) { + if (stash.isEmpty()) return ctx.finish(); + else return ctx.absorbTermination(); // we still have bytes to emit + } + + private SyncDirective run(Context ctx) { + if (needed == -1) { + // are we at a boundary? then figure out next length + if (stash.size() < 4) return pullOrFinish(ctx); + else { + needed = stash.iterator().getInt(ByteOrder.LITTLE_ENDIAN); + stash = stash.drop(4); + return run(ctx); // cycle back to possibly already emit the next chunk + } + } else if (stash.size() < needed) { + // we are in the middle of a message, need more bytes + return pullOrFinish(ctx); + } else { + // we have enough to emit at least one message, so do it + final ByteString emit = stash.take(needed); + stash = stash.drop(needed); + needed = -1; + return ctx.push(emit); + } + } + + /* + * After having called absorbTermination() we cannot pull any more, so if we need + * more data we will just have to give up. 
+ */ + private SyncDirective pullOrFinish(Context ctx) { + if (ctx.isFinishing()) return ctx.finish(); + else return ctx.pull(); + } + } + + public final BidiFlow framing = + BidiFlow.fromGraph(GraphDSL.create(b -> { + final FlowShape top = + b.add(Flow.of(ByteString.class).map(BidiFlowDocTest::addLengthHeader)); + final FlowShape bottom = + b.add(Flow.of(ByteString.class).transform(() -> new FrameParser())); + return BidiShape.fromFlows(top, bottom); + })); + //#framing + + @Test + public void mustCompose() throws Exception { + //#compose + /* construct protocol stack + * +------------------------------------+ + * | stack | + * | | + * | +-------+ +---------+ | + * ~> O~~o | ~> | o~~O ~> + * Message | | codec | ByteString | framing | | ByteString + * <~ O~~o | <~ | o~~O <~ + * | +-------+ +---------+ | + * +------------------------------------+ + */ + final BidiFlow stack = + codec.atop(framing); + + // test it by plugging it into its own inverse and closing the right end + final Flow pingpong = + Flow.of(Message.class).collect(new PFBuilder() + .match(Ping.class, p -> new Pong(p.id)) + .build() + ); + final Flow flow = + stack.atop(stack.reversed()).join(pingpong); + final CompletionStage> result = Source + .from(Arrays.asList(0, 1, 2)) + . map(id -> new Ping(id)) + .via(flow) + .grouped(10) + .runWith(Sink.> head(), mat); + assertArrayEquals( + new Message[] { new Pong(0), new Pong(1), new Pong(2) }, + result.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(new Message[0])); + //#compose + } +} diff --git a/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java b/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java new file mode 100644 index 0000000000..60f64c4ad7 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/CompositionDocTest.java @@ -0,0 +1,308 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import java.util.Arrays; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +import akka.NotUsed; +import akka.stream.ClosedShape; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import akka.actor.ActorSystem; +import akka.dispatch.Mapper; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.javadsl.Tcp.OutgoingConnection; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import scala.concurrent.*; + +import scala.Option; + +public class CompositionDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void nonNestedFlow() throws Exception { + //#non-nested-flow + Source.single(0) + .map(i -> i + 1) + .filter(i -> i != 0) + .map(i -> i - 2) + .to(Sink.fold(0, (acc, i) -> acc + i)); + + // ... where is the nesting? 
+ //#non-nested-flow + } + + @Test + public void nestedFlow() throws Exception { + //#nested-flow + final Source nestedSource = + Source.single(0) // An atomic source + .map(i -> i + 1) // an atomic processing stage + .named("nestedSource"); // wraps up the current Source and gives it a name + + final Flow nestedFlow = + Flow.of(Integer.class).filter(i -> i != 0) // an atomic processing stage + .map(i -> i - 2) // another atomic processing stage + .named("nestedFlow"); // wraps up the Flow, and gives it a name + + final Sink nestedSink = + nestedFlow.to(Sink.fold(0, (acc, i) -> acc + i)) // wire an atomic sink to the nestedFlow + .named("nestedSink"); // wrap it up + + // Create a RunnableGraph + final RunnableGraph runnableGraph = nestedSource.to(nestedSink); + //#nested-flow + } + + @Test + public void reusingComponents() throws Exception { + final Source nestedSource = + Source.single(0) // An atomic source + .map(i -> i + 1) // an atomic processing stage + .named("nestedSource"); // wraps up the current Source and gives it a name + + final Flow nestedFlow = + Flow.of(Integer.class).filter(i -> i != 0) // an atomic processing stage + .map(i -> i - 2) // another atomic processing stage + .named("nestedFlow"); // wraps up the Flow, and gives it a name + + final Sink nestedSink = + nestedFlow.to(Sink.fold(0, (acc, i) -> acc + i)) // wire an atomic sink to the nestedFlow + .named("nestedSink"); // wrap it up + + //#reuse + // Create a RunnableGraph from our components + final RunnableGraph runnableGraph = nestedSource.to(nestedSink); + + // Usage is uniform, no matter if modules are composite or atomic + final RunnableGraph runnableGraph2 = + Source.single(0).to(Sink.fold(0, (acc, i) -> acc + i)); + //#reuse + } + + @Test + public void complexGraph() throws Exception { + //#complex-graph + RunnableGraph.fromGraph( + GraphDSL.create(builder -> { + final Outlet A = builder.add(Source.single(0)).out(); + final UniformFanOutShape B = builder.add(Broadcast.create(2)); + final UniformFanInShape C = builder.add(Merge.create(2)); + final FlowShape D = + builder.add(Flow.of(Integer.class).map(i -> i + 1)); + final UniformFanOutShape E = builder.add(Balance.create(2)); + final UniformFanInShape F = builder.add(Merge.create(2)); + final Inlet G = builder.add(Sink. 
foreach(System.out::println)).in(); + + builder.from(F).toFanIn(C); + builder.from(A).viaFanOut(B).viaFanIn(C).toFanIn(F); + builder.from(B).via(D).viaFanOut(E).toFanIn(F); + builder.from(E).toInlet(G); + return ClosedShape.getInstance(); + })); + //#complex-graph + + //#complex-graph-alt + RunnableGraph.fromGraph( + GraphDSL.create(builder -> { + final SourceShape A = builder.add(Source.single(0)); + final UniformFanOutShape B = builder.add(Broadcast.create(2)); + final UniformFanInShape C = builder.add(Merge.create(2)); + final FlowShape D = + builder.add(Flow.of(Integer.class).map(i -> i + 1)); + final UniformFanOutShape E = builder.add(Balance.create(2)); + final UniformFanInShape F = builder.add(Merge.create(2)); + final SinkShape G = builder.add(Sink.foreach(System.out::println)); + + builder.from(F.out()).toInlet(C.in(0)); + builder.from(A).toInlet(B.in()); + builder.from(B.out(0)).toInlet(C.in(1)); + builder.from(C.out()).toInlet(F.in(0)); + builder.from(B.out(1)).via(D).toInlet(E.in()); + builder.from(E.out(0)).toInlet(F.in(1)); + builder.from(E.out(1)).to(G); + return ClosedShape.getInstance(); + })); + //#complex-graph-alt + } + + @Test + public void partialGraph() throws Exception { + //#partial-graph + final Graph, NotUsed> partial = + GraphDSL.create(builder -> { + final UniformFanOutShape B = builder.add(Broadcast.create(2)); + final UniformFanInShape C = builder.add(Merge.create(2)); + final UniformFanOutShape E = builder.add(Balance.create(2)); + final UniformFanInShape F = builder.add(Merge.create(2)); + + builder.from(F.out()).toInlet(C.in(0)); + builder.from(B).viaFanIn(C).toFanIn(F); + builder.from(B).via(builder.add(Flow.of(Integer.class).map(i -> i + 1))).viaFanOut(E).toFanIn(F); + + return new FlowShape(B.in(), E.out(1)); + }); + + //#partial-graph + + //#partial-use + Source.single(0).via(partial).to(Sink.ignore()); + //#partial-use + + //#partial-flow-dsl + // Convert the partial graph of FlowShape to a Flow to get + // access to the fluid DSL (for example to be able to call .filter()) + final Flow flow = Flow.fromGraph(partial); + + // Simple way to create a graph backed Source + final Source source = Source.fromGraph( + GraphDSL.create(builder -> { + final UniformFanInShape merge = builder.add(Merge.create(2)); + builder.from(builder.add(Source.single(0))).toFanIn(merge); + builder.from(builder.add(Source.from(Arrays.asList(2, 3, 4)))).toFanIn(merge); + // Exposing exactly one output port + return new SourceShape(merge.out()); + }) + ); + + // Building a Sink with a nested Flow, using the fluid DSL + final Sink sink = Flow.of(Integer.class) + .map(i -> i * 2) + .drop(10) + .named("nestedFlow") + .to(Sink.head()); + + // Putting all together + final RunnableGraph closed = source.via(flow.filter(i -> i > 1)).to(sink); + //#partial-flow-dsl + } + + @Test + public void closedGraph() throws Exception { + //#embed-closed + final RunnableGraph closed1 = + Source.single(0).to(Sink.foreach(System.out::println)); + final RunnableGraph closed2 = + RunnableGraph.fromGraph( + GraphDSL.create(builder -> { + final ClosedShape embeddedClosed = builder.add(closed1); + return embeddedClosed; // Could return ClosedShape.getInstance() + })); + //#embed-closed + } + + //#mat-combine-4a + static class MyClass { + private CompletableFuture> p; + private OutgoingConnection conn; + + public MyClass(CompletableFuture> p, OutgoingConnection conn) { + this.p = p; + this.conn = conn; + } + + public void close() { + p.complete(Optional.empty()); + } + } + + static class Combiner { + static 
CompletionStage f(CompletableFuture> p, + Pair, CompletionStage> rest) { + return rest.first().thenApply(c -> new MyClass(p, c)); + } + } + //#mat-combine-4a + + @Test + public void materializedValues() throws Exception { + //#mat-combine-1 + // Materializes to Promise (red) + final Source>> source = Source.maybe(); + + // Materializes to BoxedUnit (black) + final Flow flow1 = Flow.of(Integer.class).take(100); + + // Materializes to Promise> (red) + final Source>> nestedSource = + source.viaMat(flow1, Keep.left()).named("nestedSource"); + //#mat-combine-1 + + //#mat-combine-2 + // Materializes to BoxedUnit (orange) + final Flow flow2 = Flow.of(Integer.class) + .map(i -> ByteString.fromString(i.toString())); + + // Materializes to Future (yellow) + final Flow> flow3 = + Tcp.get(system).outgoingConnection("localhost", 8080); + + // Materializes to Future (yellow) + final Flow> nestedFlow = + flow2.viaMat(flow3, Keep.right()).named("nestedFlow"); + //#mat-combine-2 + + //#mat-combine-3 + // Materializes to Future (green) + final Sink> sink = + Sink. fold("", (acc, i) -> acc + i.utf8String()); + + // Materializes to Pair, Future> (blue) + final Sink, CompletionStage>> nestedSink = + nestedFlow.toMat(sink, Keep.both()); + //#mat-combine-3 + + //#mat-combine-4b + // Materializes to Future (purple) + final RunnableGraph> runnableGraph = + nestedSource.toMat(nestedSink, Combiner::f); + //#mat-combine-4b + } + + @Test + public void attributes() throws Exception { + //#attributes-inheritance + final Source nestedSource = + Source.single(0) + .map(i -> i + 1) + .named("nestedSource"); // Wrap, no inputBuffer set + + final Flow nestedFlow = + Flow.of(Integer.class).filter(i -> i != 0) + .via(Flow.of(Integer.class) + .map(i -> i - 2) + .withAttributes(Attributes.inputBuffer(4, 4))) // override + .named("nestedFlow"); // Wrap, no inputBuffer set + + final Sink nestedSink = + nestedFlow.to(Sink.fold(0, (acc, i) -> acc + i)) // wire an atomic sink to the nestedFlow + .withAttributes(Attributes.name("nestedSink") + .and(Attributes.inputBuffer(3, 3))); // override + //#attributes-inheritance + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/FlowDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowDocTest.java new file mode 100644 index 0000000000..3be2f2bd21 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/FlowDocTest.java @@ -0,0 +1,297 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream; + +import static org.junit.Assert.assertEquals; + +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; + +import akka.NotUsed; +import akka.japi.Pair; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.Promise; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; +import scala.Option; +import akka.actor.ActorSystem; +import akka.actor.Cancellable; +import akka.dispatch.Futures; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; + +public class FlowDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = 
ActorMaterializer.create(system); + + @Test + public void sourceIsImmutable() throws Exception { + //#source-immutable + final Source source = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); + source.map(x -> 0); // has no effect on source, since it's immutable + source.runWith(Sink.fold(0, (agg, next) -> agg + next), mat); // 55 + + // returns new Source, with `map()` appended + final Source zeroes = source.map(x -> 0); + final Sink> fold = + Sink. fold(0, (agg, next) -> agg + next); + zeroes.runWith(fold, mat); // 0 + //#source-immutable + + int result = zeroes.runWith(fold, mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(0, result); + } + + @Test + public void materializationInSteps() throws Exception { + //#materialization-in-steps + final Source source = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); + // note that the Future is scala.concurrent.Future + final Sink> sink = + Sink. fold(0, (aggr, next) -> aggr + next); + + // connect the Source to the Sink, obtaining a RunnableFlow + final RunnableGraph> runnable = + source.toMat(sink, Keep.right()); + + // materialize the flow + final CompletionStage sum = runnable.run(mat); + //#materialization-in-steps + + int result = sum.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(55, result); + } + + @Test + public void materializationRunWith() throws Exception { + //#materialization-runWith + final Source source = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); + final Sink> sink = + Sink. fold(0, (aggr, next) -> aggr + next); + + // materialize the flow, getting the Sinks materialized value + final CompletionStage sum = source.runWith(sink, mat); + //#materialization-runWith + + int result = sum.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(55, result); + } + + @Test + public void materializedMapUnique() throws Exception { + //#stream-reuse + // connect the Source to the Sink, obtaining a RunnableGraph + final Sink> sink = + Sink. fold(0, (aggr, next) -> aggr + next); + final RunnableGraph> runnable = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)).toMat(sink, Keep.right()); + + // get the materialized value of the FoldSink + final CompletionStage sum1 = runnable.run(mat); + final CompletionStage sum2 = runnable.run(mat); + + // sum1 and sum2 are different Futures! + //#stream-reuse + + int result1 = sum1.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(55, result1); + int result2 = sum2.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(55, result2); + } + + @Test + @SuppressWarnings("unused") + public void compoundSourceCannotBeUsedAsKey() throws Exception { + //#compound-source-is-not-keyed-runWith + final Object tick = new Object(); + + final FiniteDuration oneSecond = Duration.create(1, TimeUnit.SECONDS); + //akka.actor.Cancellable + final Source timer = + Source.tick(oneSecond, oneSecond, tick); + + Sink.ignore().runWith(timer, mat); + + final Source timerMap = timer.map(t -> "tick"); + // WRONG: returned type is not the timers Cancellable! 
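+      // (to get the Cancellable, keep the materialized value of the original
+      //  `timer` source instead, e.g. timer.to(Sink.ignore()).run(mat),
+      //  as shown in the next snippet)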
+ // Cancellable timerCancellable = Sink.ignore().runWith(timerMap, mat); + //#compound-source-is-not-keyed-runWith + + //#compound-source-is-not-keyed-run + // retain the materialized map, in order to retrieve the timer's Cancellable + final Cancellable timerCancellable = timer.to(Sink.ignore()).run(mat); + timerCancellable.cancel(); + //#compound-source-is-not-keyed-run + } + + @Test + public void creatingSourcesSinks() throws Exception { + //#source-sink + // Create a source from an Iterable + List list = new LinkedList(); + list.add(1); + list.add(2); + list.add(3); + Source.from(list); + + // Create a source form a Future + Source.fromFuture(Futures.successful("Hello Streams!")); + + // Create a source from a single element + Source.single("only one element"); + + // an empty source + Source.empty(); + + // Sink that folds over the stream and returns a Future + // of the final result in the MaterializedMap + Sink.fold(0, (Integer aggr, Integer next) -> aggr + next); + + // Sink that returns a Future in the MaterializedMap, + // containing the first element of the stream + Sink.head(); + + // A Sink that consumes a stream without doing anything with the elements + Sink.ignore(); + + // A Sink that executes a side-effecting call for every element of the stream + Sink.foreach(System.out::println); + //#source-sink + } + + @Test + public void variousWaysOfConnecting() throws Exception { + //#flow-connecting + // Explicitly creating and wiring up a Source, Sink and Flow + Source.from(Arrays.asList(1, 2, 3, 4)) + .via(Flow.of(Integer.class).map(elem -> elem * 2)) + .to(Sink.foreach(System.out::println)); + + // Starting from a Source + final Source source = Source.from(Arrays.asList(1, 2, 3, 4)) + .map(elem -> elem * 2); + source.to(Sink.foreach(System.out::println)); + + // Starting from a Sink + final Sink sink = Flow.of(Integer.class) + .map(elem -> elem * 2).to(Sink.foreach(System.out::println)); + Source.from(Arrays.asList(1, 2, 3, 4)).to(sink); + //#flow-connecting + } + + @Test + public void transformingMaterialized() throws Exception { + + FiniteDuration oneSecond = FiniteDuration.apply(1, TimeUnit.SECONDS); + Flow throttler = + Flow.fromGraph(GraphDSL.create( + Source.tick(oneSecond, oneSecond, ""), + (b, tickSource) -> { + FanInShape2 zip = b.add(ZipWith.create(Keep.right())); + b.from(tickSource).toInlet(zip.in0()); + return FlowShape.of(zip.in1(), zip.out()); + })); + + //#flow-mat-combine + + // An empty source that can be shut down explicitly from the outside + Source>> source = Source.maybe(); + + // A flow that internally throttles elements to 1/second, and returns a Cancellable + // which can be used to shut down the stream + Flow flow = throttler; + + // A sink that returns the first element of a stream in the returned Future + Sink> sink = Sink.head(); + + + // By default, the materialized value of the leftmost stage is preserved + RunnableGraph>> r1 = source.via(flow).to(sink); + + // Simple selection of materialized values by using Keep.right + RunnableGraph r2 = source.viaMat(flow, Keep.right()).to(sink); + RunnableGraph> r3 = source.via(flow).toMat(sink, Keep.right()); + + // Using runWith will always give the materialized values of the stages added + // by runWith() itself + CompletionStage r4 = source.via(flow).runWith(sink, mat); + CompletableFuture> r5 = flow.to(sink).runWith(source, mat); + Pair>, CompletionStage> r6 = flow.runWith(source, sink, mat); + + // Using more complext combinations + RunnableGraph>, Cancellable>> r7 = + source.viaMat(flow, 
Keep.both()).to(sink); + + RunnableGraph>, CompletionStage>> r8 = + source.via(flow).toMat(sink, Keep.both()); + + RunnableGraph>, Cancellable>, CompletionStage>> r9 = + source.viaMat(flow, Keep.both()).toMat(sink, Keep.both()); + + RunnableGraph>> r10 = + source.viaMat(flow, Keep.right()).toMat(sink, Keep.both()); + + // It is also possible to map over the materialized values. In r9 we had a + // doubly nested pair, but we want to flatten it out + + + RunnableGraph r11 = + r9.mapMaterializedValue( (nestedTuple) -> { + CompletableFuture> p = nestedTuple.first().first(); + Cancellable c = nestedTuple.first().second(); + CompletionStage f = nestedTuple.second(); + + // Picking the Cancellable, but we could also construct a domain class here + return c; + }); + //#flow-mat-combine + } + + public void fusingAndAsync() { + //#explicit-fusing + Flow flow = + Flow.of(Integer.class).map(x -> x * 2).filter(x -> x > 500); + Graph, NotUsed> fused = + akka.stream.Fusing.aggressive(flow); + + Source.fromIterator(() -> Stream.iterate(0, x -> x + 1).iterator()) + .via(fused) + .take(1000); + //#explicit-fusing + + //#flow-async + Source.range(1, 3) + .map(x -> x + 1) + .withAttributes(Attributes.asyncBoundary()) + .map(x -> x * 2) + .to(Sink.ignore()); + //#flow-async + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java new file mode 100644 index 0000000000..537cdd6ee7 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/FlowErrorDocTest.java @@ -0,0 +1,142 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; + +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.ActorMaterializerSettings; +import akka.stream.Materializer; +import akka.stream.Supervision; +import akka.stream.javadsl.Flow; +import akka.stream.ActorAttributes; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.japi.function.Function; +import akka.testkit.JavaTestKit; + +public class FlowErrorDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + @Test(expected = ArithmeticException.class) + public void demonstrateFailStream() throws Exception { + //#stop + final Materializer mat = ActorMaterializer.create(system); + final Source source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5)) + .map(elem -> 100 / elem); + final Sink> fold = + Sink. 
fold(0, (acc, elem) -> acc + elem);
+    final CompletionStage<Integer> result = source.runWith(fold, mat);
+    // division by zero will fail the stream and the
+    // result here will be a Future completed with Failure(ArithmeticException)
+    //#stop
+
+    result.toCompletableFuture().get(3, TimeUnit.SECONDS);
+  }
+
+  @Test
+  public void demonstrateResumeStream() throws Exception {
+    //#resume
+    final Function<Throwable, Supervision.Directive> decider = exc -> {
+      if (exc instanceof ArithmeticException)
+        return Supervision.resume();
+      else
+        return Supervision.stop();
+    };
+    final Materializer mat = ActorMaterializer.create(
+      ActorMaterializerSettings.create(system).withSupervisionStrategy(decider),
+      system);
+    final Source<Integer, NotUsed> source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5))
+      .map(elem -> 100 / elem);
+    final Sink<Integer, CompletionStage<Integer>> fold =
+      Sink.fold(0, (acc, elem) -> acc + elem);
+    final CompletionStage<Integer> result = source.runWith(fold, mat);
+    // the element causing division by zero will be dropped
+    // result here will be a Future completed with Success(228)
+    //#resume
+
+    assertEquals(Integer.valueOf(228), result.toCompletableFuture().get(3, TimeUnit.SECONDS));
+  }
+
+  @Test
+  public void demonstrateResumeSectionStream() throws Exception {
+    //#resume-section
+    final Materializer mat = ActorMaterializer.create(system);
+    final Function<Throwable, Supervision.Directive> decider = exc -> {
+      if (exc instanceof ArithmeticException)
+        return Supervision.resume();
+      else
+        return Supervision.stop();
+    };
+    final Flow<Integer, Integer, NotUsed> flow =
+      Flow.of(Integer.class).filter(elem -> 100 / elem < 50).map(elem -> 100 / (5 - elem))
+        .withAttributes(ActorAttributes.withSupervisionStrategy(decider));
+    final Source<Integer, NotUsed> source = Source.from(Arrays.asList(0, 1, 2, 3, 4, 5))
+      .via(flow);
+    final Sink<Integer, CompletionStage<Integer>> fold =
+      Sink.<Integer, Integer> fold(0, (acc, elem) -> acc + elem);
+    final CompletionStage<Integer> result = source.runWith(fold, mat);
+    // the elements causing division by zero will be dropped
+    // result here will be a Future completed with Success(150)
+    //#resume-section
+
+    assertEquals(Integer.valueOf(150), result.toCompletableFuture().get(3, TimeUnit.SECONDS));
+  }
+
+  @Test
+  public void demonstrateRestartSectionStream() throws Exception {
+    //#restart-section
+    final Materializer mat = ActorMaterializer.create(system);
+    final Function<Throwable, Supervision.Directive> decider = exc -> {
+      if (exc instanceof IllegalArgumentException)
+        return Supervision.restart();
+      else
+        return Supervision.stop();
+    };
+    final Flow<Integer, Integer, NotUsed> flow =
+      Flow.of(Integer.class).scan(0, (acc, elem) -> {
+        if (elem < 0) throw new IllegalArgumentException("negative not allowed");
+        else return acc + elem;
+      })
+        .withAttributes(ActorAttributes.withSupervisionStrategy(decider));
+    final Source<Integer, NotUsed> source = Source.from(Arrays.asList(1, 3, -1, 5, 7))
+      .via(flow);
+    final CompletionStage<List<Integer>> result = source.grouped(1000)
+      .runWith(Sink.<List<Integer>>head(), mat);
+    // the negative element causes the scan stage to be restarted,
+    // i.e. start from 0 again
+    // result here will be a Future completed with Success(List(0, 1, 4, 0, 5, 12))
+    //#restart-section
+
+    assertEquals(
+      Arrays.asList(0, 1, 4, 0, 5, 12),
+      result.toCompletableFuture().get(3, TimeUnit.SECONDS));
+  }
+
+}
diff --git a/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java
new file mode 100644
index 0000000000..e30d9dd759
--- /dev/null
+++ b/akka-docs/rst/java/code/docs/stream/FlowGraphDocTest.java
@@ -0,0 +1,171 @@
+/**
+ * Copyright (C) 2015-2016 Typesafe Inc.
+ */ +package docs.stream; + +import static org.junit.Assert.*; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import akka.NotUsed; +import akka.stream.ClosedShape; +import akka.stream.SourceShape; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; + +public class FlowGraphDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowGraphDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void demonstrateBuildSimpleGraph() throws Exception { + //#simple-flow-graph + final Source in = Source.from(Arrays.asList(1, 2, 3, 4, 5)); + final Sink, CompletionStage>> sink = Sink.head(); + final Sink, CompletionStage>> sink2 = Sink.head(); + final Flow f1 = Flow.of(Integer.class).map(elem -> elem + 10); + final Flow f2 = Flow.of(Integer.class).map(elem -> elem + 20); + final Flow f3 = Flow.of(Integer.class).map(elem -> elem.toString()); + final Flow f4 = Flow.of(Integer.class).map(elem -> elem + 30); + + final RunnableGraph>> result = + RunnableGraph.>>fromGraph( + GraphDSL + .create( + sink, + (builder, out) -> { + final UniformFanOutShape bcast = builder.add(Broadcast.create(2)); + final UniformFanInShape merge = builder.add(Merge.create(2)); + + final Outlet source = builder.add(in).out(); + builder.from(source).via(builder.add(f1)) + .viaFanOut(bcast).via(builder.add(f2)).viaFanIn(merge) + .via(builder.add(f3.grouped(1000))).to(out); + builder.from(bcast).via(builder.add(f4)).toFanIn(merge); + return ClosedShape.getInstance(); + })); + //#simple-flow-graph + final List list = result.run(mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + final String[] res = list.toArray(new String[] {}); + Arrays.sort(res, null); + assertArrayEquals(new String[] { "31", "32", "33", "34", "35", "41", "42", "43", "44", "45" }, res); + } + + @Test + @SuppressWarnings("unused") + public void demonstrateConnectErrors() { + try { + //#simple-graph + final RunnableGraph g = + RunnableGraph.fromGraph( + GraphDSL + .create((b) -> { + final SourceShape source1 = b.add(Source.from(Arrays.asList(1, 2, 3, 4, 5))); + final SourceShape source2 = b.add(Source.from(Arrays.asList(1, 2, 3, 4, 5))); + final FanInShape2> zip = b.add(Zip.create()); + b.from(source1).toInlet(zip.in0()); + b.from(source2).toInlet(zip.in1()); + return ClosedShape.getInstance(); + } + ) + ); + // unconnected zip.out (!) 
=> "The inlets [] and outlets [] must correspond to the inlets [] and outlets [ZipWith2.out]" + //#simple-graph + fail("expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertTrue(e != null && e.getMessage() != null && e.getMessage().contains("must correspond to")); + } + } + + @Test + public void demonstrateReusingFlowInGraph() throws Exception { + //#flow-graph-reusing-a-flow + final Sink> topHeadSink = Sink.head(); + final Sink> bottomHeadSink = Sink.head(); + final Flow sharedDoubler = Flow.of(Integer.class).map(elem -> elem * 2); + + final RunnableGraph, CompletionStage>> g = + RunnableGraph., CompletionStage>>fromGraph( + GraphDSL.create( + topHeadSink, // import this sink into the graph + bottomHeadSink, // and this as well + Keep.both(), + (b, top, bottom) -> { + final UniformFanOutShape bcast = + b.add(Broadcast.create(2)); + + b.from(b.add(Source.single(1))).viaFanOut(bcast) + .via(b.add(sharedDoubler)).to(top); + b.from(bcast).via(b.add(sharedDoubler)).to(bottom); + return ClosedShape.getInstance(); + } + ) + ); + //#flow-graph-reusing-a-flow + final Pair, CompletionStage> pair = g.run(mat); + assertEquals(Integer.valueOf(2), pair.first().toCompletableFuture().get(3, TimeUnit.SECONDS)); + assertEquals(Integer.valueOf(2), pair.second().toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + @Test + public void demonstrateMatValue() throws Exception { + //#flow-graph-matvalue + final Sink> foldSink = Sink. fold(0, (a, b) -> { + return a + b; + }); + + final Flow, Integer, NotUsed> flatten = + Flow.>create().mapAsync(4, x -> x); + + final Flow> foldingFlow = Flow.fromGraph( + GraphDSL.create(foldSink, + (b, fold) -> { + return FlowShape.of( + fold.in(), + b.from(b.materializedValue()).via(b.add(flatten)).out()); + })); + //#flow-graph-matvalue + + //#flow-graph-matvalue-cycle + // This cannot produce any value: + final Source> cyclicSource = Source.fromGraph( + GraphDSL.create(foldSink, + (b, fold) -> { + // - Fold cannot complete until its upstream mapAsync completes + // - mapAsync cannot complete until the materialized Future produced by + // fold completes + // As a result this Source will never emit anything, and its materialited + // Future will never complete + b.from(b.materializedValue()).via(b.add(flatten)).to(fold); + return SourceShape.of(b.from(b.materializedValue()).via(b.add(flatten)).out()); + })); + + //#flow-graph-matvalue-cycle + } +} diff --git a/akka-docs/rst/java/code/docs/stream/FlowParallelismDocTest.java b/akka-docs/rst/java/code/docs/stream/FlowParallelismDocTest.java new file mode 100644 index 0000000000..34693fc2f2 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/FlowParallelismDocTest.java @@ -0,0 +1,146 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream; + + +import static org.junit.Assert.assertEquals; + +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import akka.actor.ActorSystem; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; + +public class FlowParallelismDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + static class ScoopOfBatter {} + static class HalfCookedPancake {} + static class Pancake {} + + //#pipelining + Flow 
fryingPan1 = + Flow.of(ScoopOfBatter.class).map(batter -> new HalfCookedPancake()); + + Flow fryingPan2 = + Flow.of(HalfCookedPancake.class).map(halfCooked -> new Pancake()); + //#pipelining + + @Test + public void demonstratePipelining() { + //#pipelining + + // With the two frying pans we can fully cook pancakes + Flow pancakeChef = fryingPan1.via(fryingPan2); + //#pipelining + } + + @Test + public void demonstrateParallelism() { + //#parallelism + Flow fryingPan = + Flow.of(ScoopOfBatter.class).map(batter -> new Pancake()); + + Flow pancakeChef = + Flow.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape mergePancakes = + b.add(Merge.create(2)); + final UniformFanOutShape dispatchBatter = + b.add(Balance.create(2)); + + // Using two frying pans in parallel, both fully cooking a pancake from the batter. + // We always put the next scoop of batter to the first frying pan that becomes available. + b.from(dispatchBatter.out(0)).via(b.add(fryingPan)).toInlet(mergePancakes.in(0)); + // Notice that we used the "fryingPan" flow without importing it via builder.add(). + // Flows used this way are auto-imported, which in this case means that the two + // uses of "fryingPan" mean actually different stages in the graph. + b.from(dispatchBatter.out(1)).via(b.add(fryingPan)).toInlet(mergePancakes.in(1)); + + return FlowShape.of(dispatchBatter.in(), mergePancakes.out()); + })); + //#parallelism + } + + @Test + public void parallelPipeline() { + //#parallel-pipeline + Flow pancakeChef = + Flow.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape mergePancakes = + b.add(Merge.create(2)); + final UniformFanOutShape dispatchBatter = + b.add(Balance.create(2)); + + // Using two pipelines, having two frying pans each, in total using + // four frying pans + b.from(dispatchBatter.out(0)) + .via(b.add(fryingPan1)) + .via(b.add(fryingPan2)) + .toInlet(mergePancakes.in(0)); + + b.from(dispatchBatter.out(1)) + .via(b.add(fryingPan1)) + .via(b.add(fryingPan2)) + .toInlet(mergePancakes.in(1)); + + return FlowShape.of(dispatchBatter.in(), mergePancakes.out()); + })); + //#parallel-pipeline + } + + @Test + public void pipelinedParallel() { + //#pipelined-parallel + Flow pancakeChefs1 = + Flow.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape mergeHalfCooked = + b.add(Merge.create(2)); + final UniformFanOutShape dispatchBatter = + b.add(Balance.create(2)); + + // Two chefs work with one frying pan for each, half-frying the pancakes then putting + // them into a common pool + b.from(dispatchBatter.out(0)).via(b.add(fryingPan1)).toInlet(mergeHalfCooked.in(0)); + b.from(dispatchBatter.out(1)).via(b.add(fryingPan1)).toInlet(mergeHalfCooked.in(1)); + + return FlowShape.of(dispatchBatter.in(), mergeHalfCooked.out()); + })); + + Flow pancakeChefs2 = + Flow.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape mergePancakes = + b.add(Merge.create(2)); + final UniformFanOutShape dispatchHalfCooked = + b.add(Balance.create(2)); + + // Two chefs work with one frying pan for each, finishing the pancakes then putting + // them into a common pool + b.from(dispatchHalfCooked.out(0)).via(b.add(fryingPan2)).toInlet(mergePancakes.in(0)); + b.from(dispatchHalfCooked.out(1)).via(b.add(fryingPan2)).toInlet(mergePancakes.in(1)); + + return FlowShape.of(dispatchHalfCooked.in(), mergePancakes.out()); + })); + + Flow kitchen = + pancakeChefs1.via(pancakeChefs2); + //#pipelined-parallel + } +} \ No newline at end of file diff --git a/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java 
b/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java new file mode 100644 index 0000000000..f486e4e420 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/FlowStagesDocTest.java @@ -0,0 +1,257 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import static org.junit.Assert.assertEquals; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.function.Predicate; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; + +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.stage.*; +import akka.stream.testkit.*; +import akka.stream.testkit.javadsl.*; +import akka.testkit.JavaTestKit; + +public class FlowStagesDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowStagesDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + static //#one-to-one + public class Map extends PushPullStage { + private final Function f; + public Map(Function f) { + this.f = f; + } + + @Override public SyncDirective onPush(A elem, Context ctx) { + return ctx.push(f.apply(elem)); + } + + @Override public SyncDirective onPull(Context ctx) { + return ctx.pull(); + } + } + //#one-to-one + + static //#many-to-one + public class Filter extends PushPullStage { + private final Predicate p; + public Filter(Predicate p) { + this.p = p; + } + + @Override public SyncDirective onPush(A elem, Context ctx) { + if (p.test(elem)) return ctx.push(elem); + else return ctx.pull(); + } + + @Override public SyncDirective onPull(Context ctx) { + return ctx.pull(); + } + } + //#many-to-one + + //#one-to-many + class Duplicator extends PushPullStage { + private A lastElem = null; + private boolean oneLeft = false; + + @Override public SyncDirective onPush(A elem, Context ctx) { + lastElem = elem; + oneLeft = true; + return ctx.push(elem); + } + + @Override public SyncDirective onPull(Context ctx) { + if (!ctx.isFinishing()) { + // the main pulling logic is below as it is demonstrated on the illustration + if (oneLeft) { + oneLeft = false; + return ctx.push(lastElem); + } else + return ctx.pull(); + } else { + // If we need to emit a final element after the upstream + // finished + if (oneLeft) return ctx.pushAndFinish(lastElem); + else return ctx.finish(); + } + } + + @Override public TerminationDirective onUpstreamFinish(Context ctx) { + return ctx.absorbTermination(); + } + + } + //#one-to-many + + static//#pushstage + public class Map2 extends PushStage { + private final Function f; + public Map2(Function f) { + this.f = f; + } + + @Override public SyncDirective onPush(A elem, Context ctx) { + return ctx.push(f.apply(elem)); + } + } + + public class Filter2 extends PushStage { + private final Predicate p; + public Filter2(Predicate p) { + this.p = p; + } + + @Override public SyncDirective onPush(A elem, Context ctx) { + if (p.test(elem)) return ctx.push(elem); + else return ctx.pull(); + } + } + //#pushstage + + static //#doubler-stateful 
+ public class Duplicator2 extends StatefulStage { + @Override public StageState initial() { + return new StageState() { + @Override public SyncDirective onPush(A elem, Context ctx) { + return emit(Arrays.asList(elem, elem).iterator(), ctx); + } + }; + } + } + //#doubler-stateful + + @Test + public void demonstrateVariousPushPullStages() throws Exception { + final Sink>> sink = + Flow.of(Integer.class).grouped(10).toMat(Sink.head(), Keep.right()); + + //#stage-chain + final RunnableGraph>> runnable = + Source + .from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) + .transform(() -> new Filter(elem -> elem % 2 == 0)) + .transform(() -> new Duplicator()) + .transform(() -> new Map(elem -> elem / 2)) + .toMat(sink, Keep.right()); + //#stage-chain + + assertEquals(Arrays.asList(1, 1, 2, 2, 3, 3, 4, 4, 5, 5), + runnable.run(mat).toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + //#detached + class Buffer2 extends DetachedStage { + final private Integer SIZE = 2; + final private List buf = new ArrayList<>(SIZE); + private Integer capacity = SIZE; + + private boolean isFull() { + return capacity == 0; + } + + private boolean isEmpty() { + return capacity == SIZE; + } + + private T dequeue() { + capacity += 1; + return buf.remove(0); + } + + private void enqueue(T elem) { + capacity -= 1; + buf.add(elem); + } + + public DownstreamDirective onPull(DetachedContext ctx) { + if (isEmpty()) { + if (ctx.isFinishing()) return ctx.finish(); // No more elements will arrive + else return ctx.holdDownstream(); // waiting until new elements + } else { + final T next = dequeue(); + if (ctx.isHoldingUpstream()) return ctx.pushAndPull(next); // release upstream + else return ctx.push(next); + } + } + + public UpstreamDirective onPush(T elem, DetachedContext ctx) { + enqueue(elem); + if (isFull()) return ctx.holdUpstream(); // Queue is now full, wait until new empty slot + else { + if (ctx.isHoldingDownstream()) return ctx.pushAndPull(dequeue()); // Release downstream + else return ctx.pull(); + } + } + + public TerminationDirective onUpstreamFinish(DetachedContext ctx) { + if (!isEmpty()) return ctx.absorbTermination(); // still need to flush from buffer + else return ctx.finish(); // already empty, finishing + } + } + //#detached + + @Test + public void demonstrateDetachedStage() throws Exception { + final Pair,TestSubscriber.Probe> pair = + TestSource.probe(system) + .transform(() -> new Buffer2()) + .toMat(TestSink.probe(system), Keep.both()) + .run(mat); + + final TestPublisher.Probe pub = pair.first(); + final TestSubscriber.Probe sub = pair.second(); + + final FiniteDuration timeout = Duration.create(100, TimeUnit.MILLISECONDS); + + sub.request(2); + sub.expectNoMsg(timeout); + + pub.sendNext(1); + pub.sendNext(2); + sub.expectNext(1, 2); + + pub.sendNext(3); + pub.sendNext(4); + sub.expectNoMsg(timeout); + + sub.request(2); + sub.expectNext(3, 4); + + pub.sendComplete(); + sub.expectComplete(); + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/GraphCyclesDocTest.java b/akka-docs/rst/java/code/docs/stream/GraphCyclesDocTest.java new file mode 100644 index 0000000000..a5609a8fa2 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/GraphCyclesDocTest.java @@ -0,0 +1,159 @@ +package docs.stream; + +import java.util.Arrays; + +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import akka.actor.ActorSystem; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.scaladsl.MergePreferred.MergePreferredShape; +import 
akka.testkit.JavaTestKit; + + +public class GraphCyclesDocTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create("GraphCyclesDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final static SilenceSystemOut.System System = SilenceSystemOut.get(); + + final Source source = Source.from(Arrays.asList(1, 2, 3, 4, 5)); + + @Test + public void demonstrateDeadlockedCycle() { + //#deadlocked + // WARNING! The graph below deadlocks! + final Flow printFlow = + Flow.of(Integer.class).map(s -> { + System.out.println(s); + return s; + }); + + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape merge = b.add(Merge.create(2)); + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final Outlet src = b.add(source).out(); + final FlowShape printer = b.add(printFlow); + final SinkShape ignore = b.add(Sink.ignore()); + + b.from(src).viaFanIn(merge).via(printer).viaFanOut(bcast).to(ignore); + b.to(merge) .fromFanOut(bcast); + return ClosedShape.getInstance(); + })); + //#deadlocked + } + + @Test + public void demonstrateUnfairCycle() { + final Flow printFlow = + Flow.of(Integer.class).map(s -> { + System.out.println(s); + return s; + }); + //#unfair + // WARNING! The graph below stops consuming from "source" after a few steps + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final MergePreferredShape merge = b.add(MergePreferred.create(1)); + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final Outlet src = b.add(source).out(); + final FlowShape printer = b.add(printFlow); + final SinkShape ignore = b.add(Sink.ignore()); + + b.from(src).viaFanIn(merge).via(printer).viaFanOut(bcast).to(ignore); + b.to(merge.preferred()).fromFanOut(bcast); + return ClosedShape.getInstance(); + })); + //#unfair + } + + @Test + public void demonstrateDroppingCycle() { + final Flow printFlow = + Flow.of(Integer.class).map(s -> { + System.out.println(s); + return s; + }); + //#dropping + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final UniformFanInShape merge = b.add(Merge.create(2)); + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final FlowShape droppyFlow = b.add( + Flow.of(Integer.class).buffer(10, OverflowStrategy.dropHead())); + final Outlet src = b.add(source).out(); + final FlowShape printer = b.add(printFlow); + final SinkShape ignore = b.add(Sink.ignore()); + + b.from(src).viaFanIn(merge).via(printer).viaFanOut(bcast).to(ignore); + b.to(merge).via(droppyFlow).fromFanOut(bcast); + return ClosedShape.getInstance(); + })); + //#dropping + } + + @Test + public void demonstrateZippingCycle() { + final Flow printFlow = + Flow.of(Integer.class).map(s -> { + System.out.println(s); + return s; + }); + //#zipping-dead + // WARNING! 
The graph below never processes any elements + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final FanInShape2 zip = + b.add(ZipWith.create((Integer left, Integer right) -> left)); + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final FlowShape printer = b.add(printFlow); + final SinkShape ignore = b.add(Sink.ignore()); + + b.from(b.add(source)).toInlet(zip.in0()); + b.from(zip.out()).via(printer).viaFanOut(bcast).to(ignore); + b.to(zip.in1()) .fromFanOut(bcast); + return ClosedShape.getInstance(); + })); + //#zipping-dead + } + + @Test + public void demonstrateLiveZippingCycle() { + final Flow printFlow = + Flow.of(Integer.class).map(s -> { + System.out.println(s); + return s; + }); + //#zipping-live + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final FanInShape2 zip = + b.add(ZipWith.create((Integer left, Integer right) -> left)); + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final UniformFanInShape concat = b.add(Concat.create()); + final FlowShape printer = b.add(printFlow); + final SinkShape ignore = b.add(Sink.ignore()); + + b.from(b.add(source)).toInlet(zip.in0()); + b.from(zip.out()).via(printer).viaFanOut(bcast).to(ignore); + b.to(zip.in1()).viaFanIn(concat).from(b.add(Source.single(1))); + b.to(concat).fromFanOut(bcast); + return ClosedShape.getInstance(); + })); + //#zipping-live + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java b/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java new file mode 100644 index 0000000000..9d3425c5e6 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/GraphStageDocTest.java @@ -0,0 +1,703 @@ +package docs.stream; + +import akka.Done; +import akka.NotUsed; +import akka.actor.ActorSystem; +//#imports +import akka.dispatch.Futures; +import akka.dispatch.Mapper; +import akka.dispatch.OnSuccess; +import akka.japi.Option; +import akka.japi.Predicate; +import akka.japi.function.Procedure; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.stage.*; +//#imports +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.testkit.JavaTestKit; +import akka.japi.Function; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.reactivestreams.Subscription; +import scala.Tuple2; +import scala.concurrent.Await; +import scala.concurrent.ExecutionContext; +import scala.concurrent.Future; +import scala.concurrent.Promise; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; + +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; + +public class GraphStageDocTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("FlowGraphDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + + //#simple-source + public class NumbersSource extends GraphStage> { + // Define the (sole) output port of this stage + public final Outlet out = Outlet.create("NumbersSource.out"); + + // Define the shape of this stage, which is SourceShape with the port we defined above + private final SourceShape shape = SourceShape.of(out); + @Override + public SourceShape shape() { + return shape; + } + + // This is where the actual 
(possibly stateful) logic is created + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape()) { + // All state MUST be inside the GraphStageLogic, + // never inside the enclosing GraphStage. + // This state is safe to access and modify from all the + // callbacks that are provided by GraphStageLogic and the + // registered handlers. + private int counter = 1; + + { + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + push(out, counter); + counter += 1; + } + }); + } + + }; + } + + } + //#simple-source + + + @Test + public void demonstrateCustomSourceUsage() throws Exception { + //#simple-source-usage + // A GraphStage is a proper Graph, just like what GraphDSL.create would return + Graph, NotUsed> sourceGraph = new NumbersSource(); + + // Create a Source from the Graph to access the DSL + Source mySource = Source.fromGraph(sourceGraph); + + // Returns 55 + CompletionStage result1 = mySource.take(10).runFold(0, (sum, next) -> sum + next, mat); + + // The source is reusable. This returns 5050 + CompletionStage result2 = mySource.take(100).runFold(0, (sum, next) -> sum + next, mat); + //#simple-source-usage + + assertEquals(result1.toCompletableFuture().get(3, TimeUnit.SECONDS), (Integer) 55); + assertEquals(result2.toCompletableFuture().get(3, TimeUnit.SECONDS), (Integer) 5050); + } + + + //#one-to-one + public class Map extends GraphStage> { + + private final Function f; + + public Map(Function f) { + this.f = f; + } + + public final Inlet in = Inlet.create("Map.in"); + public final Outlet out = Outlet.create("Map.out"); + + private final FlowShape shape = FlowShape.of(in, out); + @Override + public FlowShape shape() { + return shape; + } + + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + try { + push(out, f.apply(grab(in))); + } catch (Exception ex) { + failStage(ex); + } + } + }); + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + }; + } + + } + //#one-to-one + + @Test + public void demonstrateOneToOne() throws Exception { + // tests: + final Graph, NotUsed> stringLength = + Flow.fromGraph(new Map(new Function() { + @Override + public Integer apply(String str) { + return str.length(); + } + })); + + CompletionStage result = + Source.from(Arrays.asList("one", "two", "three")) + .via(stringLength) + .runFold(0, (sum, n) -> sum + n, mat); + + assertEquals(new Integer(11), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + //#many-to-one + public final class Filter extends GraphStage> { + + private final Predicate p; + + public Filter(Predicate p) { + this.p = p; + } + + public final Inlet in = Inlet.create("Filter.in"); + public final Outlet out = Outlet.create("Filter.out"); + + private final FlowShape shape = FlowShape.of(in, out); + + @Override + public FlowShape shape() { + return shape; + } + + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + { + + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + if (p.test(elem)) { + push(out, elem); + } else { + pull(in); + } + } + }); + + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + }; + } + } + //#many-to-one + + @Test + public void 
demonstrateAManyToOneElementGraphStage() throws Exception { + + // tests: + Graph, NotUsed> evenFilter = + Flow.fromGraph(new Filter(n -> n % 2 == 0)); + + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6)) + .via(evenFilter) + .runFold(0, (elem, sum) -> sum + elem, mat); + + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + //#one-to-many + public class Duplicator extends GraphStage> { + + public final Inlet in = Inlet.create("Duplicator.in"); + public final Outlet out = Outlet.create("Duplicator.out"); + + private final FlowShape shape = FlowShape.of(in, out); + + @Override + public FlowShape shape() { + return shape; + } + + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + // Again: note that all mutable state + // MUST be inside the GraphStageLogic + Option lastElem = Option.none(); + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + lastElem = Option.some(elem); + push(out, elem); + } + + @Override + public void onUpstreamFinish() { + if (lastElem.isDefined()) { + emit(out, lastElem.get()); + } + complete(out); + } + }); + + + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + if (lastElem.isDefined()) { + push(out, lastElem.get()); + lastElem = Option.none(); + } else { + pull(in); + } + } + }); + } + }; + } + } + //#one-to-many + + @Test + public void demonstrateAOneToManyElementGraphStage() throws Exception { + // tests: + Graph, NotUsed> duplicator = + Flow.fromGraph(new Duplicator()); + + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3)) + .via(duplicator) + .runFold(0, (n, sum) -> n + sum, mat); + + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + + } + + //#simpler-one-to-many + public class Duplicator2 extends GraphStage> { + + public final Inlet in = Inlet.create("Duplicator.in"); + public final Outlet out = Outlet.create("Duplicator.out"); + + private final FlowShape shape = FlowShape.of(in, out); + + @Override + public FlowShape shape() { + return shape; + } + + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + // this will temporarily suspend this handler until the two elems + // are emitted and then reinstates it + emitMultiple(out, Arrays.asList(elem, elem).iterator()); + } + }); + + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + }; + } + } + //#simpler-one-to-many + + + + @Test + public void demonstrateASimplerOneToManyStage() throws Exception { + // tests: + Graph, NotUsed> duplicator = + Flow.fromGraph(new Duplicator2()); + + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3)) + .via(duplicator) + .runFold(0, (n, sum) -> n + sum, mat); + + assertEquals(new Integer(12), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + @Test + public void demonstrateChainingOfGraphStages() throws Exception { + Graph, CompletionStage> sink = Sink.fold("", (acc, n) -> acc + n.toString()); + + //#graph-stage-chain + CompletionStage resultFuture = Source.from(Arrays.asList(1,2,3,4,5)) + .via(new Filter((n) -> n % 2 == 0)) + .via(new Duplicator()) + .via(new Map((n) -> n / 2)) + .runWith(sink, mat); + + //#graph-stage-chain + + assertEquals("1122", 
resultFuture.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + + //#async-side-channel + // will close upstream when the future completes + public class KillSwitch extends GraphStage> { + + private final CompletionStage switchF; + + public KillSwitch(CompletionStage switchF) { + this.switchF = switchF; + } + + public final Inlet in = Inlet.create("KillSwitch.in"); + public final Outlet out = Outlet.create("KillSwitch.out"); + + private final FlowShape shape = FlowShape.of(in, out); + @Override + public FlowShape shape() { + return shape; + } + + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + push(out, grab(in)); + } + }); + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + + @Override + public void preStart() { + AsyncCallback callback = createAsyncCallback(new Procedure() { + @Override + public void apply(Done param) throws Exception { + completeStage(); + } + }); + + ExecutionContext ec = system.dispatcher(); + switchF.thenAccept(callback::invoke); + } + }; + } + } + //#async-side-channel + + @Test + public void demonstrateAnAsynchronousSideChannel() throws Exception{ + + // tests: + CompletableFuture switchF = new CompletableFuture<>(); + Graph, NotUsed> killSwitch = + Flow.fromGraph(new KillSwitch<>(switchF)); + + ExecutionContext ec = system.dispatcher(); + + CompletionStage valueAfterKill = switchF.thenApply(in -> 4); + + + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3)).concat(Source.fromCompletionStage(valueAfterKill)) + .via(killSwitch) + .runFold(0, (n, sum) -> n + sum, mat); + + switchF.complete(Done.getInstance()); + + assertEquals(new Integer(6), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + + //#timed + // each time an event is pushed through it will trigger a period of silence + public class TimedGate extends GraphStage> { + + private final FiniteDuration silencePeriod; + + public TimedGate(FiniteDuration silencePeriod) { + this.silencePeriod = silencePeriod; + } + + public final Inlet in = Inlet.create("TimedGate.in"); + public final Outlet out = Outlet.create("TimedGate.out"); + + private final FlowShape shape = FlowShape.of(in, out); + @Override + public FlowShape shape() { + return shape; + } + + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new TimerGraphStageLogic(shape) { + + private boolean open = false; + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + if (open) pull(in); + else { + push(out, elem); + open = true; + scheduleOnce("key", silencePeriod); + } + } + }); + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + + @Override + public void onTimer(Object key) { + if (key.equals("key")) { + open = false; + } + } + }; + } + } + //#timed + + public void demonstrateAGraphStageWithATimer() throws Exception { + // tests: + CompletionStage result = + Source.from(Arrays.asList(1, 2, 3)) + .via(new TimedGate<>(Duration.create(2, "seconds"))) + .takeWithin(Duration.create(250, "millis")) + .runFold(0, (n, sum) -> n + sum, mat); + + assertEquals(new Integer(1), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + + //#materialized + public class FirstValue extends GraphStageWithMaterializedValue, CompletionStage> { + + public final Inlet in = 
Inlet.create("FirstValue.in"); + public final Outlet out = Outlet.create("FirstValue.out"); + + private final FlowShape shape = FlowShape.of(in, out); + @Override + public FlowShape shape() { + return shape; + } + + @Override + public Tuple2> createLogicAndMaterializedValue(Attributes inheritedAttributes) { + Promise promise = Futures.promise(); + + GraphStageLogic logic = new GraphStageLogic(shape) { + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + promise.success(elem); + push(out, elem); + + // replace handler with one just forwarding + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + push(out, grab(in)); + } + }); + } + }); + + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + pull(in); + } + }); + } + }; + + return new Tuple2(logic, promise.future()); + } + } + //#materialized + + public void demonstrateACustomMaterializedValue() throws Exception { + // tests: + RunnableGraph> flow = Source.from(Arrays.asList(1, 2, 3)) + .viaMat(new FirstValue(), Keep.right()) + .to(Sink.ignore()); + + CompletionStage result = flow.run(mat); + + assertEquals(new Integer(1), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + + //#detached + public class TwoBuffer extends GraphStage> { + + public final Inlet in = Inlet.create("TwoBuffer.in"); + public final Outlet out = Outlet.create("TwoBuffer.out"); + + private final FlowShape shape = FlowShape.of(in, out); + + @Override + public FlowShape shape() { + return shape; + } + + @Override + public GraphStageLogic createLogic(Attributes inheritedAttributes) { + return new GraphStageLogic(shape) { + + private final int SIZE = 2; + private Queue buffer = new ArrayDeque<>(SIZE); + private boolean downstreamWaiting = false; + + private boolean isBufferFull() { + return buffer.size() == SIZE; + } + + @Override + public void preStart() { + // a detached stage needs to start upstream demand + // itself as it is not triggered by downstream demand + pull(in); + } + + { + setHandler(in, new AbstractInHandler() { + @Override + public void onPush() { + A elem = grab(in); + buffer.add(elem); + if (downstreamWaiting) { + downstreamWaiting = false; + A bufferedElem = buffer.poll(); + push(out, bufferedElem); + } + if (!isBufferFull()) { + pull(in); + } + } + + @Override + public void onUpstreamFinish() { + if (!buffer.isEmpty()) { + // emit the rest if possible + emitMultiple(out, buffer.iterator()); + } + completeStage(); + } + }); + + + setHandler(out, new AbstractOutHandler() { + @Override + public void onPull() { + if (buffer.isEmpty()) { + downstreamWaiting = true; + } else { + A elem = buffer.poll(); + push(out, elem); + } + if (!isBufferFull() && !hasBeenPulled(in)) { + pull(in); + } + } + }); + } + }; + + } + } + //#detached + + + public void demonstrateADetachedGraphStage() throws Exception { + // tests: + CompletionStage result1 = Source.from(Arrays.asList(1, 2, 3)) + .via(new TwoBuffer<>()) + .runFold(0, (acc, n) -> acc + n, mat); + + assertEquals(new Integer(6), result1.toCompletableFuture().get(3, TimeUnit.SECONDS)); + + TestSubscriber.ManualProbe subscriber = TestSubscriber.manualProbe(system); + TestPublisher.Probe publisher = TestPublisher.probe(0, system); + RunnableGraph flow2 = + Source.fromPublisher(publisher) + .via(new TwoBuffer<>()) + .to(Sink.fromSubscriber(subscriber)); + + flow2.run(mat); + + Subscription sub = subscriber.expectSubscription(); + // this happens even though the subscriber has not signalled any demand 
+ publisher.sendNext(1); + publisher.sendNext(2); + + sub.cancel(); + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java b/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java new file mode 100644 index 0000000000..3c785de436 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/IntegrationDocTest.java @@ -0,0 +1,592 @@ +/* + * Copyright (C) 2015-2016 Typesafe Inc. + */ + +package docs.stream; + +import akka.NotUsed; +import akka.actor.*; +import akka.japi.pf.ReceiveBuilder; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; +import akka.testkit.TestProbe; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Author; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Tweet; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicInteger; +import static akka.pattern.PatternsCS.ask; +import static docs.stream.TwitterStreamQuickstartDocTest.Model.AKKA; +import static docs.stream.TwitterStreamQuickstartDocTest.Model.tweets; +import static junit.framework.TestCase.assertTrue; + +public class IntegrationDocTest { + + private static final SilenceSystemOut.System System = SilenceSystemOut.get(); + + static ActorSystem system; + + @BeforeClass + public static void setup() { + final Config config = ConfigFactory.parseString("" + + "blocking-dispatcher { \n" + + " executor = thread-pool-executor \n" + + " thread-pool-executor { \n" + + " core-pool-size-min = 10 \n" + + " core-pool-size-max = 10 \n" + + " } \n" + + "} \n" + + "akka.actor.default-mailbox.mailbox-type = akka.dispatch.UnboundedMailbox\n"); + + system = ActorSystem.create("ActorPublisherDocTest", config); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + class AddressSystem { + //#email-address-lookup + public CompletionStage> lookupEmail(String handle) + //#email-address-lookup + { + return CompletableFuture.completedFuture(Optional.of(handle + "@somewhere.com")); + } + + //#phone-lookup + public CompletionStage> lookupPhoneNumber(String handle) + //#phone-lookup + { + return CompletableFuture.completedFuture(Optional.of("" + handle.hashCode())); + } + } + + class AddressSystem2 { + //#email-address-lookup2 + public CompletionStage lookupEmail(String handle) + //#email-address-lookup2 + { + return CompletableFuture.completedFuture(handle + "@somewhere.com"); + } + } + + static class Email { + public final String to; + public final String title; + public final String body; + + public Email(String to, String title, String body) { + this.to = to; + this.title = title; + this.body = body; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Email email = (Email) o; + + if (body != null ? !body.equals(email.body) : email.body != null) { + return false; + } + if (title != null ? !title.equals(email.title) : email.title != null) { + return false; + } + if (to != null ? 
!to.equals(email.to) : email.to != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = to != null ? to.hashCode() : 0; + result = 31 * result + (title != null ? title.hashCode() : 0); + result = 31 * result + (body != null ? body.hashCode() : 0); + return result; + } + } + + static class TextMessage { + public final String to; + public final String body; + + TextMessage(String to, String body) { + this.to = to; + this.body = body; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TextMessage that = (TextMessage) o; + + if (body != null ? !body.equals(that.body) : that.body != null) { + return false; + } + if (to != null ? !to.equals(that.to) : that.to != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = to != null ? to.hashCode() : 0; + result = 31 * result + (body != null ? body.hashCode() : 0); + return result; + } + } + + static class EmailServer { + public final ActorRef probe; + + public EmailServer(ActorRef probe) { + this.probe = probe; + } + + //#email-server-send + public CompletionStage send(Email email) { + // ... + //#email-server-send + probe.tell(email.to, ActorRef.noSender()); + return CompletableFuture.completedFuture(email); + //#email-server-send + } + //#email-server-send + } + + + static class SmsServer { + public final ActorRef probe; + + public SmsServer(ActorRef probe) { + this.probe = probe; + } + + //#sms-server-send + public boolean send(TextMessage text) { + // ... + //#sms-server-send + probe.tell(text.to, ActorRef.noSender()); + //#sms-server-send + return true; + } + //#sms-server-send + } + + static class Save { + public final Tweet tweet; + + Save(Tweet tweet) { + this.tweet = tweet; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Save save = (Save) o; + + if (tweet != null ? !tweet.equals(save.tweet) : save.tweet != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return tweet != null ? 
tweet.hashCode() : 0; + } + } + static class SaveDone { + public static SaveDone INSTANCE = new SaveDone(); + private SaveDone() { + } + } + + + static class DatabaseService extends AbstractActor { + public final ActorRef probe; + + DatabaseService(ActorRef probe) { + this.probe = probe; + + receive(ReceiveBuilder.match(Save.class, s -> { + probe.tell(s.tweet.author.handle, ActorRef.noSender()); + sender().tell(SaveDone.INSTANCE, self()); + }).build()); + } + } + + //#sometimes-slow-service + static class SometimesSlowService { + private final Executor ec; + + public SometimesSlowService(Executor ec) { + this.ec = ec; + } + + private final AtomicInteger runningCount = new AtomicInteger(); + + public CompletionStage convert(String s) { + System.out.println("running: " + s + "(" + runningCount.incrementAndGet() + ")"); + return CompletableFuture.supplyAsync(() -> { + if (!s.isEmpty() && Character.isLowerCase(s.charAt(0))) + try { Thread.sleep(500); } catch (InterruptedException e) {} + else + try { Thread.sleep(20); } catch (InterruptedException e) {} + System.out.println("completed: " + s + "(" + runningCount.decrementAndGet() + ")"); + return s.toUpperCase(); + }, ec); + } + } + //#sometimes-slow-service + + + @Test + public void callingExternalServiceWithMapAsync() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + final AddressSystem addressSystem = new AddressSystem(); + final EmailServer emailServer = new EmailServer(probe.ref()); + + { + //#tweet-authors + final Source authors = tweets + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + //#tweet-authors + + //#email-addresses-mapAsync + final Source emailAddresses = authors + .mapAsync(4, author -> addressSystem.lookupEmail(author.handle)) + .filter(o -> o.isPresent()) + .map(o -> o.get()); + + //#email-addresses-mapAsync + + //#send-emails + final RunnableGraph sendEmails = emailAddresses + .mapAsync(4, address -> + emailServer.send(new Email(address, "Akka", "I like your tweet"))) + .to(Sink.ignore()); + + sendEmails.run(mat); + //#send-emails + + probe.expectMsg("rolandkuhn@somewhere.com"); + probe.expectMsg("patriknw@somewhere.com"); + probe.expectMsg("bantonsson@somewhere.com"); + probe.expectMsg("drewhk@somewhere.com"); + probe.expectMsg("ktosopl@somewhere.com"); + probe.expectMsg("mmartynas@somewhere.com"); + probe.expectMsg("akkateam@somewhere.com"); + } + }; + } + + @Test + @SuppressWarnings("unused") + public void callingExternalServiceWithMapAsyncAndSupervision() throws Exception { + new JavaTestKit(system) { + final AddressSystem2 addressSystem = new AddressSystem2(); + + { + final Source authors = tweets + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + //#email-addresses-mapAsync-supervision + final Attributes resumeAttrib = + ActorAttributes.withSupervisionStrategy(Supervision.getResumingDecider()); + final Flow lookupEmail = + Flow.of(Author.class) + .mapAsync(4, author -> addressSystem.lookupEmail(author.handle)) + .withAttributes(resumeAttrib); + final Source emailAddresses = authors.via(lookupEmail); + + //#email-addresses-mapAsync-supervision + } + }; + } + + @Test + public void callingExternalServiceWithMapAsyncUnordered() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + final AddressSystem addressSystem = new AddressSystem(); + final EmailServer emailServer = new EmailServer(probe.ref()); + + { + //#external-service-mapAsyncUnordered + final Source authors = + tweets + .filter(t 
-> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + final Source emailAddresses = + authors + .mapAsyncUnordered(4, author -> addressSystem.lookupEmail(author.handle)) + .filter(o -> o.isPresent()) + .map(o -> o.get()); + + final RunnableGraph sendEmails = + emailAddresses + .mapAsyncUnordered(4, address -> + emailServer.send(new Email(address, "Akka", "I like your tweet"))) + .to(Sink.ignore()); + + sendEmails.run(mat); + //#external-service-mapAsyncUnordered + } + }; + } + + @Test + public void carefulManagedBlockingWithMapAsync() throws Exception { + new JavaTestKit(system) { + final AddressSystem addressSystem = new AddressSystem(); + final EmailServer emailServer = new EmailServer(getRef()); + final SmsServer smsServer = new SmsServer(getRef()); + + { + final Source authors = + tweets + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + final Source phoneNumbers = authors.mapAsync(4, author -> addressSystem.lookupPhoneNumber(author.handle)) + .filter(o -> o.isPresent()) + .map(o -> o.get()); + + //#blocking-mapAsync + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + + final RunnableGraph sendTextMessages = + phoneNumbers + .mapAsync(4, phoneNo -> CompletableFuture.supplyAsync(() -> + smsServer.send(new TextMessage(phoneNo, "I like your tweet")), blockingEc)) + .to(Sink.ignore()); + + sendTextMessages.run(mat); + //#blocking-mapAsync + + final Object[] got = receiveN(7); + final Set set = new HashSet<>(Arrays.asList(got)); + + assertTrue(set.contains(String.valueOf("rolandkuhn".hashCode()))); + assertTrue(set.contains(String.valueOf("patriknw".hashCode()))); + assertTrue(set.contains(String.valueOf("bantonsson".hashCode()))); + assertTrue(set.contains(String.valueOf("drewhk".hashCode()))); + assertTrue(set.contains(String.valueOf("ktosopl".hashCode()))); + assertTrue(set.contains(String.valueOf("mmartynas".hashCode()))); + assertTrue(set.contains(String.valueOf("akkateam".hashCode()))); + } + }; + } + + @Test + public void carefulManagedBlockingWithMap() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + final AddressSystem addressSystem = new AddressSystem(); + final EmailServer emailServer = new EmailServer(probe.ref()); + final SmsServer smsServer = new SmsServer(probe.ref()); + + { + final Source authors = + tweets + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + final Source phoneNumbers = authors.mapAsync(4, author -> addressSystem.lookupPhoneNumber(author.handle)) + .filter(o -> o.isPresent()) + .map(o -> o.get()); + + //#blocking-map + final Flow send = + Flow.of(String.class) + .map(phoneNo -> smsServer.send(new TextMessage(phoneNo, "I like your tweet"))) + .withAttributes(ActorAttributes.dispatcher("blocking-dispatcher")); + final RunnableGraph sendTextMessages = + phoneNumbers.via(send).to(Sink.ignore()); + + sendTextMessages.run(mat); + //#blocking-map + + probe.expectMsg(String.valueOf("rolandkuhn".hashCode())); + probe.expectMsg(String.valueOf("patriknw".hashCode())); + probe.expectMsg(String.valueOf("bantonsson".hashCode())); + probe.expectMsg(String.valueOf("drewhk".hashCode())); + probe.expectMsg(String.valueOf("ktosopl".hashCode())); + probe.expectMsg(String.valueOf("mmartynas".hashCode())); + probe.expectMsg(String.valueOf("akkateam".hashCode())); + } + }; + } + + @Test + public void callingActorServiceWithMapAsync() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + final EmailServer emailServer 
= new EmailServer(probe.ref()); + + final ActorRef database = system.actorOf(Props.create(DatabaseService.class, probe.ref()), "db"); + + { + //#save-tweets + final Source akkaTweets = tweets.filter(t -> t.hashtags().contains(AKKA)); + + final RunnableGraph saveTweets = + akkaTweets + .mapAsync(4, tweet -> ask(database, new Save(tweet), 300)) + .to(Sink.ignore()); + //#save-tweets + + saveTweets.run(mat); + + probe.expectMsg("rolandkuhn"); + probe.expectMsg("patriknw"); + probe.expectMsg("bantonsson"); + probe.expectMsg("drewhk"); + probe.expectMsg("ktosopl"); + probe.expectMsg("mmartynas"); + probe.expectMsg("akkateam"); + } + }; + } + + @Test + public void illustrateOrderingAndParallelismOfMapAsync() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + final EmailServer emailServer = new EmailServer(probe.ref()); + + class MockSystem { + class Println { + public void println(T s) { + if (s.toString().startsWith("after:")) + probe.ref().tell(s, ActorRef.noSender()); + } + } + + public final Println out = new Println(); + } + private final MockSystem System = new MockSystem(); + + { + //#sometimes-slow-mapAsync + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + final SometimesSlowService service = new SometimesSlowService(blockingEc); + + final ActorMaterializer mat = ActorMaterializer.create( + ActorMaterializerSettings.create(system).withInputBuffer(4, 4), system); + + Source.from(Arrays.asList("a", "B", "C", "D", "e", "F", "g", "H", "i", "J")) + .map(elem -> { System.out.println("before: " + elem); return elem; }) + .mapAsync(4, service::convert) + .runForeach(elem -> System.out.println("after: " + elem), mat); + //#sometimes-slow-mapAsync + + probe.expectMsg("after: A"); + probe.expectMsg("after: B"); + probe.expectMsg("after: C"); + probe.expectMsg("after: D"); + probe.expectMsg("after: E"); + probe.expectMsg("after: F"); + probe.expectMsg("after: G"); + probe.expectMsg("after: H"); + probe.expectMsg("after: I"); + probe.expectMsg("after: J"); + } + }; + } + + @Test + public void illustrateOrderingAndParallelismOfMapAsyncUnordered() throws Exception { + new JavaTestKit(system) { + final EmailServer emailServer = new EmailServer(getRef()); + + class MockSystem { + class Println { + public void println(T s) { + if (s.toString().startsWith("after:")) + getRef().tell(s, ActorRef.noSender()); + } + } + + public final Println out = new Println(); + } + private final MockSystem System = new MockSystem(); + + { + //#sometimes-slow-mapAsyncUnordered + final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher"); + final SometimesSlowService service = new SometimesSlowService(blockingEc); + + final ActorMaterializer mat = ActorMaterializer.create( + ActorMaterializerSettings.create(system).withInputBuffer(4, 4), system); + + Source.from(Arrays.asList("a", "B", "C", "D", "e", "F", "g", "H", "i", "J")) + .map(elem -> { System.out.println("before: " + elem); return elem; }) + .mapAsyncUnordered(4, service::convert) + .runForeach(elem -> System.out.println("after: " + elem), mat); + //#sometimes-slow-mapAsyncUnordered + + final Object[] got = receiveN(10); + final Set set = new HashSet<>(Arrays.asList(got)); + + assertTrue(set.contains("after: A")); + assertTrue(set.contains("after: B")); + assertTrue(set.contains("after: C")); + assertTrue(set.contains("after: D")); + assertTrue(set.contains("after: E")); + assertTrue(set.contains("after: F")); + assertTrue(set.contains("after: G")); + 
assertTrue(set.contains("after: H")); + assertTrue(set.contains("after: I")); + assertTrue(set.contains("after: J")); + } + }; + } + + +} diff --git a/akka-docs/rst/java/code/docs/stream/MigrationsJava.java b/akka-docs/rst/java/code/docs/stream/MigrationsJava.java new file mode 100644 index 0000000000..7ba4fd2516 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/MigrationsJava.java @@ -0,0 +1,32 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import java.util.stream.Stream; + +import akka.japi.Pair; +import akka.stream.javadsl.*; +//#asPublisher-import +import static akka.stream.javadsl.AsPublisher.*; +//#asPublisher-import + +public class MigrationsJava { + + public static void main(String[] args) { + //#expand-continually + Flow.of(Integer.class).expand(in -> Stream.iterate(in, i -> i).iterator()); + //#expand-continually + //#expand-state + Flow.of(Integer.class).expand(in -> + Stream.iterate(new Pair<>(in, 0), + p -> new Pair<>(in, p.second() + 1)).iterator()); + //#expand-state + + //#asPublisher + Sink.asPublisher(WITH_FANOUT); // instead of Sink.asPublisher(true) + Sink.asPublisher(WITHOUT_FANOUT); // instead of Sink.asPublisher(false) + //#asPublisher + } + +} \ No newline at end of file diff --git a/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java b/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java new file mode 100644 index 0000000000..99426ad81b --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/RateTransformationDocTest.java @@ -0,0 +1,159 @@ +/* + * Copyright (C) 2015-2016 Typesafe Inc. + */ + +package docs.stream; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.Stream; + +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.japi.tuple.Tuple3; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.stream.testkit.javadsl.TestSource; +import akka.testkit.JavaTestKit; +import akka.testkit.TestLatch; +import scala.collection.Iterator; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import scala.util.Random; + +public class RateTransformationDocTest { + + private static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RateTransformationDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final Random r = new Random(); + + @Test + public void conflateShouldSummarize() throws Exception { + //#conflate-summarize + final Flow, NotUsed> statsFlow = + Flow.of(Double.class) + .conflate(elem -> Collections.singletonList(elem), (acc, elem) -> { + return Stream + .concat(acc.stream(), Collections.singletonList(elem).stream()) + .collect(Collectors.toList()); + }) + .map(s -> { + final Double mean = s.stream().mapToDouble(d -> d).sum() / s.size(); + final DoubleStream se = s.stream().mapToDouble(x -> Math.pow(x - mean, 2)); + final Double stdDev = 
Math.sqrt(se.sum() / s.size()); + return new Tuple3<>(stdDev, mean, s.size()); + }); + //#conflate-summarize + + final CompletionStage>> fut = Source.repeat(0).map(i -> r.nextGaussian()) + .via(statsFlow) + .grouped(10) + .runWith(Sink.head(), mat); + + fut.toCompletableFuture().get(1, TimeUnit.SECONDS); + } + + @Test + public void conflateShouldSample() throws Exception { + //#conflate-sample + final Double p = 0.01; + final Flow sampleFlow = Flow.of(Double.class) + .conflate(elem -> Collections.singletonList(elem), (acc, elem) -> { + if (r.nextDouble() < p) { + return Stream + .concat(acc.stream(), Collections.singletonList(elem).stream()) + .collect(Collectors.toList()); + } + return acc; + }) + .mapConcat(d -> d); + //#conflate-sample + + final CompletionStage fut = Source.from(new ArrayList(Collections.nCopies(1000, 1.0))) + .via(sampleFlow) + .runWith(Sink.fold(0.0, (agg, next) -> agg + next), mat); + + final Double count = fut.toCompletableFuture().get(1, TimeUnit.SECONDS); + } + + @Test + public void expandShouldRepeatLast() throws Exception { + //#expand-last + final Flow lastFlow = Flow.of(Double.class) + .expand(in -> Stream.iterate(in, i -> i).iterator()); + //#expand-last + + final Pair, CompletionStage>> probeFut = TestSource. probe(system) + .via(lastFlow) + .grouped(10) + .toMat(Sink.head(), Keep.both()) + .run(mat); + + final TestPublisher.Probe probe = probeFut.first(); + final CompletionStage> fut = probeFut.second(); + probe.sendNext(1.0); + final List expanded = fut.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(expanded.size(), 10); + assertEquals(expanded.stream().mapToDouble(d -> d).sum(), 10, 0.1); + } + + @Test + public void expandShouldTrackDrift() throws Exception { + @SuppressWarnings("unused") + //#expand-drift + final Flow, NotUsed> driftFlow = Flow.of(Double.class) + .expand(d -> Stream.iterate(0, i -> i + 1).map(i -> new Pair<>(d, i)).iterator()); + //#expand-drift + final TestLatch latch = new TestLatch(2, system); + final Flow, NotUsed> realDriftFlow = Flow.of(Double.class) + .expand(d -> { latch.countDown(); return Stream.iterate(0, i -> i + 1).map(i -> new Pair<>(d, i)).iterator(); }); + + final Pair, TestSubscriber.Probe>> pubSub = TestSource. probe(system) + .via(realDriftFlow) + .toMat(TestSink.> probe(system), Keep.both()) + .run(mat); + + final TestPublisher.Probe pub = pubSub.first(); + final TestSubscriber.Probe> sub = pubSub.second(); + + sub.request(1); + pub.sendNext(1.0); + sub.expectNext(new Pair<>(1.0, 0)); + + sub.requestNext(new Pair<>(1.0, 1)); + sub.requestNext(new Pair<>(1.0, 2)); + + pub.sendNext(2.0); + Await.ready(latch, Duration.create(1, TimeUnit.SECONDS)); + sub.requestNext(new Pair<>(2.0, 0)); + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/ReactiveStreamsDocTest.java b/akka-docs/rst/java/code/docs/stream/ReactiveStreamsDocTest.java new file mode 100644 index 0000000000..04bf0f3a3a --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/ReactiveStreamsDocTest.java @@ -0,0 +1,266 @@ +/* + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ + +package docs.stream; + +import akka.NotUsed; +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.japi.function.Creator; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; +import akka.testkit.TestProbe; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Author; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Tweet; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +//#imports +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Processor; +//#imports +import org.reactivestreams.Subscription; + + +import java.lang.Exception; + +import static docs.stream.ReactiveStreamsDocTest.Fixture.Data.authors; +import static docs.stream.TwitterStreamQuickstartDocTest.Model.AKKA; + +public class ReactiveStreamsDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("ReactiveStreamsDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + + static class Fixture { + // below class additionally helps with aligning code includes nicely + static class Data { + + static //#authors + final Flow authors = Flow.of(Tweet.class) + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + + //#authors + } + + static interface RS { + //#tweets-publisher + Publisher tweets(); + //#tweets-publisher + + //#author-storage-subscriber + Subscriber storage(); + //#author-storage-subscriber + + //#author-alert-subscriber + Subscriber alert(); + //#author-alert-subscriber + } + } + + final TestProbe storageProbe = TestProbe.apply(system); + final TestProbe alertProbe = TestProbe.apply(system); + + final Fixture.RS rs = new Fixture.RS() { + @Override + public Publisher tweets() { + return TwitterStreamQuickstartDocTest.Model.tweets.runWith(Sink.asPublisher(AsPublisher.WITHOUT_FANOUT), mat); + } + + /** + * This is a minimal version of SubscriberProbe, + * which lives in akka-stream-testkit (test scope) and for + * now wanted to avoid setting up (test -> compile) dependency for maven). + * + * TODO: Once SubscriberProbe is easily used here replace this MPS with it. 
+ */ + class MinimalProbeSubscriber implements Subscriber { + + private final ActorRef ref; + + public MinimalProbeSubscriber(ActorRef ref) { + this.ref = ref; + } + + @Override + public void onSubscribe(Subscription s) { + s.request(Long.MAX_VALUE); + } + + @Override + public void onNext(T t) { + ref.tell(t, ActorRef.noSender()); + } + + @Override + public void onError(Throwable t) { + ref.tell(t, ActorRef.noSender()); + } + + @Override + public void onComplete() { + ref.tell("complete", ActorRef.noSender()); + } + } + + @Override + public Subscriber storage() { + return new MinimalProbeSubscriber<>(storageProbe.ref()); + } + + @Override + public Subscriber alert() { + return new MinimalProbeSubscriber<>(alertProbe.ref()); + } + }; + + + @Test + public void reactiveStreamsPublisherViaFlowToSubscriber() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + + { + //#connect-all + Source.fromPublisher(rs.tweets()) + .via(authors) + .to(Sink.fromSubscriber(rs.storage())); + //#connect-all + } + }; + } + + @Test + public void flowAsPublisherAndSubscriber() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + + { + //#flow-publisher-subscriber + final Processor processor = + authors.toProcessor().run(mat); + + + rs.tweets().subscribe(processor); + processor.subscribe(rs.storage()); + //#flow-publisher-subscriber + + assertStorageResult(); + } + }; + } + + @Test + public void sourceAsPublisher() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + + { + //#source-publisher + final Publisher authorPublisher = + Source.fromPublisher(rs.tweets()) + .via(authors) + .runWith(Sink.asPublisher(AsPublisher.WITHOUT_FANOUT), mat); + + authorPublisher.subscribe(rs.storage()); + //#source-publisher + + assertStorageResult(); + } + }; + } + + @Test + public void sourceAsFanoutPublisher() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + + { + //#source-fanoutPublisher + final Publisher authorPublisher = + Source.fromPublisher(rs.tweets()) + .via(authors) + .runWith(Sink.asPublisher(AsPublisher.WITH_FANOUT), mat); + + authorPublisher.subscribe(rs.storage()); + authorPublisher.subscribe(rs.alert()); + //#source-fanoutPublisher + + assertStorageResult(); + } + }; + } + + @Test + public void sinkAsSubscriber() throws Exception { + new JavaTestKit(system) { + final TestProbe probe = new TestProbe(system); + + { + //#sink-subscriber + final Subscriber storage = rs.storage(); + + final Subscriber tweetSubscriber = + authors + .to(Sink.fromSubscriber(storage)) + .runWith(Source.asSubscriber(), mat); + + rs.tweets().subscribe(tweetSubscriber); + //#sink-subscriber + + assertStorageResult(); + } + }; + } + + @Test + public void useProcessor() throws Exception { + new JavaTestKit(system) { + { + //#use-processor + // An example Processor factory + final Creator> factory = + new Creator>() { + public Processor create() { + return Flow.of(Integer.class).toProcessor().run(mat); + } + }; + + final Flow flow = Flow.fromProcessor(factory); + + //#use-processor + } + }; + } + + void assertStorageResult() { + storageProbe.expectMsg(new Author("rolandkuhn")); + storageProbe.expectMsg(new Author("patriknw")); + storageProbe.expectMsg(new Author("bantonsson")); + storageProbe.expectMsg(new Author("drewhk")); + storageProbe.expectMsg(new Author("ktosopl")); + storageProbe.expectMsg(new Author("mmartynas")); + storageProbe.expectMsg(new 
Author("akkateam")); + storageProbe.expectMsg("complete"); + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/SilenceSystemOut.java b/akka-docs/rst/java/code/docs/stream/SilenceSystemOut.java new file mode 100644 index 0000000000..b4b17162ea --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/SilenceSystemOut.java @@ -0,0 +1,64 @@ +package docs.stream; + +import akka.actor.ActorRef; + +import java.util.function.Predicate; + +/** + * Acts as if `System.out.println()` yet swallows all messages. Useful for putting printlines in examples yet without poluting the build with them. + */ +public class SilenceSystemOut { + + private SilenceSystemOut() { + } + + public static System get() { + return new System(new System.Println() { + @Override + public void println(String s) { + // ignore + } + }); + } + + public static System get(ActorRef probe) { + return new System(new System.Println() { + @Override + public void println(String s) { + probe.tell(s, ActorRef.noSender()); + } + }); + } + + public static System get(Predicate filter, ActorRef probe) { + return new System(new System.Println() { + @Override + public void println(String s) { + if (filter.test(s)) + probe.tell(s, ActorRef.noSender()); + } + }); + } + + public static class System { + public final Println out; + + public System(Println out) { + this.out = out; + } + + public static abstract class Println { + public abstract void println(String s); + + public void println(Object s) { + println(s.toString()); + } + + public void printf(String format, Object... args) { + println(String.format(format, args)); + } + } + + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/StreamBuffersRateDocTest.java b/akka-docs/rst/java/code/docs/stream/StreamBuffersRateDocTest.java new file mode 100644 index 0000000000..a82d94e42b --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/StreamBuffersRateDocTest.java @@ -0,0 +1,128 @@ +/** + * Copyright (C) 2014-2016 Typesafe Inc. 
+ */ +package docs.stream; + +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import scala.concurrent.duration.FiniteDuration; +import akka.actor.ActorSystem; +import akka.actor.Cancellable; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; + +public class StreamBuffersRateDocTest { + + static class Job {} + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("StreamBufferRateDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final SilenceSystemOut.System System = SilenceSystemOut.get(); + + @Test + public void demonstratePipelining() { + //#pipelining + Source.from(Arrays.asList(1, 2, 3)) + .map(i -> {System.out.println("A: " + i); return i;}) + .map(i -> {System.out.println("B: " + i); return i;}) + .map(i -> {System.out.println("C: " + i); return i;}) + .runWith(Sink.ignore(), mat); + //#pipelining + } + + @Test + @SuppressWarnings("unused") + public void demonstrateBufferSizes() { + //#materializer-buffer + final Materializer materializer = ActorMaterializer.create( + ActorMaterializerSettings.create(system) + .withInputBuffer(64, 64), system); + //#materializer-buffer + + //#section-buffer + final Flow flow1 = + Flow.of(Integer.class) + .map(elem -> elem * 2) // the buffer size of this map is 1 + .withAttributes(Attributes.inputBuffer(1, 1)); + final Flow flow2 = + flow1.via( + Flow.of(Integer.class) + .map(elem -> elem / 2)); // the buffer size of this map is the default + //#section-buffer + } + + @Test + public void demonstrateBufferAbstractionLeak() { + //#buffering-abstraction-leak + final FiniteDuration oneSecond = + FiniteDuration.create(1, TimeUnit.SECONDS); + final Source msgSource = + Source.tick(oneSecond, oneSecond, "message!"); + final Source tickSource = + Source.tick(oneSecond.mul(3), oneSecond.mul(3), "tick"); + final Flow conflate = + Flow.of(String.class).conflate( + first -> 1, (count, elem) -> count + 1); + + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final FanInShape2 zipper = + b.add(ZipWith.create((String tick, Integer count) -> count)); + b.from(b.add(msgSource)).via(b.add(conflate)).toInlet(zipper.in1()); + b.from(b.add(tickSource)).toInlet(zipper.in0()); + b.from(zipper.out()).to(b.add(Sink.foreach(elem -> System.out.println(elem)))); + return ClosedShape.getInstance(); + })).run(mat); + //#buffering-abstraction-leak + } + + @Test + public void demonstrateExplicitBuffers() { + final Source inboundJobsConnector = Source.empty(); + //#explicit-buffers-backpressure + // Getting a stream of jobs from an imaginary external system as a Source + final Source jobs = inboundJobsConnector; + jobs.buffer(1000, OverflowStrategy.backpressure()); + //#explicit-buffers-backpressure + + //#explicit-buffers-droptail + jobs.buffer(1000, OverflowStrategy.dropTail()); + //#explicit-buffers-droptail + + //#explicit-buffers-dropnew + jobs.buffer(1000, OverflowStrategy.dropNew()); + //#explicit-buffers-dropnew + + //#explicit-buffers-drophead + jobs.buffer(1000, OverflowStrategy.dropHead()); + //#explicit-buffers-drophead + + //#explicit-buffers-dropbuffer + jobs.buffer(1000, OverflowStrategy.dropBuffer()); + //#explicit-buffers-dropbuffer + + //#explicit-buffers-fail + jobs.buffer(1000, OverflowStrategy.fail()); + 
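+    // fail() makes the stream fail instead of dropping elements
+    // once more than 1000 jobs have accumulated in the buffer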
//#explicit-buffers-fail + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java b/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java new file mode 100644 index 0000000000..c1504b65b8 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/StreamPartialFlowGraphDocTest.java @@ -0,0 +1,178 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream; + +import static org.junit.Assert.assertEquals; + +import java.util.*; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import akka.Done; +import akka.NotUsed; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import akka.actor.*; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; + +public class StreamPartialFlowGraphDocTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create("StreamPartialFlowGraphDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void demonstrateBuildWithOpenPorts() throws Exception { + //#simple-partial-flow-graph + final Graph, NotUsed> zip = + ZipWith.create((Integer left, Integer right) -> Math.max(left, right)); + + final Graph, NotUsed> pickMaxOfThree = + GraphDSL.create(builder -> { + final FanInShape2 zip1 = builder.add(zip); + final FanInShape2 zip2 = builder.add(zip); + + builder.from(zip1.out()).toInlet(zip2.in0()); + // return the shape, which has three inputs and one output + return new UniformFanInShape(zip2.out(), + new Inlet[] {zip1.in0(), zip1.in1(), zip2.in1()}); + }); + + final Sink> resultSink = Sink.head(); + + final RunnableGraph> g = + RunnableGraph.>fromGraph( + GraphDSL.create(resultSink, (builder, sink) -> { + // import the partial flow graph explicitly + final UniformFanInShape pm = builder.add(pickMaxOfThree); + + builder.from(builder.add(Source.single(1))).toInlet(pm.in(0)); + builder.from(builder.add(Source.single(2))).toInlet(pm.in(1)); + builder.from(builder.add(Source.single(3))).toInlet(pm.in(2)); + builder.from(pm.out()).to(sink); + return ClosedShape.getInstance(); + })); + + final CompletionStage max = g.run(mat); + //#simple-partial-flow-graph + assertEquals(Integer.valueOf(3), max.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + //#source-from-partial-flow-graph + // first create an indefinite source of integer numbers + class Ints implements Iterator { + private int next = 0; + @Override + public boolean hasNext() { + return true; + } + @Override + public Integer next() { + return next++; + } + } + //#source-from-partial-flow-graph + + @Test + public void demonstrateBuildSourceFromPartialFlowGraphCreate() throws Exception { + //#source-from-partial-flow-graph + final Source ints = Source.fromIterator(() -> new Ints()); + + final Source, NotUsed> pairs = Source.fromGraph( + GraphDSL.create( + builder -> { + final FanInShape2> zip = + builder.add(Zip.create()); + + builder.from(builder.add(ints.filter(i -> i % 2 == 0))).toInlet(zip.in0()); + builder.from(builder.add(ints.filter(i -> i % 2 == 1))).toInlet(zip.in1()); + + return SourceShape.of(zip.out()); + })); + + final CompletionStage> firstPair = + pairs.runWith(Sink.>head(), mat); + 
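+      // zipping the even and odd sub-streams pairs up consecutive numbers,
+      // so the first emitted pair is (0, 1)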
//#source-from-partial-flow-graph + assertEquals(new Pair<>(0, 1), firstPair.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + @Test + public void demonstrateBuildFlowFromPartialFlowGraphCreate() throws Exception { + //#flow-from-partial-flow-graph + final Flow, NotUsed> pairs = Flow.fromGraph(GraphDSL.create( + b -> { + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final FanInShape2> zip = + b.add(Zip.create()); + + b.from(bcast).toInlet(zip.in0()); + b.from(bcast).via(b.add(Flow.of(Integer.class).map(i -> i.toString()))).toInlet(zip.in1()); + + return FlowShape.of(bcast.in(), zip.out()); + })); + + //#flow-from-partial-flow-graph + final CompletionStage> matSink = + //#flow-from-partial-flow-graph + Source.single(1).via(pairs).runWith(Sink.>head(), mat); + //#flow-from-partial-flow-graph + + assertEquals(new Pair<>(1, "1"), matSink.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + + @Test + public void demonstrateBuildSourceWithCombine() throws Exception { + //#source-combine + Source source1 = Source.single(1); + Source source2 = Source.single(2); + + final Source sources = Source.combine(source1, source2, new ArrayList<>(), + i -> Merge.create(i)); + //#source-combine + final CompletionStage result= + //#source-combine + sources.runWith(Sink.fold(0, (a,b) -> a + b), mat); + //#source-combine + + assertEquals(Integer.valueOf(3), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + + @Test + public void demonstrateBuildSinkWithCombine() throws Exception { + final JavaTestKit probe = new JavaTestKit(system); + ActorRef actorRef = probe.getRef(); + + //#sink-combine + Sink sendRemotely = Sink.actorRef(actorRef, "Done"); + Sink> localProcessing = Sink.foreach(a -> { /*do something useful*/ } ); + Sink sinks = Sink.combine(sendRemotely,localProcessing, new ArrayList<>(), a -> Broadcast.create(a)); + + Source.from(Arrays.asList(new Integer[]{0, 1, 2})).runWith(sinks, mat); + //#sink-combine + probe.expectMsgEquals(0); + probe.expectMsgEquals(1); + probe.expectMsgEquals(2); + } +} diff --git a/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java b/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java new file mode 100644 index 0000000000..fe906a9514 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/StreamTestKitDocTest.java @@ -0,0 +1,233 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package docs.stream; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import akka.NotUsed; +import org.junit.*; +import static org.junit.Assert.assertEquals; + +import akka.actor.*; +import akka.testkit.*; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.testkit.*; +import akka.stream.testkit.javadsl.*; +import akka.testkit.TestProbe; +import scala.util.*; +import scala.concurrent.Await; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; + + +public class StreamTestKitDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("StreamTestKitDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void strictCollection() throws Exception { + //#strict-collection + final Sink> sinkUnderTest = Flow.of(Integer.class) + .map(i -> i * 2) + .toMat(Sink.fold(0, (agg, next) -> agg + next), Keep.right()); + + final CompletionStage future = Source.from(Arrays.asList(1, 2, 3, 4)) + .runWith(sinkUnderTest, mat); + final Integer result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assert(result == 20); + //#strict-collection + } + + @Test + public void groupedPartOfInfiniteStream() throws Exception { + //#grouped-infinite + final Source sourceUnderTest = Source.repeat(1) + .map(i -> i * 2); + + final CompletionStage> future = sourceUnderTest + .grouped(10) + .runWith(Sink.head(), mat); + final List result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(result, Collections.nCopies(10, 2)); + //#grouped-infinite + } + + @Test + public void foldedStream() throws Exception { + //#folded-stream + final Flow flowUnderTest = Flow.of(Integer.class) + .takeWhile(i -> i < 5); + + final CompletionStage future = Source.from(Arrays.asList(1, 2, 3, 4, 5, 6)) + .via(flowUnderTest).runWith(Sink.fold(0, (agg, next) -> agg + next), mat); + final Integer result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assert(result == 10); + //#folded-stream + } + + @Test + public void pipeToTestProbe() throws Exception { + //#pipeto-testprobe + final Source, NotUsed> sourceUnderTest = Source + .from(Arrays.asList(1, 2, 3, 4)) + .grouped(2); + + final TestProbe probe = new TestProbe(system); + final CompletionStage>> future = sourceUnderTest + .grouped(2) + .runWith(Sink.head(), mat); + akka.pattern.PatternsCS.pipe(future, system.dispatcher()).to(probe.ref()); + probe.expectMsg(Duration.create(1, TimeUnit.SECONDS), + Arrays.asList(Arrays.asList(1, 2), Arrays.asList(3, 4)) + ); + //#pipeto-testprobe + } + + public enum Tick { TOCK, COMPLETED }; + + @Test + public void sinkActorRef() throws Exception { + //#sink-actorref + final Source sourceUnderTest = Source.tick( + FiniteDuration.create(0, TimeUnit.MILLISECONDS), + FiniteDuration.create(200, TimeUnit.MILLISECONDS), + Tick.TOCK); + + final TestProbe probe = new TestProbe(system); + final Cancellable cancellable = sourceUnderTest + .to(Sink.actorRef(probe.ref(), Tick.COMPLETED)).run(mat); + probe.expectMsg(Duration.create(1, TimeUnit.SECONDS), Tick.TOCK); + probe.expectNoMsg(Duration.create(100, 
TimeUnit.MILLISECONDS)); + probe.expectMsg(Duration.create(1, TimeUnit.SECONDS), Tick.TOCK); + cancellable.cancel(); + probe.expectMsg(Duration.create(1, TimeUnit.SECONDS), Tick.COMPLETED); + //#sink-actorref + } + + @Test + public void sourceActorRef() throws Exception { + //#source-actorref + final Sink> sinkUnderTest = Flow.of(Integer.class) + .map(i -> i.toString()) + .toMat(Sink.fold("", (agg, next) -> agg + next), Keep.right()); + + final Pair> refAndCompletionStage = + Source.actorRef(8, OverflowStrategy.fail()) + .toMat(sinkUnderTest, Keep.both()) + .run(mat); + final ActorRef ref = refAndCompletionStage.first(); + final CompletionStage future = refAndCompletionStage.second(); + + ref.tell(1, ActorRef.noSender()); + ref.tell(2, ActorRef.noSender()); + ref.tell(3, ActorRef.noSender()); + ref.tell(new akka.actor.Status.Success("done"), ActorRef.noSender()); + + final String result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(result, "123"); + //#source-actorref + } + + @Test + public void testSinkProbe() { + //#test-sink-probe + final Source sourceUnderTest = Source.from(Arrays.asList(1, 2, 3, 4)) + .filter(elem -> elem % 2 == 0) + .map(elem -> elem * 2); + + sourceUnderTest + .runWith(TestSink.probe(system), mat) + .request(2) + .expectNext(4, 8) + .expectComplete(); + //#test-sink-probe + } + + @Test + public void testSourceProbe() { + //#test-source-probe + final Sink sinkUnderTest = Sink.cancelled(); + + TestSource.probe(system) + .toMat(sinkUnderTest, Keep.left()) + .run(mat) + .expectCancellation(); + //#test-source-probe + } + + @Test + public void injectingFailure() throws Exception { + //#injecting-failure + final Sink> sinkUnderTest = Sink.head(); + + final Pair, CompletionStage> probeAndCompletionStage = + TestSource.probe(system) + .toMat(sinkUnderTest, Keep.both()) + .run(mat); + final TestPublisher.Probe probe = probeAndCompletionStage.first(); + final CompletionStage future = probeAndCompletionStage.second(); + probe.sendError(new Exception("boom")); + + try { + future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assert false; + } catch (ExecutionException ee) { + final Throwable exception = ee.getCause(); + assertEquals(exception.getMessage(), "boom"); + } + //#injecting-failure + } + + @Test + public void testSourceAndTestSink() throws Exception { + //#test-source-and-sink + final Flow flowUnderTest = Flow.of(Integer.class) + .mapAsyncUnordered(2, sleep -> akka.pattern.PatternsCS.after( + Duration.create(10, TimeUnit.MILLISECONDS), + system.scheduler(), + system.dispatcher(), + CompletableFuture.completedFuture(sleep) + )); + + final Pair, TestSubscriber.Probe> pubAndSub = + TestSource.probe(system) + .via(flowUnderTest) + .toMat(TestSink.probe(system), Keep.both()) + .run(mat); + final TestPublisher.Probe pub = pubAndSub.first(); + final TestSubscriber.Probe sub = pubAndSub.second(); + + sub.request(3); + pub.sendNext(3); + pub.sendNext(2); + pub.sendNext(1); + sub.expectNextUnordered(1, 2, 3); + + pub.sendError(new Exception("Power surge in the linear subroutine C-47!")); + final Throwable ex = sub.expectError(); + assert(ex.getMessage().contains("C-47")); + //#test-source-and-sink + } +} diff --git a/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java b/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java new file mode 100644 index 0000000000..6789e3d572 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/TwitterStreamQuickstartDocTest.java @@ -0,0 +1,362 @@ +/** + * Copyright (C) 2015-2016 
Typesafe Inc. + */ +package docs.stream; + +import akka.Done; +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.dispatch.Foreach; +import akka.japi.JavaPartialFunction; +import akka.testkit.JavaTestKit; +import akka.stream.*; +import akka.stream.javadsl.*; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Author; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Hashtag; +import docs.stream.TwitterStreamQuickstartDocTest.Model.Tweet; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Set; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; + +import static docs.stream.TwitterStreamQuickstartDocTest.Model.AKKA; +import static docs.stream.TwitterStreamQuickstartDocTest.Model.tweets; + +@SuppressWarnings("unused") +public class TwitterStreamQuickstartDocTest { + + static ActorSystem system; + + + @BeforeClass + public static void setup() { + system = ActorSystem.create("SampleActorTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + static abstract class Model { + //#model + public static class Author { + public final String handle; + + public Author(String handle) { + this.handle = handle; + } + + // ... + + //#model + + @Override + public String toString() { + return "Author(" + handle + ")"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Author author = (Author) o; + + if (handle != null ? !handle.equals(author.handle) : author.handle != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return handle != null ? handle.hashCode() : 0; + } + //#model + } + //#model + + //#model + + public static class Hashtag { + public final String name; + + public Hashtag(String name) { + this.name = name; + } + + // ... + //#model + + @Override + public int hashCode() { + return name.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Hashtag other = (Hashtag) obj; + return name.equals(other.name); + } + + @Override + public String toString() { + return "Hashtag(" + name + ")"; + } + //#model + } + //#model + + //#model + + public static class Tweet { + public final Author author; + public final long timestamp; + public final String body; + + public Tweet(Author author, long timestamp, String body) { + this.author = author; + this.timestamp = timestamp; + this.body = body; + } + + public Set hashtags() { + return Arrays.asList(body.split(" ")).stream() + .filter(a -> a.startsWith("#")) + .map(a -> new Hashtag(a)) + .collect(Collectors.toSet()); + } + + // ... 
+ //#model + + @Override + public String toString() { + return "Tweet(" + author + "," + timestamp + "," + body + ")"; + } + + //#model + } + //#model + + //#model + + public static final Hashtag AKKA = new Hashtag("#akka"); + //#model + + public static final Source tweets = Source.from( + Arrays.asList(new Tweet[] { + new Tweet(new Author("rolandkuhn"), System.currentTimeMillis(), "#akka rocks!"), + new Tweet(new Author("patriknw"), System.currentTimeMillis(), "#akka !"), + new Tweet(new Author("bantonsson"), System.currentTimeMillis(), "#akka !"), + new Tweet(new Author("drewhk"), System.currentTimeMillis(), "#akka !"), + new Tweet(new Author("ktosopl"), System.currentTimeMillis(), "#akka on the rocks!"), + new Tweet(new Author("mmartynas"), System.currentTimeMillis(), "wow #akka !"), + new Tweet(new Author("akkateam"), System.currentTimeMillis(), "#akka rocks!"), + new Tweet(new Author("bananaman"), System.currentTimeMillis(), "#bananas rock!"), + new Tweet(new Author("appleman"), System.currentTimeMillis(), "#apples rock!"), + new Tweet(new Author("drama"), System.currentTimeMillis(), "we compared #apples to #oranges!") + })); + } + + static abstract class Example0 { + //#tweet-source + Source tweets; + //#tweet-source + } + + static abstract class Example1 { + //#first-sample + //#materializer-setup + final ActorSystem system = ActorSystem.create("reactive-tweets"); + final Materializer mat = ActorMaterializer.create(system); + //#first-sample + //#materializer-setup + } + + static class Example2 { + public void run(final Materializer mat) throws TimeoutException, InterruptedException, ExecutionException { + //#backpressure-by-readline + final CompletionStage completion = + Source.from(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) + .map(i -> { System.out.println("map => " + i); return i; }) + .runForeach(i -> System.console().readLine("Element = %s continue reading? 
[press enter]\n", i), mat); + + completion.toCompletableFuture().get(1, TimeUnit.SECONDS); + //#backpressure-by-readline + } + } + + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void demonstrateFilterAndMap() { + final SilenceSystemOut.System System = SilenceSystemOut.get(); + + //#first-sample + + //#authors-filter-map + final Source authors = + tweets + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> t.author); + //#first-sample + //#authors-filter-map + + new Object() { + //#authors-collect + JavaPartialFunction collectFunction = + new JavaPartialFunction() { + public Author apply(Tweet t, boolean isCheck) { + if (t.hashtags().contains(AKKA)) { + if (isCheck) return null; // to spare the expensive or side-effecting code + return t.author; + } else { + throw noMatch(); + } + } + }; + + final Source authors = + tweets.collect(collectFunction); + //#authors-collect + }; + + //#first-sample + + //#authors-foreachsink-println + authors.runWith(Sink.foreach(a -> System.out.println(a)), mat); + //#first-sample + //#authors-foreachsink-println + + //#authors-foreach-println + authors.runForeach(a -> System.out.println(a), mat); + //#authors-foreach-println + } + + @Test + public void demonstrateMapConcat() { + //#hashtags-mapConcat + final Source hashtags = + tweets.mapConcat(t -> new ArrayList(t.hashtags())); + //#hashtags-mapConcat + } + + static abstract class HiddenDefinitions { + //#flow-graph-broadcast + Sink writeAuthors; + Sink writeHashtags; + //#flow-graph-broadcast + } + + @Test + public void demonstrateBroadcast() { + final Sink> writeAuthors = Sink.ignore(); + final Sink> writeHashtags = Sink.ignore(); + + //#flow-graph-broadcast + RunnableGraph.fromGraph(GraphDSL.create(b -> { + final UniformFanOutShape bcast = b.add(Broadcast.create(2)); + final FlowShape toAuthor = + b.add(Flow.of(Tweet.class).map(t -> t.author)); + final FlowShape toTags = + b.add(Flow.of(Tweet.class).mapConcat(t -> new ArrayList(t.hashtags()))); + final SinkShape authors = b.add(writeAuthors); + final SinkShape hashtags = b.add(writeHashtags); + + b.from(b.add(tweets)).viaFanOut(bcast).via(toAuthor).to(authors); + b.from(bcast).via(toTags).to(hashtags); + return ClosedShape.getInstance(); + })).run(mat); + //#flow-graph-broadcast + } + + long slowComputation(Tweet t) { + try { + // act as if performing some heavy computation + Thread.sleep(500); + } catch (InterruptedException e) {} + return 42; + } + + @Test + public void demonstrateSlowProcessing() { + //#tweets-slow-consumption-dropHead + tweets + .buffer(10, OverflowStrategy.dropHead()) + .map(t -> slowComputation(t)) + .runWith(Sink.ignore(), mat); + //#tweets-slow-consumption-dropHead + } + + @Test + public void demonstrateCountOnFiniteStream() { + //#tweets-fold-count + final Sink> sumSink = + Sink.fold(0, (acc, elem) -> acc + elem); + + final RunnableGraph> counter = + tweets.map(t -> 1).toMat(sumSink, Keep.right()); + + final CompletionStage sum = counter.run(mat); + + sum.thenAcceptAsync(c -> System.out.println("Total tweets processed: " + c), + system.dispatcher()); + //#tweets-fold-count + + new Object() { + //#tweets-fold-count-oneline + final CompletionStage sum = tweets.map(t -> 1).runWith(sumSink, mat); + //#tweets-fold-count-oneline + }; + } + + @Test + public void demonstrateMaterializeMultipleTimes() { + final Source tweetsInMinuteFromNow = tweets; // not really in second, just acting as if + + //#tweets-runnable-flow-materialized-twice + final Sink> sumSink = + Sink.fold(0, (acc, elem) -> acc + 
elem); + final RunnableGraph> counterRunnableGraph = + tweetsInMinuteFromNow + .filter(t -> t.hashtags().contains(AKKA)) + .map(t -> 1) + .toMat(sumSink, Keep.right()); + + // materialize the stream once in the morning + final CompletionStage morningTweetsCount = counterRunnableGraph.run(mat); + // and once in the evening, reusing the blueprint + final CompletionStage eveningTweetsCount = counterRunnableGraph.run(mat); + //#tweets-runnable-flow-materialized-twice + + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java b/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java new file mode 100644 index 0000000000..1febdd92aa --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/io/StreamFileDocTest.java @@ -0,0 +1,86 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream.io; + +import java.io.File; +import java.io.IOException; +import java.util.concurrent.CompletionStage; + +import akka.Done; +import akka.actor.ActorSystem; +import akka.stream.ActorAttributes; +import akka.stream.io.IOResult; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.FileIO; +import docs.stream.SilenceSystemOut; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Future; + +import akka.stream.*; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; + +public class StreamFileDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("StreamFileDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final SilenceSystemOut.System System = SilenceSystemOut.get(); + + { + //#file-source + final File file = new File("example.csv"); + //#file-source + } + + @Test + public void demonstrateMaterializingBytesWritten() throws IOException { + final File file = File.createTempFile(getClass().getName(), ".tmp"); + + try { + //#file-source + Sink> printlnSink = + Sink. foreach(chunk -> System.out.println(chunk.utf8String())); + + CompletionStage ioResult = + FileIO.fromFile(file) + .to(printlnSink) + .run(mat); + //#file-source + } finally { + file.delete(); + } + } + + @Test + public void demonstrateSettingDispatchersInCode() throws IOException { + final File file = File.createTempFile(getClass().getName(), ".tmp"); + + try { + Sink> fileSink = + //#custom-dispatcher-code + FileIO.toFile(file) + .withAttributes(ActorAttributes.dispatcher("custom-blocking-io-dispatcher")); + //#custom-dispatcher-code + } finally { + file.delete(); + } + } + + +} diff --git a/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java b/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java new file mode 100644 index 0000000000..c9367fdf1e --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/io/StreamTcpDocTest.java @@ -0,0 +1,185 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package docs.stream.io; + +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ConcurrentLinkedQueue; + +import akka.NotUsed; +import akka.stream.io.Framing; +import docs.stream.SilenceSystemOut; +import java.net.InetSocketAddress; + +import docs.util.SocketUtils; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Future; + +import akka.actor.ActorSystem; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.javadsl.Tcp.*; +import akka.stream.stage.*; +import akka.testkit.JavaTestKit; +import akka.testkit.TestProbe; +import akka.util.ByteString; + +public class StreamTcpDocTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("StreamTcpDocTest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final SilenceSystemOut.System System = SilenceSystemOut.get(); + + private final ConcurrentLinkedQueue input = new ConcurrentLinkedQueue(); + { + input.add("Hello world"); + input.add("What a lovely day"); + } + + private String readLine(String prompt) { + String s = input.poll(); + return (s == null ? "q": s); + } + + @Test + public void demonstrateSimpleServerConnection() { + { + //#echo-server-simple-bind + // IncomingConnection and ServerBinding imported from Tcp + final Source> connections = + Tcp.get(system).bind("127.0.0.1", 8889); + //#echo-server-simple-bind + } + { + + final InetSocketAddress localhost = SocketUtils.temporaryServerAddress(); + final Source> connections = + Tcp.get(system).bind(localhost.getHostName(), localhost.getPort()); // TODO getHostString in Java7 + + //#echo-server-simple-handle + connections.runForeach(connection -> { + System.out.println("New connection from: " + connection.remoteAddress()); + + final Flow echo = Flow.of(ByteString.class) + .via(Framing.delimiter(ByteString.fromString("\n"), 256, false)) + .map(bytes -> bytes.utf8String()) + .map(s -> s + "!!!\n") + .map(s -> ByteString.fromString(s)); + + connection.handleWith(echo, mat); + }, mat); + //#echo-server-simple-handle + } + } + + @Test + public void actuallyWorkingClientServerApp() { + + final InetSocketAddress localhost = SocketUtils.temporaryServerAddress(); + + final TestProbe serverProbe = new TestProbe(system); + + final Source> connections = + Tcp.get(system).bind(localhost.getHostName(), localhost.getPort()); // TODO getHostString in Java7 + //#welcome-banner-chat-server + connections.runForeach(connection -> { + // server logic, parses incoming commands + final PushStage commandParser = new PushStage() { + @Override public SyncDirective onPush(String elem, Context ctx) { + if (elem.equals("BYE")) + return ctx.finish(); + else + return ctx.push(elem + "!"); + } + }; + + final String welcomeMsg = "Welcome to: " + connection.localAddress() + + " you are: " + connection.remoteAddress() + "!\n"; + + final Source welcome = + Source.single(ByteString.fromString(welcomeMsg)); + final Flow echoFlow = + Flow.of(ByteString.class) + .via(Framing.delimiter(ByteString.fromString("\n"), 256, false)) + .map(bytes -> bytes.utf8String()) + //#welcome-banner-chat-server + .map(command -> { + serverProbe.ref().tell(command, null); + return command; + }) + //#welcome-banner-chat-server + .transform(() -> commandParser) + .map(s -> s + "\n") + .map(s -> ByteString.fromString(s)); + + final Flow serverLogic = + 
Flow.fromGraph(GraphDSL.create(builder -> { + final UniformFanInShape concat = + builder.add(Concat.create()); + final FlowShape echo = builder.add(echoFlow); + + builder + .from(builder.add(welcome)).toFanIn(concat) + .from(echo).toFanIn(concat); + + return FlowShape.of(echo.in(), concat.out()); + })); + + connection.handleWith(serverLogic, mat); + }, mat); + + //#welcome-banner-chat-server + + { + //#repl-client + final Flow> connection = + Tcp.get(system).outgoingConnection("127.0.0.1", 8889); + //#repl-client + } + + { + final Flow> connection = + Tcp.get(system).outgoingConnection(localhost.getHostString(), localhost.getPort()); + //#repl-client + + final PushStage replParser = new PushStage() { + @Override public SyncDirective onPush(String elem, Context ctx) { + if (elem.equals("q")) + return ctx.pushAndFinish(ByteString.fromString("BYE\n")); + else + return ctx.push(ByteString.fromString(elem + "\n")); + } + }; + + final Flow repl = Flow.of(ByteString.class) + .via(Framing.delimiter(ByteString.fromString("\n"), 256, false)) + .map(bytes -> bytes.utf8String()) + .map(text -> {System.out.println("Server: " + text); return "next";}) + .map(elem -> readLine("> ")) + .transform(() -> replParser); + + connection.join(repl).run(mat); + //#repl-client + } + + serverProbe.expectMsg("Hello world"); + serverProbe.expectMsg("What a lovely day"); + serverProbe.expectMsg("BYE"); + } + +} \ No newline at end of file diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java new file mode 100644 index 0000000000..14b278fea7 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeByteStrings.java @@ -0,0 +1,202 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Flow; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.stage.Context; +import akka.stream.stage.PushPullStage; +import akka.stream.stage.PushStage; +import akka.stream.stage.SyncDirective; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.Tuple2; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class RecipeByteStrings extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeByteStrings"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + final Source rawBytes = Source.from(Arrays.asList( + ByteString.fromArray(new byte[] { 1, 2 }), + ByteString.fromArray(new byte[] { 3 }), + ByteString.fromArray(new byte[] { 4, 5, 6 }), + ByteString.fromArray(new byte[] { 7, 8, 9 }))); + + @Test + public void chunker() throws Exception { + new JavaTestKit(system) { + final int CHUNK_LIMIT = 2; + + //#bytestring-chunker + class Chunker extends PushPullStage { + private final int chunkSize; + private ByteString buffer = ByteString.empty(); + + public Chunker(int chunkSize) { + this.chunkSize = 
chunkSize; + } + + @Override + public SyncDirective onPush(ByteString elem, Context ctx) { + buffer = buffer.concat(elem); + return emitChunkOrPull(ctx); + } + + @Override + public SyncDirective onPull(Context ctx) { + return emitChunkOrPull(ctx); + } + + public SyncDirective emitChunkOrPull(Context ctx) { + if (buffer.isEmpty()) { + return ctx.pull(); + } else { + Tuple2 split = buffer.splitAt(chunkSize); + ByteString emit = split._1(); + buffer = split._2(); + return ctx.push(emit); + } + } + } + //#bytestring-chunker + + { + //#bytestring-chunker2 + Source chunksStream = + rawBytes.transform(() -> new Chunker(CHUNK_LIMIT)); + //#bytestring-chunker2 + + CompletionStage> chunksFuture = chunksStream.grouped(10).runWith(Sink.head(), mat); + + List chunks = chunksFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); + + for (ByteString chunk : chunks) { + assertTrue(chunk.size() <= 2); + } + + ByteString sum = ByteString.empty(); + for (ByteString chunk : chunks) { + sum = sum.concat(chunk); + } + assertEquals(sum, ByteString.fromArray(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 })); + } + + }; + } + + @Test + public void limiterShouldWork() throws Exception { + new JavaTestKit(system) { + final int SIZE_LIMIT = 9; + + //#bytes-limiter + class ByteLimiter extends PushStage { + final long maximumBytes; + private int count = 0; + + public ByteLimiter(long maximumBytes) { + this.maximumBytes = maximumBytes; + } + + @Override + public SyncDirective onPush(ByteString chunk, Context ctx) { + count += chunk.size(); + if (count > maximumBytes) { + return ctx.fail(new IllegalStateException("Too much bytes")); + } else { + return ctx.push(chunk); + } + } + } + //#bytes-limiter + + { + //#bytes-limiter2 + Flow limiter = + Flow.of(ByteString.class).transform(() -> new ByteLimiter(SIZE_LIMIT)); + //#bytes-limiter2 + + final Source bytes1 = Source.from(Arrays.asList( + ByteString.fromArray(new byte[] { 1, 2 }), + ByteString.fromArray(new byte[] { 3 }), + ByteString.fromArray(new byte[] { 4, 5, 6 }), + ByteString.fromArray(new byte[] { 7, 8, 9 }))); + + final Source bytes2 = Source.from(Arrays.asList( + ByteString.fromArray(new byte[] { 1, 2 }), + ByteString.fromArray(new byte[] { 3 }), + ByteString.fromArray(new byte[] { 4, 5, 6 }), + ByteString.fromArray(new byte[] { 7, 8, 9, 10 }))); + + List got = bytes1.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + ByteString acc = ByteString.empty(); + for (ByteString b : got) { + acc = acc.concat(b); + } + assertEquals(acc, ByteString.fromArray(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 })); + + boolean thrown = false; + try { + bytes2.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + } catch (IllegalStateException ex) { + thrown = true; + } + + assertTrue("Expected IllegalStateException to be thrown", thrown); + } + }; + } + + @Test + public void compacting() throws Exception { + new JavaTestKit(system) { + { + final Source rawBytes = Source.from(Arrays.asList( + ByteString.fromArray(new byte[] { 1, 2 }), + ByteString.fromArray(new byte[] { 3 }), + ByteString.fromArray(new byte[] { 4, 5, 6 }), + ByteString.fromArray(new byte[] { 7, 8, 9 }))); + + //#compacting-bytestrings + Source compacted = rawBytes.map(bs -> bs.compact()); + //#compacting-bytestrings + + List got = compacted.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + + for (ByteString byteString : got) { + assertTrue(byteString.isCompact()); + } + } + }; + } + +} diff 
--git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java new file mode 100644 index 0000000000..0a9c606f6c --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDigest.java @@ -0,0 +1,103 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.stage.Context; +import akka.stream.stage.PushPullStage; +import akka.stream.stage.SyncDirective; +import akka.stream.stage.TerminationDirective; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; + +public class RecipeDigest extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeDigest"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + //#calculating-digest + public PushPullStage digestCalculator(String algorithm) + throws NoSuchAlgorithmException { + return new PushPullStage() { + final MessageDigest digest = MessageDigest.getInstance(algorithm); + + @Override + public SyncDirective onPush(ByteString chunk, Context ctx) { + digest.update(chunk.toArray()); + return ctx.pull(); + } + + @Override + public SyncDirective onPull(Context ctx) { + if (ctx.isFinishing()) { + return ctx.pushAndFinish(ByteString.fromArray(digest.digest())); + } else { + return ctx.pull(); + } + } + + @Override + public TerminationDirective onUpstreamFinish(Context ctx) { + // If the stream is finished, we need to emit the last element in the onPull block. 
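+     // (onPull then sees ctx.isFinishing() == true and emits the digest with pushAndFinish.)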
+ // It is not allowed to directly emit elements from a termination block + // (onUpstreamFinish or onUpstreamFailure) + return ctx.absorbTermination(); + } + }; + } + //#calculating-digest + + { + Source data = Source.from(Arrays.asList( + ByteString.fromString("abcdbcdecdef"), + ByteString.fromString("defgefghfghighijhijkijkljklmklmnlmnomnopnopq"))); + + //#calculating-digest2 + final Source digest = data + .transform(() -> digestCalculator("SHA-256")); + //#calculating-digest2 + + ByteString got = digest.runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals(ByteString.fromInts( + 0x24, 0x8d, 0x6a, 0x61, + 0xd2, 0x06, 0x38, 0xb8, + 0xe5, 0xc0, 0x26, 0x93, + 0x0c, 0x3e, 0x60, 0x39, + 0xa3, 0x3c, 0xe4, 0x59, + 0x64, 0xff, 0x21, 0x67, + 0xf6, 0xec, 0xed, 0xd4, + 0x19, 0xdb, 0x06, 0xc1), got); + } + }; + } +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java new file mode 100644 index 0000000000..e8824f28f8 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java @@ -0,0 +1,77 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.Done; +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Future; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletionStage; + +public class RecipeDroppyBroadcast extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeLoggingElements"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + //#droppy-bcast + // Makes a sink drop elements if too slow + public Sink> droppySink(Sink> sink, int size) { + return Flow. 
create() + .buffer(size, OverflowStrategy.dropHead()) + .toMat(sink, Keep.right()); + } + //#droppy-bcast + + { + final List nums = new ArrayList<>(); + for (int i = 0; i < 100; i++) { + nums.add(i + 1); + } + + final Sink> mySink1 = Sink.ignore(); + final Sink> mySink2 = Sink.ignore(); + final Sink> mySink3 = Sink.ignore(); + + final Source myData = Source.from(nums); + + //#droppy-bcast2 + RunnableGraph.fromGraph(GraphDSL.create(builder -> { + final int outputCount = 3; + final UniformFanOutShape bcast = + builder.add(Broadcast.create(outputCount)); + builder.from(builder.add(myData)).toFanOut(bcast); + builder.from(bcast).to(builder.add(droppySink(mySink1, 10))); + builder.from(bcast).to(builder.add(droppySink(mySink2, 10))); + builder.from(bcast).to(builder.add(droppySink(mySink3, 10))); + return ClosedShape.getInstance(); + })); + //#droppy-bcast2 + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java new file mode 100644 index 0000000000..c04ed24cb7 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeFlattenList.java @@ -0,0 +1,59 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; + +public class RecipeFlattenList extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeFlattenList"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void workWithMapConcat() throws Exception { + new JavaTestKit(system) { + { + Source, NotUsed> someDataSource = Source + .from(Arrays.asList(Arrays.asList(new Message("1")), Arrays.asList(new Message("2"), new Message("3")))); + + //#flattening-lists + Source, NotUsed> myData = someDataSource; + Source flattened = myData.mapConcat(i -> i); + //#flattening-lists + + List got = flattened.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS); + assertEquals(got.get(0), new Message("1")); + assertEquals(got.get(1), new Message("2")); + assertEquals(got.get(2), new Message("3")); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java new file mode 100644 index 0000000000..13655198ef --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java @@ -0,0 +1,234 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.*; +import akka.dispatch.Mapper; +import akka.japi.pf.ReceiveBuilder; +import akka.pattern.PatternsCS; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.testkit.JavaTestKit; +import akka.util.Timeout; 
+import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.PartialFunction; +import scala.concurrent.Future; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; +import scala.runtime.BoxedUnit; + + +import java.util.*; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static junit.framework.TestCase.assertTrue; + +public class RecipeGlobalRateLimit extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeGlobalRateLimit"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + static + //#global-limiter-actor + public class Limiter extends AbstractActor { + + public static class WantToPass {} + public static final WantToPass WANT_TO_PASS = new WantToPass(); + + public static class MayPass {} + public static final MayPass MAY_PASS = new MayPass(); + + public static class ReplenishTokens {} + public static final ReplenishTokens REPLENISH_TOKENS = new ReplenishTokens(); + + private final int maxAvailableTokens; + private final FiniteDuration tokenRefreshPeriod; + private final int tokenRefreshAmount; + + private final List waitQueue = new ArrayList<>(); + private final Cancellable replenishTimer; + + private int permitTokens; + + public static Props props(int maxAvailableTokens, FiniteDuration tokenRefreshPeriod, + int tokenRefreshAmount) { + return Props.create(Limiter.class, maxAvailableTokens, tokenRefreshPeriod, + tokenRefreshAmount); + } + + private Limiter(int maxAvailableTokens, FiniteDuration tokenRefreshPeriod, + int tokenRefreshAmount) { + this.maxAvailableTokens = maxAvailableTokens; + this.tokenRefreshPeriod = tokenRefreshPeriod; + this.tokenRefreshAmount = tokenRefreshAmount; + this.permitTokens = maxAvailableTokens; + + this.replenishTimer = system.scheduler().schedule( + this.tokenRefreshPeriod, + this.tokenRefreshPeriod, + self(), + REPLENISH_TOKENS, + context().system().dispatcher(), + self()); + + receive(open()); + } + + PartialFunction open() { + return ReceiveBuilder + .match(ReplenishTokens.class, rt -> { + permitTokens = Math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens); + }) + .match(WantToPass.class, wtp -> { + permitTokens -= 1; + sender().tell(MAY_PASS, self()); + if (permitTokens == 0) { + context().become(closed()); + } + }).build(); + } + + PartialFunction closed() { + return ReceiveBuilder + .match(ReplenishTokens.class, rt -> { + permitTokens = Math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens); + releaseWaiting(); + }) + .match(WantToPass.class, wtp -> { + waitQueue.add(sender()); + }) + .build(); + } + + private void releaseWaiting() { + final List toBeReleased = new ArrayList<>(permitTokens); + for (int i = 0; i < permitTokens && i < waitQueue.size(); i++) { + toBeReleased.add(waitQueue.remove(i)); + } + + permitTokens -= toBeReleased.size(); + toBeReleased.stream().forEach(ref -> ref.tell(MAY_PASS, self())); + if (permitTokens > 0) { + context().become(open()); + } + } + + @Override + public void postStop() { + replenishTimer.cancel(); + waitQueue.stream().forEach(ref -> { + ref.tell(new Status.Failure(new IllegalStateException("limiter stopped")), self()); + }); + } + } + //#global-limiter-actor + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + //#global-limiter-flow + 
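      // Each element first asks the shared Limiter actor for a permit; mapAsync with parallelism 4 bounds how many permit requests this stream keeps outstanding. +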
public Flow limitGlobal(ActorRef limiter, FiniteDuration maxAllowedWait) { + final int parallelism = 4; + final Flow f = Flow.create(); + + return f.mapAsync(parallelism, element -> { + final Timeout triggerTimeout = new Timeout(maxAllowedWait); + final CompletionStage limiterTriggerFuture = + PatternsCS.ask(limiter, Limiter.WANT_TO_PASS, triggerTimeout); + return limiterTriggerFuture.thenApplyAsync(response -> element, system.dispatcher()); + }); + } + //#global-limiter-flow + + { + // Use a large period and emulate the timer by hand instead + ActorRef limiter = system.actorOf(Limiter.props(2, new FiniteDuration(100, TimeUnit.DAYS), 1), "limiter"); + + final Iterator e1 = new Iterator() { + @Override + public boolean hasNext() { + return true; + } + + @Override + public String next() { + return "E1"; + } + }; + final Iterator e2 = new Iterator() { + @Override + public boolean hasNext() { + return true; + } + + @Override + public String next() { + return "E2"; + } + }; + + final FiniteDuration twoSeconds = Duration.create(2, TimeUnit.SECONDS); + + final Sink> sink = TestSink.probe(system); + final TestSubscriber.Probe probe = + RunnableGraph.>fromGraph( + GraphDSL.create(sink, (builder, s) -> { + final int inputPorts = 2; + final UniformFanInShape merge = builder.add(Merge.create(inputPorts)); + + final SourceShape source1 = + builder.add(Source.fromIterator(() -> e1).via(limitGlobal(limiter, twoSeconds))); + final SourceShape source2 = + builder.add(Source.fromIterator(() -> e2).via(limitGlobal(limiter, twoSeconds))); + + builder.from(source1).toFanIn(merge); + builder.from(source2).toFanIn(merge); + builder.from(merge).to(s); + return ClosedShape.getInstance(); + }) + ).run(mat); + + probe.expectSubscription().request(1000); + + FiniteDuration fiveHundredMillis = FiniteDuration.create(500, TimeUnit.MILLISECONDS); + + assertTrue(probe.expectNext().startsWith("E")); + assertTrue(probe.expectNext().startsWith("E")); + probe.expectNoMsg(fiveHundredMillis); + + limiter.tell(Limiter.REPLENISH_TOKENS, getTestActor()); + assertTrue(probe.expectNext().startsWith("E")); + probe.expectNoMsg(fiveHundredMillis); + + final Set resultSet = new HashSet<>(); + for (int i = 0; i < 100; i++) { + limiter.tell(Limiter.REPLENISH_TOKENS, getTestActor()); + resultSet.add(probe.expectNext()); + } + + assertTrue(resultSet.contains("E1")); + assertTrue(resultSet.contains("E2")); + + probe.expectError(); + } + }; + } +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeHold.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeHold.java new file mode 100644 index 0000000000..dd851b4e11 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeHold.java @@ -0,0 +1,154 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Keep; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.stage.DetachedContext; +import akka.stream.stage.DetachedStage; +import akka.stream.stage.DownstreamDirective; +import akka.stream.stage.UpstreamDirective; +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.stream.testkit.javadsl.TestSource; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import 
scala.concurrent.duration.FiniteDuration; + +import java.util.concurrent.TimeUnit; + +public class RecipeHold extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeMultiGroupBy"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + //#hold-version-1 + class HoldWithInitial extends DetachedStage { + private T currentValue; + + public HoldWithInitial(T initial) { + currentValue = initial; + } + + @Override + public UpstreamDirective onPush(T elem, DetachedContext ctx) { + currentValue = elem; + return ctx.pull(); + } + + @Override + public DownstreamDirective onPull(DetachedContext ctx) { + return ctx.push(currentValue); + } + } + //#hold-version-1 + + //#hold-version-2 + class HoldWithWait extends DetachedStage { + private T currentValue = null; + private boolean waitingFirstValue = true; + + @Override + public UpstreamDirective onPush(T elem, DetachedContext ctx) { + currentValue = elem; + waitingFirstValue = false; + if (ctx.isHoldingDownstream()) { + return ctx.pushAndPull(currentValue); + } else { + return ctx.pull(); + } + } + + @Override + public DownstreamDirective onPull(DetachedContext ctx) { + if (waitingFirstValue) { + return ctx.holdDownstream(); + } else { + return ctx.push(currentValue); + } + } + } + //#hold-version-2 + + @Test + public void workForVersion1() throws Exception { + new JavaTestKit(system) { + { + final Source> source = TestSource.probe(system); + final Sink> sink = TestSink.probe(system); + + Pair, TestSubscriber.Probe> pubSub = + source.transform(() -> new HoldWithInitial<>(0)).toMat(sink, Keep.both()).run(mat); + TestPublisher.Probe pub = pubSub.first(); + TestSubscriber.Probe sub = pubSub.second(); + + sub.requestNext(0); + sub.requestNext(0); + + pub.sendNext(1); + pub.sendNext(2); + + sub.request(2); + sub.expectNext(2, 2); + + pub.sendComplete(); + sub.request(1); + sub.expectComplete(); + } + }; + } + + @Test + public void workForVersion2() throws Exception { + new JavaTestKit(system) { + { + final Source> source = TestSource.probe(system); + final Sink> sink = TestSink.probe(system); + + Pair, TestSubscriber.Probe> pubSub = + source.transform(() -> new HoldWithWait<>()).toMat(sink, Keep.both()).run(mat); + TestPublisher.Probe pub = pubSub.first(); + TestSubscriber.Probe sub = pubSub.second(); + + FiniteDuration timeout = FiniteDuration.create(200, TimeUnit.MILLISECONDS); + + sub.request(1); + sub.expectNoMsg(timeout); + + pub.sendNext(1); + sub.expectNext(1); + + pub.sendNext(2); + pub.sendNext(3); + + sub.request(2); + sub.expectNext(3, 3); + + pub.sendComplete(); + sub.request(1); + sub.expectComplete(); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeKeepAlive.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeKeepAlive.java new file mode 100644 index 0000000000..5d5ee3d622 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeKeepAlive.java @@ -0,0 +1,58 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Flow; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import 
java.util.concurrent.TimeUnit; + +public class RecipeKeepAlive extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeKeepAlive"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + class Tick {} + public final Tick TICK = new Tick(); + + @Test + public void workForVersion1() throws Exception { + new JavaTestKit(system) { + { + final ByteString keepAliveMessage = ByteString.fromArray(new byte[]{11}); + + //@formatter:off + //#inject-keepalive + Flow keepAliveInject = + Flow.of(ByteString.class).keepAlive( + scala.concurrent.duration.Duration.create(1, TimeUnit.SECONDS), + () -> keepAliveMessage); + //#inject-keepalive + //@formatter:on + + // Enough to compile, tested elsewhere as a built-in stage + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeLoggingElements.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeLoggingElements.java new file mode 100644 index 0000000000..051416675f --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeLoggingElements.java @@ -0,0 +1,96 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.event.Logging; +import akka.event.LoggingAdapter; +import akka.stream.ActorMaterializer; +import akka.stream.Attributes; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.DebugFilter; +import akka.testkit.JavaTestKit; +import com.typesafe.config.ConfigFactory; +import docs.stream.SilenceSystemOut; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.runtime.AbstractFunction0; + +import java.util.Arrays; + +public class RecipeLoggingElements extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeLoggingElements", ConfigFactory.parseString("akka.loglevel=DEBUG\nakka.loggers = [akka.testkit.TestEventListener]")); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void workWithPrintln() throws Exception { + new JavaTestKit(system) { + final SilenceSystemOut.System System = SilenceSystemOut.get(getTestActor()); + + { + final Source mySource = Source.from(Arrays.asList("1", "2", "3")); + + //#println-debug + mySource.map(elem -> { + System.out.println(elem); + return elem; + }); + //#println-debug + } + }; + } + + @Test + public void workWithLog() throws Exception { + new JavaTestKit(system) { + private T analyse(T i) { + return i; + } + + { + final Source mySource = Source.from(Arrays.asList("1", "2", "3")); + + final int onElement = Logging.WarningLevel(); + final int onFinish = Logging.ErrorLevel(); + final int onFailure = Logging.ErrorLevel(); + + //#log-custom + // customise log levels + mySource.log("before-map") + .withAttributes(Attributes.createLogLevels(onElement, onFinish, onFailure)) + .map(i -> analyse(i)); + + // or provide custom logging adapter + final LoggingAdapter adapter = Logging.getLogger(system, "customLogger"); + mySource.log("custom", adapter); + //#log-custom + + + new DebugFilter("customLogger", "[custom] Element: ", 
false, false, 3).intercept(new AbstractFunction0 () { + public Void apply() { + mySource.log("custom", adapter).runWith(Sink.ignore(), mat); + return null; + } + }, system); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeManualTrigger.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeManualTrigger.java new file mode 100644 index 0000000000..c06fd76667 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeManualTrigger.java @@ -0,0 +1,153 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.stream.testkit.javadsl.TestSource; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.duration.FiniteDuration; + +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +public class RecipeManualTrigger extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeKeepAlive"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + class Trigger { + } + + public final Trigger TRIGGER = new Trigger(); + + @Test + public void zipped() throws Exception { + new JavaTestKit(system) { + { + final Source> triggerSource = TestSource.probe(system); + final Sink> messageSink = TestSink.probe(system); + + //#manually-triggered-stream + final RunnableGraph, TestSubscriber.Probe>> g = + RunnableGraph., TestSubscriber.Probe>>fromGraph( + GraphDSL.create( + triggerSource, + messageSink, + (p, s) -> new Pair<>(p, s), + (builder, source, sink) -> { + SourceShape elements = + builder.add(Source.from(Arrays.asList("1", "2", "3", "4")).map(t -> new Message(t))); + FlowShape, Message> takeMessage = + builder.add(Flow.>create().map(p -> p.first())); + final FanInShape2> zip = + builder.add(Zip.create()); + builder.from(elements).toInlet(zip.in0()); + builder.from(source).toInlet(zip.in1()); + builder.from(zip.out()).via(takeMessage).to(sink); + return ClosedShape.getInstance(); + } + ) + ); + //#manually-triggered-stream + + Pair, TestSubscriber.Probe> pubSub = g.run(mat); + TestPublisher.Probe pub = pubSub.first(); + TestSubscriber.Probe sub = pubSub.second(); + + FiniteDuration timeout = FiniteDuration.create(100, TimeUnit.MILLISECONDS); + sub.expectSubscription().request(1000); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + sub.expectNext(new Message("1")); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + pub.sendNext(TRIGGER); + sub.expectNext(new Message("2")); + sub.expectNext(new Message("3")); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + sub.expectNext(new Message("4")); + sub.expectComplete(); + } + }; + } + + @Test + public void zipWith() throws Exception { + new JavaTestKit(system) { + { + final Source> triggerSource = TestSource.probe(system); + final Sink> messageSink = TestSink.probe(system); + + //#manually-triggered-stream-zipwith + final RunnableGraph, TestSubscriber.Probe>> g = + RunnableGraph., TestSubscriber.Probe>>fromGraph( + GraphDSL.create( + triggerSource, + messageSink, + (p, s) -> new Pair<>(p, s), 
+ (builder, source, sink) -> { + final SourceShape elements = + builder.add(Source.from(Arrays.asList("1", "2", "3", "4")).map(t -> new Message(t))); + final FanInShape2 zipWith = + builder.add(ZipWith.create((msg, trigger) -> msg)); + builder.from(elements).toInlet(zipWith.in0()); + builder.from(source).toInlet(zipWith.in1()); + builder.from(zipWith.out()).to(sink); + return ClosedShape.getInstance(); + } + ) + ); + //#manually-triggered-stream-zipwith + + Pair, TestSubscriber.Probe> pubSub = g.run(mat); + TestPublisher.Probe pub = pubSub.first(); + TestSubscriber.Probe sub = pubSub.second(); + + FiniteDuration timeout = FiniteDuration.create(100, TimeUnit.MILLISECONDS); + sub.expectSubscription().request(1000); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + sub.expectNext(new Message("1")); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + pub.sendNext(TRIGGER); + sub.expectNext(new Message("2")); + sub.expectNext(new Message("3")); + sub.expectNoMsg(timeout); + + pub.sendNext(TRIGGER); + sub.expectNext(new Message("4")); + sub.expectComplete(); + + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMissedTicks.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMissedTicks.java new file mode 100644 index 0000000000..c16522cbbc --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMissedTicks.java @@ -0,0 +1,97 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Flow; +import akka.stream.javadsl.Keep; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.stream.testkit.javadsl.TestSource; +import akka.testkit.JavaTestKit; +import akka.testkit.TestLatch; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.duration.Duration; +import scala.concurrent.duration.FiniteDuration; + +import java.util.concurrent.TimeUnit; + +public class RecipeMissedTicks extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeMultiGroupBy"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + class Tick { + } + + final Tick Tick = new Tick(); + + { + final Source> tickStream = TestSource.probe(system); + final Sink> sink = TestSink.probe(system); + + @SuppressWarnings("unused") + //#missed-ticks + final Flow missedTicks = + Flow.of(Tick.class).conflate(tick -> 0, (missed, tick) -> missed + 1); + //#missed-ticks + final TestLatch latch = new TestLatch(3, system); + final Flow realMissedTicks = + Flow.of(Tick.class).conflate(tick -> 0, (missed, tick) -> { latch.countDown(); return missed + 1; }); + + Pair, TestSubscriber.Probe> pubSub = + tickStream.via(realMissedTicks).toMat(sink, Keep.both()).run(mat); + TestPublisher.Probe pub = pubSub.first(); + TestSubscriber.Probe sub = pubSub.second(); + + pub.sendNext(Tick); + pub.sendNext(Tick); + pub.sendNext(Tick); + pub.sendNext(Tick); 
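+        // The first tick seeds the conflated counter with 0 and the next three ticks are folded into it, so the subscriber below observes 3.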
+ + FiniteDuration timeout = FiniteDuration.create(200, TimeUnit.MILLISECONDS); + + Await.ready(latch, Duration.create(1, TimeUnit.SECONDS)); + + sub.request(1); + sub.expectNext(3); + sub.request(1); + sub.expectNoMsg(timeout); + + pub.sendNext(Tick); + sub.expectNext(0); + + pub.sendComplete(); + sub.request(1); + sub.expectComplete(); + + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java new file mode 100644 index 0000000000..73080f89ff --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java @@ -0,0 +1,155 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.japi.Function; +import akka.japi.Pair; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.javadsl.SubSource; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static java.util.stream.Collectors.toList; +import static junit.framework.TestCase.assertTrue; + +public class RecipeMultiGroupByTest extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeMultiGroupBy"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + static class Topic { + private final String name; + + public Topic(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Topic topic = (Topic) o; + + if (name != null ? !name.equals(topic.name) : topic.name != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return name != null ? 
name.hashCode() : 0; + } + } + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + final List extractTopics(Message m) { + final List topics = new ArrayList<>(2); + + if (m.msg.startsWith("1")) { + topics.add(new Topic("1")); + } else { + topics.add(new Topic("1")); + topics.add(new Topic("2")); + } + + return topics; + } + + { + + final Source elems = Source + .from(Arrays.asList("1: a", "1: b", "all: c", "all: d", "1: e")) + .map(s -> new Message(s)); + + //#multi-groupby + final Function> topicMapper = m -> extractTopics(m); + + final Source, NotUsed> messageAndTopic = elems + .mapConcat((Message msg) -> { + List topicsForMessage = topicMapper.apply(msg); + // Create a (Msg, Topic) pair for each of the topics + + // the message belongs to + return topicsForMessage + .stream() + .map(topic -> new Pair(msg, topic)) + .collect(toList()); + }); + + SubSource, NotUsed> multiGroups = messageAndTopic + .groupBy(2, pair -> pair.second()) + .map(pair -> { + Message message = pair.first(); + Topic topic = pair.second(); + + // do what needs to be done + //#multi-groupby + return pair; + //#multi-groupby + }); + //#multi-groupby + + CompletionStage> result = multiGroups + .grouped(10) + .mergeSubstreams() + .map(pair -> { + Topic topic = pair.get(0).second(); + return topic.name + mkString(pair.stream().map(p -> p.first().msg).collect(toList()), "[", ", ", "]"); + }) + .grouped(10) + .runWith(Sink.head(), mat); + + List got = result.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertTrue(got.contains("1[1: a, 1: b, all: c, all: d, 1: e]")); + assertTrue(got.contains("2[all: c, all: d]")); + } + }; + } + + public static final String mkString(List l, String start, String separate, String end) { + StringBuilder sb = new StringBuilder(start); + for (String s : l) { + sb.append(s).append(separate); + } + return sb + .delete(sb.length() - separate.length(), sb.length()) + .append(end).toString(); + } +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java new file mode 100644 index 0000000000..ec502e6506 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeParseLines.java @@ -0,0 +1,57 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.io.Framing; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.Arrays; +import java.util.concurrent.TimeUnit; + +public class RecipeParseLines extends RecipeTest { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeLoggingElements"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void parseLines() throws Exception { + final Source rawData = Source.from(Arrays.asList( + ByteString.fromString("Hello World"), + ByteString.fromString("\r"), + ByteString.fromString("!\r"), + ByteString.fromString("\nHello Akka!\r\nHello Streams!"), + ByteString.fromString("\r\n\r\n"))); + + //#parse-lines + final Source lines = rawData + 
.via(Framing.delimiter(ByteString.fromString("\r\n"), 100, true)) + .map(b -> b.utf8String()); + //#parse-lines + + lines.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS); + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java new file mode 100644 index 0000000000..4ad5cae04b --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java @@ -0,0 +1,123 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.japi.function.Function; +import akka.japi.function.Function2; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Flow; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +public class RecipeReduceByKeyTest extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeLoggingElements"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + { + final Source words = Source.from(Arrays.asList("hello", "world", "and", "hello", "akka")); + + //#word-count + final int MAXIMUM_DISTINCT_WORDS = 1000; + + final Source, NotUsed> counts = words + // split the words into separate streams first + .groupBy(MAXIMUM_DISTINCT_WORDS, i -> i) + //transform each element to pair with number of words in it + .map(i -> new Pair<>(i, 1)) + // add counting logic to the streams + .reduce((left, right) -> new Pair<>(left.first(), left.second() + right.second())) + // get a stream of word counts + .mergeSubstreams(); + //#word-count + + final CompletionStage>> f = counts.grouped(10).runWith(Sink.head(), mat); + final Set> result = f.toCompletableFuture().get(3, TimeUnit.SECONDS).stream().collect(Collectors.toSet()); + final Set> expected = new HashSet<>(); + expected.add(new Pair<>("hello", 2)); + expected.add(new Pair<>("world", 1)); + expected.add(new Pair<>("and", 1)); + expected.add(new Pair<>("akka", 1)); + Assert.assertEquals(expected, result); + } + }; + } + + //#reduce-by-key-general + static public Flow, NotUsed> reduceByKey( + int maximumGroupSize, + Function groupKey, + Function map, + Function2 reduce) { + + return Flow. 
create() + .groupBy(maximumGroupSize, groupKey) + .map(i -> new Pair<>(groupKey.apply(i), map.apply(i))) + .reduce((left, right) -> new Pair<>(left.first(), reduce.apply(left.second(), right.second()))) + .mergeSubstreams(); + } + //#reduce-by-key-general + + @Test + public void workGeneralised() throws Exception { + new JavaTestKit(system) { + { + final Source words = Source.from(Arrays.asList("hello", "world", "and", "hello", "akka")); + + //#reduce-by-key-general2 + final int MAXIMUM_DISTINCT_WORDS = 1000; + + Source, NotUsed> counts = words.via(reduceByKey( + MAXIMUM_DISTINCT_WORDS, + word -> word, + word -> 1, + (left, right) -> left + right)); + + //#reduce-by-key-general2 + final CompletionStage>> f = counts.grouped(10).runWith(Sink.head(), mat); + final Set> result = f.toCompletableFuture().get(3, TimeUnit.SECONDS).stream().collect(Collectors.toSet()); + final Set> expected = new HashSet<>(); + expected.add(new Pair<>("hello", 2)); + expected.add(new Pair<>("world", 1)); + expected.add(new Pair<>("and", 1)); + expected.add(new Pair<>("akka", 1)); + Assert.assertEquals(expected, result); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeSimpleDrop.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeSimpleDrop.java new file mode 100644 index 0000000000..7fc1c69431 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeSimpleDrop.java @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.japi.Pair; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Flow; +import akka.stream.testkit.TestPublisher; +import akka.stream.testkit.TestSubscriber; +import akka.stream.testkit.javadsl.TestSink; +import akka.stream.testkit.javadsl.TestSource; +import akka.testkit.JavaTestKit; +import akka.testkit.TestLatch; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.duration.Duration; + +import java.util.concurrent.TimeUnit; + +public class RecipeSimpleDrop extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeSimpleDrop"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void work() throws Exception { + new JavaTestKit(system) { + { + @SuppressWarnings("unused") + //#simple-drop + final Flow droppyStream = + Flow.of(Message.class).conflate(i -> i, (lastMessage, newMessage) -> newMessage); + //#simple-drop + final TestLatch latch = new TestLatch(2, system); + final Flow realDroppyStream = + Flow.of(Message.class).conflate(i -> i, (lastMessage, newMessage) -> { latch.countDown(); return newMessage; }); + + final Pair, TestSubscriber.Probe> pubSub = TestSource + . 
probe(system) + .via(realDroppyStream) + .toMat(TestSink.probe(system), + (pub, sub) -> new Pair<>(pub, sub)) + .run(mat); + final TestPublisher.Probe pub = pubSub.first(); + final TestSubscriber.Probe sub = pubSub.second(); + + pub.sendNext(new Message("1")); + pub.sendNext(new Message("2")); + pub.sendNext(new Message("3")); + + Await.ready(latch, Duration.create(1, TimeUnit.SECONDS)); + + sub.requestNext(new Message("3")); + + pub.sendComplete(); + sub.request(1); + sub.expectComplete(); + } + }; + } +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeTest.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeTest.java new file mode 100644 index 0000000000..c4f93a76a3 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeTest.java @@ -0,0 +1,40 @@ +package docs.stream.javadsl.cookbook; + +public class RecipeTest { + final class Message { + public final String msg; + + public Message(String msg) { + this.msg = msg; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + Message message = (Message) o; + + if (msg != null ? !msg.equals(message.msg) : message.msg != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + return msg != null ? msg.hashCode() : 0; + } + } + + final class Trigger { + } + + final class Job { + } +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java new file mode 100644 index 0000000000..c7e7fde861 --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeToStrict.java @@ -0,0 +1,58 @@ +/** + * Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.ActorMaterializer; +import akka.stream.Materializer; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +public class RecipeToStrict extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeLoggingElements"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + @Test + public void workWithPrintln() throws Exception { + new JavaTestKit(system) { + { + final Source myData = Source.from(Arrays.asList("1", "2", "3")); + final int MAX_ALLOWED_SIZE = 100; + + //#draining-to-list + final CompletionStage> strings = myData + .grouped(MAX_ALLOWED_SIZE).runWith(Sink.head(), mat); + //#draining-to-list + + strings.toCompletableFuture().get(3, TimeUnit.SECONDS); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java new file mode 100644 index 0000000000..5ff7cb61ae --- /dev/null +++ b/akka-docs/rst/java/code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java @@ -0,0 +1,87 @@ +/** + * 
Copyright (C) 2015-2016 Typesafe + */ +package docs.stream.javadsl.cookbook; + +import akka.NotUsed; +import akka.actor.ActorSystem; +import akka.stream.*; +import akka.stream.javadsl.*; +import akka.testkit.JavaTestKit; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertTrue; + +public class RecipeWorkerPool extends RecipeTest { + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create("RecipeWorkerPool"); + } + + @AfterClass + public static void tearDown() { + JavaTestKit.shutdownActorSystem(system); + system = null; + } + + final Materializer mat = ActorMaterializer.create(system); + + //#worker-pool + public static Flow balancer( + Flow worker, int workerCount) { + return Flow.fromGraph(GraphDSL.create(b -> { + boolean waitForAllDownstreams = true; + final UniformFanOutShape balance = + b.add(Balance.create(workerCount, waitForAllDownstreams)); + final UniformFanInShape merge = + b.add(Merge.create(workerCount)); + + for (int i = 0; i < workerCount; i++) { + b.from(balance.out(i)).via(b.add(worker)).toInlet(merge.in(i)); + } + + return FlowShape.of(balance.in(), merge.out()); + })); + } + //#worker-pool + + @Test + public void workForVersion1() throws Exception { + new JavaTestKit(system) { + { + Source data = + Source + .from(Arrays.asList("1", "2", "3", "4", "5")) + .map(t -> new Message(t)); + + Flow worker = Flow.of(Message.class).map(m -> new Message(m.msg + " done")); + + //#worker-pool2 + Flow balancer = balancer(worker, 3); + Source processedJobs = data.via(balancer); + //#worker-pool2 + + CompletionStage> future = processedJobs.map(m -> m.msg).grouped(10).runWith(Sink.head(), mat); + List got = future.toCompletableFuture().get(1, TimeUnit.SECONDS); + assertTrue(got.contains("1 done")); + assertTrue(got.contains("2 done")); + assertTrue(got.contains("3 done")); + assertTrue(got.contains("4 done")); + assertTrue(got.contains("5 done")); + } + }; + } + +} diff --git a/akka-docs/rst/java/code/docs/testkit/ParentChildTest.java b/akka-docs/rst/java/code/docs/testkit/ParentChildTest.java index 6ebbd408b7..8cb5502cd4 100644 --- a/akka-docs/rst/java/code/docs/testkit/ParentChildTest.java +++ b/akka-docs/rst/java/code/docs/testkit/ParentChildTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.testkit; diff --git a/akka-docs/rst/java/code/docs/testkit/TestKitDocTest.java b/akka-docs/rst/java/code/docs/testkit/TestKitDocTest.java index 42ad3b5b69..9618046d59 100644 --- a/akka-docs/rst/java/code/docs/testkit/TestKitDocTest.java +++ b/akka-docs/rst/java/code/docs/testkit/TestKitDocTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.testkit; diff --git a/akka-docs/rst/java/code/docs/testkit/TestKitSampleTest.java b/akka-docs/rst/java/code/docs/testkit/TestKitSampleTest.java index da529d15d7..612a2bf674 100644 --- a/akka-docs/rst/java/code/docs/testkit/TestKitSampleTest.java +++ b/akka-docs/rst/java/code/docs/testkit/TestKitSampleTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.testkit; diff --git a/akka-docs/rst/java/code/docs/util/SocketUtils.java b/akka-docs/rst/java/code/docs/util/SocketUtils.java new file mode 100644 index 0000000000..9a820cfa5d --- /dev/null +++ b/akka-docs/rst/java/code/docs/util/SocketUtils.java @@ -0,0 +1,31 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.util; + +import java.net.InetSocketAddress; + +import java.io.IOException; +import java.net.ServerSocket; +import java.nio.channels.ServerSocketChannel; + +public class SocketUtils { + + public static InetSocketAddress temporaryServerAddress(String hostname) { + try { + ServerSocket socket = ServerSocketChannel.open().socket(); + socket.bind(new InetSocketAddress(hostname, 0)); + InetSocketAddress address = new InetSocketAddress(hostname, socket.getLocalPort()); + socket.close(); + return address; + } + catch (IOException io) { + throw new RuntimeException(io); + } + } + + public static InetSocketAddress temporaryServerAddress() { + return temporaryServerAddress("127.0.0.1"); + } + +} \ No newline at end of file diff --git a/akka-docs-dev/rst/java/http/client-side/connection-level.rst b/akka-docs/rst/java/http/client-side/connection-level.rst similarity index 98% rename from akka-docs-dev/rst/java/http/client-side/connection-level.rst rename to akka-docs/rst/java/http/client-side/connection-level.rst index b4d6d4e4e3..36eec3c7bc 100644 --- a/akka-docs-dev/rst/java/http/client-side/connection-level.rst +++ b/akka-docs/rst/java/http/client-side/connection-level.rst @@ -73,7 +73,7 @@ to "run" the HTTP layer (and, potentially, higher-layers) against data that do n some other source. Potential scenarios where this might be useful include tests, debugging or low-level event-sourcing (e.g by replaying network traffic). -On the client-side the stand-alone HTTP layer forms a ``BidiFlow``, +On the client-side the stand-alone HTTP layer forms a ``BidiFlow``, that is a stage that "upgrades" a potentially encrypted raw connection to the HTTP level. You create an instance of the layer by calling one of the two overloads of the ``Http.get(system).clientLayer`` method, diff --git a/akka-docs-dev/rst/java/http/client-side/host-level.rst b/akka-docs/rst/java/http/client-side/host-level.rst similarity index 97% rename from akka-docs-dev/rst/java/http/client-side/host-level.rst rename to akka-docs/rst/java/http/client-side/host-level.rst index 51fe6d3e12..889e52d553 100644 --- a/akka-docs-dev/rst/java/http/client-side/host-level.rst +++ b/akka-docs/rst/java/http/client-side/host-level.rst @@ -137,11 +137,11 @@ re-materialized the respective pool is automatically and transparently restarted In addition to the automatic shutdown via the configured idle timeouts it's also possible to trigger the immediate shutdown of a specific pool by calling ``shutdown()`` on the :class:`HostConnectionPool` instance that the pool client -flow materializes into. This ``shutdown()`` call produces a ``Future[Unit]`` which is fulfilled when the pool +flow materializes into. This ``shutdown()`` call produces a ``CompletionStage`` which is fulfilled when the pool termination has been completed. It's also possible to trigger the immediate termination of *all* connection pools in the ``ActorSystem`` at the same -time by calling ``Http.get(system).shutdownAllConnectionPools()``. This call too produces a ``Future[Unit]`` which is fulfilled when +time by calling ``Http.get(system).shutdownAllConnectionPools()``. 
This call too produces a ``CompletionStage`` which is fulfilled when all pools have terminated. diff --git a/akka-docs-dev/rst/java/http/client-side/https-support.rst b/akka-docs/rst/java/http/client-side/https-support.rst similarity index 93% rename from akka-docs-dev/rst/java/http/client-side/https-support.rst rename to akka-docs/rst/java/http/client-side/https-support.rst index 6ccc818815..f9a9e02776 100644 --- a/akka-docs-dev/rst/java/http/client-side/https-support.rst +++ b/akka-docs/rst/java/http/client-side/https-support.rst @@ -9,12 +9,12 @@ Akka HTTP supports TLS encryption on the client-side as well as on the :ref:`ser Akka HTTP 1.0 does not completely validate certificates when using HTTPS. Please do not treat HTTPS connections made with this version as secure. Requests are vulnerable to a Man-In-The-Middle attack via certificate substitution. - -The central vehicle for configuring encryption is the ``HttpsContext``, which can be created using -the static method ``HttpsContext.create`` which is defined like this: -.. includecode:: /../../akka-http-core/src/main/java/akka/http/javadsl/HttpsContext.java - :include: http-context-creation +The central vehicle for configuring encryption is the ``HttpsConnectionContext``, which can be created using +the static method ``ConnectionContext.https`` which is defined like this: + +.. includecode:: /../../akka-http-core/src/main/scala/akka/http/javadsl/ConnectionContext.scala + :include: https-context-creation In addition to the ``outgoingConnection``, ``newHostConnectionPool`` and ``cachedHostConnectionPool`` methods the `akka.http.javadsl.Http`_ extension also defines ``outgoingConnectionTls``, ``newHostConnectionPoolTls`` and diff --git a/akka-docs-dev/rst/java/http/client-side/index.rst b/akka-docs/rst/java/http/client-side/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/client-side/index.rst rename to akka-docs/rst/java/http/client-side/index.rst diff --git a/akka-docs-dev/rst/java/http/client-side/request-level.rst b/akka-docs/rst/java/http/client-side/request-level.rst similarity index 94% rename from akka-docs-dev/rst/java/http/client-side/request-level.rst rename to akka-docs/rst/java/http/client-side/request-level.rst index c731e13ed4..5af297b02d 100644 --- a/akka-docs-dev/rst/java/http/client-side/request-level.rst +++ b/akka-docs/rst/java/http/client-side/request-level.rst @@ -38,7 +38,7 @@ Sometimes your HTTP client needs are very basic. You simply need the HTTP respon want to bother with setting up a full-blown streaming infrastructure. For these cases Akka HTTP offers the ``Http().singleRequest(...)`` method, which simply turns an ``HttpRequest`` instance -into ``Future``. Internally the request is dispatched across the (cached) host connection pool for the +into ``CompletionStage``. Internally the request is dispatched across the (cached) host connection pool for the request's effective URI. Just like in the case of the super-pool flow described above the request must have either an absolute URI or a valid @@ -48,7 +48,7 @@ Just like in the case of the super-pool flow described above the request must ha Using the Future-Based API in Actors ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -When using the ``Future`` based API from inside an ``Actor``, all the usual caveats apply to how one should deal +When using the ``CompletionStage`` based API from inside an ``Actor``, all the usual caveats apply to how one should deal with the futures completion. 
For example you should not access the Actors state from within the Future's callbacks (such as ``map``, ``onComplete``, ...) and instead you should use the ``pipe`` pattern to pipe the result back to the Actor as a message: diff --git a/akka-docs-dev/rst/java/http/client-side/websocket-support.rst b/akka-docs/rst/java/http/client-side/websocket-support.rst similarity index 100% rename from akka-docs-dev/rst/java/http/client-side/websocket-support.rst rename to akka-docs/rst/java/http/client-side/websocket-support.rst diff --git a/akka-docs-dev/rst/java/http/configuration.rst b/akka-docs/rst/java/http/configuration.rst similarity index 100% rename from akka-docs-dev/rst/java/http/configuration.rst rename to akka-docs/rst/java/http/configuration.rst diff --git a/akka-docs-dev/rst/java/http/http-model.rst b/akka-docs/rst/java/http/http-model.rst similarity index 99% rename from akka-docs-dev/rst/java/http/http-model.rst rename to akka-docs/rst/java/http/http-model.rst index 7d469146df..595539a7bb 100644 --- a/akka-docs-dev/rst/java/http/http-model.rst +++ b/akka-docs/rst/java/http/http-model.rst @@ -129,7 +129,7 @@ Entity types ``HttpEntityStrict``, ``HttpEntityDefault``, and ``HttpEntityChunke which allows to use them for requests and responses. In contrast, ``HttpEntityCloseDelimited`` can only be used for responses. Streaming entity types (i.e. all but ``HttpEntityStrict``) cannot be shared or serialized. To create a strict, sharable copy of an -entity or message use ``HttpEntity.toStrict`` or ``HttpMessage.toStrict`` which returns a ``Future`` of the object with +entity or message use ``HttpEntity.toStrict`` or ``HttpMessage.toStrict`` which returns a ``CompletionStage`` of the object with the body data collected into a ``ByteString``. The class ``HttpEntities`` contains static methods to create entities from common types easily. 
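To make the ``toStrict`` call mentioned above concrete, here is a minimal sketch (not taken from the Akka sources; it assumes an ``HttpResponse`` called ``response`` and a ``Materializer`` called ``mat`` are in scope, uses the ``HttpEntityStrict`` name from this version of the docs, and the millisecond timeout overload shown here is an assumption)::

    // Collect the (possibly streaming) body into memory, with a timeout in milliseconds.
    CompletionStage<HttpEntityStrict> strictEntity =
        response.entity().toStrict(3000, mat);

    // Once collected, the whole body is available as a single ByteString.
    strictEntity.thenAccept(strict ->
        System.out.println(strict.getData().utf8String()));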
diff --git a/akka-docs-dev/rst/java/http/index.rst b/akka-docs/rst/java/http/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/index.rst rename to akka-docs/rst/java/http/index.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/extractHost.rst b/akka-docs/rst/java/http/routing-dsl/directives/host-directives/extractHost.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/extractHost.rst rename to akka-docs/rst/java/http/routing-dsl/directives/host-directives/extractHost.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/host.rst b/akka-docs/rst/java/http/routing-dsl/directives/host-directives/host.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/host.rst rename to akka-docs/rst/java/http/routing-dsl/directives/host-directives/host.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/index.rst b/akka-docs/rst/java/http/routing-dsl/directives/host-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/host-directives/index.rst rename to akka-docs/rst/java/http/routing-dsl/directives/host-directives/index.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/index.rst b/akka-docs/rst/java/http/routing-dsl/directives/index.rst similarity index 96% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/index.rst rename to akka-docs/rst/java/http/routing-dsl/directives/index.rst index 5d9f2f893d..5bcb199c6e 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/directives/index.rst +++ b/akka-docs/rst/java/http/routing-dsl/directives/index.rst @@ -55,8 +55,8 @@ RangeDirectives SchemeDirectives Contains a single directive ``scheme`` to filter requests based on the URI scheme (http vs. https). -WebsocketDirectives - Contains directives to support answering Websocket requests. +WebSocketDirectives + Contains directives to support answering WebSocket requests. 
TODO this page should be rewritten as the corresponding Scala page diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/delete.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/delete.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/delete.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/delete.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/extractMethod.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/extractMethod.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/extractMethod.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/extractMethod.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/get.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/get.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/get.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/get.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/head.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/head.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/head.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/head.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/index.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/index.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/index.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/method.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/method.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/method.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/method.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/options.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/options.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/options.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/options.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/patch.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/patch.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/patch.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/patch.rst diff --git 
a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/post.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/post.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/post.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/post.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/put.rst b/akka-docs/rst/java/http/routing-dsl/directives/method-directives/put.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/method-directives/put.rst rename to akka-docs/rst/java/http/routing-dsl/directives/method-directives/put.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/directives/path-directives.rst b/akka-docs/rst/java/http/routing-dsl/directives/path-directives.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/directives/path-directives.rst rename to akka-docs/rst/java/http/routing-dsl/directives/path-directives.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/handlers.rst b/akka-docs/rst/java/http/routing-dsl/handlers.rst similarity index 76% rename from akka-docs-dev/rst/java/http/routing-dsl/handlers.rst rename to akka-docs/rst/java/http/routing-dsl/handlers.rst index 0d51a44ba8..6201f5951e 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/handlers.rst +++ b/akka-docs/rst/java/http/routing-dsl/handlers.rst @@ -24,7 +24,7 @@ by inspecting the ``RequestContext`` and returning a ``RouteResult``: Such a handler inspects the ``RequestContext`` it receives and uses the ``RequestContext``'s methods to create a response: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: simple-handler The handler can include any kind of logic but must return a ``RouteResult`` in the end which can only @@ -32,7 +32,7 @@ be created by using one of the ``RequestContext`` methods. A handler instance can be used once or several times as shown in the full example: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: simple-handler-example-full Handlers and Request Values @@ -42,14 +42,14 @@ In many cases, instead of manually inspecting the request, a handler will make u to extract details from the request. This is possible using one of the other ``handleWith`` overloads that bind the values of one or more request values with a ``HandlerN`` instance to produce a ``Route``: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: handler2 The handler here implements multiplication of two integers. However, it doesn't need to specify where these parameters come from. In ``handleWith``, as many request values of the matching type have to be specified as the handler needs. This can be seen in the full example: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. 
includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: handler2-example-full Here, the handler is again being reused. First, in creating a route that expects URI parameters ``x`` and ``y``. This @@ -67,7 +67,7 @@ type represents a sausage, put between the "buns" which are ``RequestContext`` a In Java 8 handlers can be provided as function literals or method references. The previous example can then be written like this: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: handler2-java8-example-full @@ -87,12 +87,12 @@ Providing Handlers by Reflection Using Java before Java 8, writing out handlers as (anonymous) classes can be unwieldy. Therefore, ``handleReflectively`` overloads are provided that allow writing handler as simple methods and specifying them by name: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: reflective The complete calculator example can then be written like this: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: reflective-example-full There are alternative overloads for ``handleReflectively`` that take a ``Class`` instead of an object instance to refer to @@ -104,31 +104,32 @@ Deferring Result Creation Sometimes a handler cannot directly complete the request but needs to do some processing asynchronously. In this case the completion of a request needs to be deferred until the result has been generated. This is supported by the routing DSL in two ways: either you can use one of the ``handleWithAsyncN`` methods passing an ``AsyncHandlerN`` which -returns a ``Future``, i.e. an eventual ``RouteResult``, or you can also use a regular handler as shown -above and use ``RequestContext.completeWith`` for completion which takes an ``Future`` as an argument. +returns a ``CompletionStage``, i.e. an eventual ``RouteResult``, or you can also use a regular handler as shown +above and use ``RequestContext.completeWith`` for completion which takes an ``CompletionStage`` as an argument. This is demonstrated in the following example. Consider a asynchronous service defined like this (making use of Java 8 lambdas): -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-service-definition Here the calculator runs the actual calculation in the background and only eventually returns the result. The HTTP service should provide a front-end to that service without having to block while waiting for the results. As explained above this can be done in two ways. -First, you can use ``handleWithAsyncN`` to be able to return a ``Future``: +First, you can use ``handleWithAsyncN`` to be able to return a ``CompletionStage``: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. 
includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-handler-1 -The handler invokes the service and then maps the calculation result to a ``RouteResult`` using ``Future.map`` and -returns the resulting ``Future``. +The handler invokes the service and then maps the calculation result to a ``RouteResult`` using ``CompletionStage.thenApplyAsync`` and +returns the resulting ``CompletionStage``. Note that you should always explicitly provide an executor that designates +where the future transformation task is executed, using the JDK’s global ForkJoinPool is not recommended. Otherwise, you can also still use ``handleWithN`` and use ``RequestContext.completeWith`` to "convert" a -``Future`` into a ``RouteResult`` as shown here: +``CompletionStage`` into a ``RouteResult`` as shown here: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-handler-2 Using this style, you can decide in your handler if you want to return a direct synchronous result or if you need @@ -138,5 +139,5 @@ Both alternatives will not block and show the same runtime behavior. Here's the complete example: -.. includecode:: /../../akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +.. includecode:: /../../akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java :include: async-example-full diff --git a/akka-docs-dev/rst/java/http/routing-dsl/index.rst b/akka-docs/rst/java/http/routing-dsl/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/index.rst rename to akka-docs/rst/java/http/routing-dsl/index.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/json-support.rst b/akka-docs/rst/java/http/routing-dsl/json-support.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/json-support.rst rename to akka-docs/rst/java/http/routing-dsl/json-support.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/marshalling.rst b/akka-docs/rst/java/http/routing-dsl/marshalling.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/marshalling.rst rename to akka-docs/rst/java/http/routing-dsl/marshalling.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/overview.rst b/akka-docs/rst/java/http/routing-dsl/overview.rst similarity index 98% rename from akka-docs-dev/rst/java/http/routing-dsl/overview.rst rename to akka-docs/rst/java/http/routing-dsl/overview.rst index 94f95b496e..f02af76681 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/overview.rst +++ b/akka-docs/rst/java/http/routing-dsl/overview.rst @@ -1,4 +1,4 @@ -.. _routing-java: +.. _http-routing-java: Routing DSL Overview ==================== @@ -88,7 +88,7 @@ Bind failures ^^^^^^^^^^^^^ For example the server might be unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the Future's completion: +In this case the "binding future" will fail immediatly, and we can react to if by listening on the CompletionStage's completion: .. 
includecode:: ../../code/docs/http/javadsl/server/HighLevelServerBindFailureExample.java :include: binding-failure-high-level-example diff --git a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/form-field-request-vals.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/form-field-request-vals.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/request-vals/form-field-request-vals.rst rename to akka-docs/rst/java/http/routing-dsl/request-vals/form-field-request-vals.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/header-request-vals.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/header-request-vals.rst similarity index 92% rename from akka-docs-dev/rst/java/http/routing-dsl/request-vals/header-request-vals.rst rename to akka-docs/rst/java/http/routing-dsl/request-vals/header-request-vals.rst index f73bf5317e..ced5d6d648 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/header-request-vals.rst +++ b/akka-docs/rst/java/http/routing-dsl/request-vals/header-request-vals.rst @@ -14,10 +14,10 @@ The ``RequestVal`` builder is made up of 2 steps, initially you need to pick whi match if the header is not present in the request). This is done using one of the below depicted methods:: RequestVal instance() - RequestVal<> optionalInstance() + RequestVal<> optionalInstance() RequestVal value() - RequestVal> optionalValue() + RequestVal> optionalValue() Examples -------- diff --git a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst similarity index 96% rename from akka-docs-dev/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst rename to akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst index 7ec157fb9e..bd817be74b 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst +++ b/akka-docs/rst/java/http/routing-dsl/request-vals/http-basic-authenticator.rst @@ -12,7 +12,7 @@ Http basic auth allows for protection of one or more routes with a username and To use it you subclass ``HttpBasicAuthenticator`` and provide your authentication logic. There are two factory methods to create the authentication results to return from the authentication logic: ``authenticateAs(T)`` and ``refuseAccess()``. If the authentication is not very quick in memory, for example -calls a database, make sure you do not block the web server thread by executing that in a separate ``Future`` +calls a database, make sure you do not block the web server thread by executing that in a separate ``CompletionStage`` and then ``flatMap`` the result into the authentication result. 
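As an illustration of that advice only (this is not code from the Akka project; ``checkAgainstDatabase`` and ``blockingIoExecutor`` are hypothetical names), a slow credential check can be pushed onto a dedicated executor and composed back with ``thenCompose``, the ``CompletionStage`` counterpart of ``flatMap``::

    // Run the blocking lookup on a dedicated pool instead of the server's dispatcher.
    CompletionStage<Boolean> credentialsOk =
        CompletableFuture.supplyAsync(
            () -> checkAgainstDatabase(userName, password), // hypothetical blocking call
            blockingIoExecutor);                            // hypothetical dedicated Executor

    // "flatMap" the asynchronous outcome into the final authentication result.
    CompletionStage<String> outcome =
        credentialsOk.thenCompose(ok ->
            CompletableFuture.completedFuture(ok ? "authenticated" : "access refused"));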
When you use the authenticator in your routes you must reference the concrete authenticator twice, diff --git a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/index.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/index.rst similarity index 100% rename from akka-docs-dev/rst/java/http/routing-dsl/request-vals/index.rst rename to akka-docs/rst/java/http/routing-dsl/request-vals/index.rst diff --git a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst b/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst similarity index 97% rename from akka-docs-dev/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst rename to akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst index 63adca42d3..6d7f071ee8 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst +++ b/akka-docs/rst/java/http/routing-dsl/request-vals/oauth2-authenticator.rst @@ -18,7 +18,7 @@ the request can either be refused by returning the return value of ``refuseAcces with an object that is application specific by returning the return value of ``authenticateAs(T)``. If the authentication is not very quick in memory, for example calls a separate authentication server -to verify the token, make sure you do not block the web server thread by executing that in a separate ``Future`` +to verify the token, make sure you do not block the web server thread by executing that in a separate ``CompletionStage`` and then ``flatMap`` the result into the authentication result. .. note:: OAuth2 Bearer Token sends the token as clear text and should ONLY EVER be used over diff --git a/akka-docs-dev/rst/java/http/routing-dsl/routes.rst b/akka-docs/rst/java/http/routing-dsl/routes.rst similarity index 98% rename from akka-docs-dev/rst/java/http/routing-dsl/routes.rst rename to akka-docs/rst/java/http/routing-dsl/routes.rst index 84dcbc86f6..46b536e1c3 100644 --- a/akka-docs-dev/rst/java/http/routing-dsl/routes.rst +++ b/akka-docs/rst/java/http/routing-dsl/routes.rst @@ -26,7 +26,7 @@ RouteResult The ``RouteResult`` is an opaque structure that represents possible results of evaluating a route. A ``RouteResult`` can only be created by using one of the methods of the ``RequestContext``. A result can either be a response, if -it was generated by one of the ``completeX`` methods, it can be an eventual result, i.e. a ``Future`` for ``handleWith``, - a function ``Function`` for ``handleWithSyncHandler``, -- a function ``Function>`` for ``handleWithAsyncHandler``. +- a function ``Function>`` for ``handleWithAsyncHandler``. Here is a complete example: @@ -112,7 +112,7 @@ if HTTP pipelining is enabled where processing of multiple incoming requests may ``handleWithSyncHandler`` or ``handleWithAsyncHandler``, or the ``map`` or ``mapAsync`` stream operators, this requirement will be automatically fulfilled. -See :ref:`routing-java` for a more convenient high-level DSL to create request handlers. +See :ref:`http-routing-java` for a more convenient high-level DSL to create request handlers. Streaming Request/Response Entities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -138,11 +138,11 @@ Server-Side HTTPS Support Akka HTTP supports TLS encryption on the server-side as well as on the :ref:`client-side `. 
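For orientation, a minimal sketch of creating such a context with the ``ConnectionContext.https`` factory introduced in the change just below (assumptions: an already initialised ``KeyManagerFactory`` named ``keyManagerFactory``, plus imports from ``javax.net.ssl`` and ``akka.http.javadsl``)::

    // Build a JDK SSLContext from previously loaded key material.
    SSLContext sslContext = SSLContext.getInstance("TLS");
    sslContext.init(keyManagerFactory.getKeyManagers(), null, new SecureRandom());

    // Wrap it for use by Akka HTTP's server-side bind methods.
    HttpsConnectionContext httpsContext = ConnectionContext.https(sslContext);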
-The central vehicle for configuring encryption is the ``HttpsContext``, which can be created using -the static method ``HttpsContext.create`` which is defined like this: +The central vehicle for configuring encryption is the ``HttpsConnectionContext``, which can be created using +the static method ``ConnectionContext.https`` which is defined like this: -.. includecode:: /../../akka-http-core/src/main/java/akka/http/javadsl/HttpsContext.java - :include: http-context-creation +.. includecode:: /../../akka-http-core/src/main/scala/akka/http/javadsl/ConnectionContext.scala + :include: https-context-creation On the server-side the ``bind``, and ``bindAndHandleXXX`` methods of the `akka.http.javadsl.Http`_ extension define an optional ``httpsContext`` parameter, which can receive the HTTPS configuration in the form of an ``HttpsContext`` @@ -160,7 +160,7 @@ to "run" the HTTP layer (and, potentially, higher-layers) against data that do n some other source. Potential scenarios where this might be useful include tests, debugging or low-level event-sourcing (e.g by replaying network traffic). -On the server-side the stand-alone HTTP layer forms a ``BidiFlow``, +On the server-side the stand-alone HTTP layer forms a ``BidiFlow``, that is a stage that "upgrades" a potentially encrypted raw connection to the HTTP level. You create an instance of the layer by calling one of the two overloads of the ``Http.get(system).serverLayer`` method, @@ -192,7 +192,7 @@ Bind failures The first type of failure is when the server is unable to bind to the given port. For example when the port is already taken by another application, or if the port is privileged (i.e. only usable by ``root``). -In this case the "binding future" will fail immediatly, and we can react to if by listening on the Future's completion: +In this case the "binding future" will fail immediatly, and we can react to if by listening on the CompletionStage’s completion: .. includecode:: ../../code/docs/http/javadsl/server/HttpServerExampleDocTest.java :include: binding-failure-handling diff --git a/akka-docs-dev/rst/java/http/server-side/websocket-support.rst b/akka-docs/rst/java/http/server-side/websocket-support.rst similarity index 78% rename from akka-docs-dev/rst/java/http/server-side/websocket-support.rst rename to akka-docs/rst/java/http/server-side/websocket-support.rst index a0902c0dbc..d595905a95 100644 --- a/akka-docs-dev/rst/java/http/server-side/websocket-support.rst +++ b/akka-docs/rst/java/http/server-side/websocket-support.rst @@ -52,20 +52,20 @@ a streaming message from an Akka Stream source. Server API ---------- -The entrypoint for the Websocket API is the synthetic ``UpgradeToWebsocket`` header which is added to a request -if Akka HTTP encounters a Websocket upgrade request. +The entrypoint for the WebSocket API is the synthetic ``UpgradeToWebSocket`` header which is added to a request +if Akka HTTP encounters a WebSocket upgrade request. -The Websocket specification mandates that details of the Websocket connection are negotiated by placing special-purpose +The WebSocket specification mandates that details of the WebSocket connection are negotiated by placing special-purpose HTTP-headers into request and response of the HTTP upgrade. In Akka HTTP these HTTP-level details of the WebSocket handshake are hidden from the application and don't need to be managed manually. -Instead, the synthetic ``UpgradeToWebsocket`` represents a valid Websocket upgrade request. 
An application can detect -a Websocket upgrade request by looking for the ``UpgradeToWebsocket`` header. It can choose to accept the upgrade and -start a Websocket connection by responding to that request with an ``HttpResponse`` generated by one of the -``UpgradeToWebsocket.handleMessagesWith`` methods. In its most general form this method expects two arguments: -first, a handler ``Flow`` that will be used to handle Websocket messages on this connection. +Instead, the synthetic ``UpgradeToWebSocket`` represents a valid WebSocket upgrade request. An application can detect +a WebSocket upgrade request by looking for the ``UpgradeToWebSocket`` header. It can choose to accept the upgrade and +start a WebSocket connection by responding to that request with an ``HttpResponse`` generated by one of the +``UpgradeToWebSocket.handleMessagesWith`` methods. In its most general form this method expects two arguments: +first, a handler ``Flow`` that will be used to handle WebSocket messages on this connection. Second, the application can optionally choose one of the proposed application-level sub-protocols by inspecting the -values of ``UpgradeToWebsocket.getRequestedProtocols`` and pass the chosen protocol value to ``handleMessagesWith``. +values of ``UpgradeToWebSocket.getRequestedProtocols`` and pass the chosen protocol value to ``handleMessagesWith``. Handling Messages +++++++++++++++++ @@ -76,7 +76,7 @@ scenarios this fits very well and such a ``Flow`` can be constructed from a simp There are other use-cases, e.g. in a server-push model, where a server message is sent spontaneously, or in a true bi-directional scenario where input and output aren't logically connected. Providing the handler as a ``Flow`` in -these cases may not fit. An overload of ``UpgradeToWebsocket.handleMessagesWith`` is provided, instead, +these cases may not fit. An overload of ``UpgradeToWebSocket.handleMessagesWith`` is provided, instead, which allows to pass an output-generating ``Source`` and an input-receiving ``Sink`` independently. Note that a handler is required to consume the data stream of each message to make place for new messages. Otherwise, @@ -87,40 +87,40 @@ Example Let's look at an example_. -Websocket requests come in like any other requests. In the example, requests to ``/greeter`` are expected to be -Websocket requests: +WebSocket requests come in like any other requests. In the example, requests to ``/greeter`` are expected to be +WebSocket requests: -.. includecode:: ../../code/docs/http/javadsl/server/WebsocketCoreExample.java +.. includecode:: ../../code/docs/http/javadsl/server/WebSocketCoreExample.java :include: websocket-handling -It uses a helper method ``akka.http.javadsl.model.ws.Websocket.handleWebsocketRequestWith`` which can be used if -only Websocket requests are expected. The method looks for the ``UpgradeToWebsocket`` header and returns a response -that will install the passed Websocket handler if the header is found. If the request is no Websocket request it will +It uses a helper method ``akka.http.javadsl.model.ws.WebSocket.handleWebSocketRequestWith`` which can be used if +only WebSocket requests are expected. The method looks for the ``UpgradeToWebSocket`` header and returns a response +that will install the passed WebSocket handler if the header is found. If the request is no WebSocket request it will return a ``400 Bad Request`` error response. 
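As a minimal independent sketch of such a handler ``Flow`` (this is not the example shipped with the docs; it assumes the ``akka.http.javadsl.model.ws`` message model and handles only strict text messages)::

    import akka.NotUsed;
    import akka.http.javadsl.model.ws.Message;
    import akka.http.javadsl.model.ws.TextMessage;
    import akka.stream.javadsl.Flow;

    // Reply to each strict text message with a greeting; everything else gets a fallback.
    final Flow<Message, Message, NotUsed> greeter =
        Flow.<Message>create().map(msg ->
            msg.isText() && msg.asTextMessage().isStrict()
                ? TextMessage.create("Hello " + msg.asTextMessage().getStrictText() + "!")
                : TextMessage.create("Hello stranger!"));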
In the example, the passed handler expects text messages where each message is expected to contain (a person's) name and then responds with another text message that contains a greeting: -.. includecode:: ../../code/docs/http/javadsl/server/WebsocketCoreExample.java +.. includecode:: ../../code/docs/http/javadsl/server/WebSocketCoreExample.java :include: websocket-handler Routing support --------------- -The routing DSL provides the ``handleWebsocketMessages`` directive to install a WebSocket handler if a request +The routing DSL provides the ``handleWebSocketMessages`` directive to install a WebSocket handler if a request is a WebSocket request. Otherwise, the directive rejects the request. Let's look at how the above example can be rewritten using the high-level routing DSL. Instead of writing the request handler manually, the routing behavior of the app is defined by a route that -uses the ``handleWebsocketRequests`` directive in place of the ``Websocket.handleWebsocketRequestWith``: +uses the ``handleWebSocketRequests`` directive in place of the ``WebSocket.handleWebSocketRequestWith``: -.. includecode:: ../../code/docs/http/javadsl/server/WebsocketRoutingExample.java +.. includecode:: ../../code/docs/http/javadsl/server/WebSocketRoutingExample.java :include: websocket-route The handling code itself will be the same as with using the low-level API. See the `full routing example`_. -.. _example: @github@/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketCoreExample.java -.. _full routing example: @github@/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebsocketRoutingExample.java \ No newline at end of file +.. _example: @github@/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebSocketCoreExample.java +.. _full routing example: @github@/akka-docs-dev/rst/java/code/docs/http/javadsl/server/WebSocketRoutingExample.java \ No newline at end of file diff --git a/akka-docs/rst/java/remoting.rst b/akka-docs/rst/java/remoting.rst index c17a977656..25a6028c41 100644 --- a/akka-docs/rst/java/remoting.rst +++ b/akka-docs/rst/java/remoting.rst @@ -475,8 +475,8 @@ There are lots of configuration properties that are related to remoting in Akka. .. 
_remote-configuration-nat-java: -Remote configuration for NAT and Docker ---------------------------------------- +Akka behind NAT or in a Docker container +---------------------------------------- In setups involving Network Address Translation (NAT), Load Balancers or Docker containers the hostname and port pair that akka binds to will be different than the "logical" diff --git a/akka-docs-dev/rst/java/stream-index.rst b/akka-docs/rst/java/stream/index.rst similarity index 69% rename from akka-docs-dev/rst/java/stream-index.rst rename to akka-docs/rst/java/stream/index.rst index e89dd7422e..75458af8f9 100644 --- a/akka-docs-dev/rst/java/stream-index.rst +++ b/akka-docs/rst/java/stream/index.rst @@ -8,7 +8,7 @@ Streams stream-introduction stream-quickstart - ../stream-design + ../../general/stream/stream-design stream-flows-and-basics stream-graphs stream-composition @@ -19,7 +19,8 @@ Streams stream-io stream-parallelism stream-testkit - ../stages-overview + ../../general/stream/stages-overview stream-cookbook - ../stream-configuration + ../../general/stream/stream-configuration migration-guide-1.0-2.x-java + migration-guide-2.0-2.4-java diff --git a/akka-docs/rst/java/stream/migration-guide-1.0-2.x-java.rst b/akka-docs/rst/java/stream/migration-guide-1.0-2.x-java.rst new file mode 100644 index 0000000000..6b3001914a --- /dev/null +++ b/akka-docs/rst/java/stream/migration-guide-1.0-2.x-java.rst @@ -0,0 +1,9 @@ +.. _migration-2.0-java: + +########################## +Migration Guide 1.0 to 2.x +########################## + +For this migration guide see `the documentation for Akka Streams 2.0`_. + +.. _`the documentation for Akka Streams 2.0`: http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0.2/java/migration-guide-1.0-2.x-java.html diff --git a/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst b/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst new file mode 100644 index 0000000000..8867effbc2 --- /dev/null +++ b/akka-docs/rst/java/stream/migration-guide-2.0-2.4-java.rst @@ -0,0 +1,146 @@ +.. _migration-streams-2.0-2.4-java: + +############################## +Migration Guide 2.0.x to 2.4.x +############################## + +General notes +============= + +Java DSL now uses Java 8 types: CompletionStage and Optional +------------------------------------------------------------ + +In order to provide a top-notch Java API we switched from Scala’s Future and Akka’s +``akka.japi.Option`` interim solutions to the JDK’s own types for deferred computation +and optional results. This has been done throughout Streams & HTTP, most notably changing most +materialized types, but also the signature of the ``mapAsync`` combinator and the +asynchronous route result combinators in the HTTP DSL. + +The ``akka.pattern`` package has been updated with a new set of implementations within +the ``PatternCS`` class that provide the ability to interact between Actors and Futures +(or streams) for ``CompletionStage``. + +Should you have the need to use Scala Futures with these new Java APIs please use +the ``scala-java8-compat`` library that comes as a dependency of Akka. For more +information see `the documentation``_. + +.. 
_`the documentation`: https://github.com/scala/scala-java8-compat + +akka.Done and akka.NotUsed replacing Unit and BoxedUnit +------------------------------------------------------- + +To provide clearer signatures and have a unified API for both +Java and Scala two new types have been introduced: + +``akka.NotUsed`` is meant to be used instead of ``Unit`` in Scala +and ``BoxedUnit`` in Java to signify that the type parameter is required +but not actually used. This is commonly the case with ``Source``, ``Flow`` and ``Sink`` +that do not materialize into any value. + +``akka.Done`` is added for the use case where it is boxed inside another object to signify +completion but there is no actual value attached to the completion. It is used to replace +occurrences of ``Future<BoxedUnit>`` with ``Future<Done>`` in Java and ``Future[Unit]`` with +``Future[Done]`` in Scala. + +All previous usage of ``Unit`` and ``BoxedUnit`` for these two cases in the Akka Streams APIs +has been updated. + +This means that Java code like this:: + + Source<String, BoxedUnit> source = Source.from(Arrays.asList("1", "2", "3")); + Sink<String, Future<BoxedUnit>> sink = Sink.ignore(); + +needs to be changed into:: + + Source<String, NotUsed> source = Source.from(Arrays.asList("1", "2", "3")); + Sink<String, CompletionStage<Done>> sink = Sink.ignore(); + +These changes apply to all the places where streams are used, which means that signatures +in the persistent query APIs also are affected. + +Changed Operators +================= + +``expand()`` is now based on an Iterator +---------------------------------------- + +Previously the ``expand`` combinator required two functions as input: the first +one lifted incoming values into an extrapolation state and the second one +extracted values from that, possibly evolving that state. This has been +simplified into a single function that turns the incoming element into an +Iterator. + +The most prominent use-case previously was to just repeat the previously received value:: + + // This no longer works! + Flow.of(Integer.class).expand(i -> i)(i -> new Pair<>(i, i)); + +In Akka 2.4.x this is simplified to: + +.. includecode:: ../code/docs/stream/MigrationsJava.java#expand-continually + +If state needs to be kept during the expansion process then this state will +need to be managed by the Iterator. The example of counting the number of +expansions might previously have looked like:: + + // This no longer works! + Flow.of(Integer.class).expand(i -> new Pair<>(i, 0))( + pair -> new Pair<>(new Pair<>(pair.first(), pair.second()), + new Pair<>(pair.first(), pair.second() + 1))); + +In Akka 2.4.x this is formulated like so: + +.. includecode:: ../code/docs/stream/MigrationsJava.java#expand-state + +Changed Sources / Sinks +======================= + +Sink.asPublisher is now configured using an enum +------------------------------------------------ + +In order to not use a meaningless boolean parameter we have changed the signature to: + +.. includecode:: ../code/docs/stream/MigrationsJava.java#asPublisher-import + +.. includecode:: ../code/docs/stream/MigrationsJava.java#asPublisher + +IO Sources / Sinks materialize IOResult +--------------------------------------- + +Materialized values of the following sources and sinks: + + * ``FileIO.fromFile`` + * ``FileIO.toFile`` + * ``StreamConverters.fromInputStream`` + * ``StreamConverters.fromOutputStream`` + +have been changed from ``Long`` to ``akka.stream.io.IOResult``. +This allows signaling more complicated completion scenarios.
For example, on failure it is now possible +to return the exception and the number of bytes written until that exception occured. + +PushStage, PushPullStage and DetachedStage have been deprecated in favor of GraphStage +====================================================================================== + +The :class:`PushStage` :class:`PushPullStage` and :class:`DetachedStage` classes have been deprecated and +should be replaced by :class:`GraphStage` (:ref:`graphstage-java`) which is now a single powerful API +for custom stream processing. + +Update procedure +---------------- + +Please consult the :class:`GraphStage` documentation (:ref:`graphstage-java`) and the `previous migration guide`_ +on migrating from :class:`AsyncStage` to :class:`GraphStage`. + +.. _`previous migration guide`: http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0.2/java/migration-guide-1.0-2.x-java.html#AsyncStage_has_been_replaced_by_GraphStage + + +Changes in Akka HTTP +==================== + +Routing settings parameter name +------------------------------- + +``RoutingSettings`` were previously the only setting available on ``RequestContext``, +and were accessible via ``settings``. We now made it possible to configure the parsers +settings as well, so ``RoutingSettings`` is now ``routingSettings`` and ``ParserSettings`` is +now accessible via ``parserSettings``. \ No newline at end of file diff --git a/akka-docs-dev/rst/java/stream-composition.rst b/akka-docs/rst/java/stream/stream-composition.rst similarity index 84% rename from akka-docs-dev/rst/java/stream-composition.rst rename to akka-docs/rst/java/stream/stream-composition.rst index 27da645dd3..113dfc4858 100644 --- a/akka-docs-dev/rst/java/stream-composition.rst +++ b/akka-docs/rst/java/stream/stream-composition.rst @@ -17,7 +17,7 @@ we illustrate the most common used stages viewed as "boxes". | -.. image:: ../images/compose_shapes.png +.. image:: ../../images/compose_shapes.png :align: center | @@ -42,13 +42,13 @@ hiding them behind a *shape* that looks like a :class:`Source`, :class:`Flow`, e | -.. image:: ../images/compose_composites.png +.. image:: ../../images/compose_composites.png :align: center | One interesting example above is a :class:`Flow` which is composed of a disconnected :class:`Sink` and :class:`Source`. -This can be achieved by using the ``wrap()`` constructor method on :class:`Flow` which takes the two parts as +This can be achieved by using the ``fromSinkAndSource()`` constructor method on :class:`Flow` which takes the two parts as parameters. The example :class:`BidiFlow` demonstrates that internally a module can be of arbitrary complexity, and the exposed @@ -63,7 +63,7 @@ that is built from a composite :class:`Source` and a composite :class:`Sink` (wh | -.. image:: ../images/compose_nested_flow.png +.. image:: ../../images/compose_nested_flow.png :align: center | @@ -78,7 +78,7 @@ with the rest of the graph), but this demonstrates the uniform underlying model. If we try to build a code snippet that corresponds to the above diagram, our first try might look like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#non-nested-flow +.. includecode:: ../code/docs/stream/CompositionDocTest.java#non-nested-flow It is clear however that there is no nesting present in our first attempt, since the library cannot figure out where we intended to put composite module boundaries, it is our responsibility to do that. 
If we are using the @@ -87,7 +87,7 @@ methods ``withAttributes()`` or ``named()`` (where the latter is just a shorthan The following code demonstrates how to achieve the desired nesting: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#nested-flow +.. includecode:: ../code/docs/stream/CompositionDocTest.java#nested-flow Once we have hidden the internals of our components, they act like any other built-in component of similar shape. If we hide some of the internals of our composites, the result looks just like if any other predefine component has been @@ -95,7 +95,7 @@ used: | -.. image:: ../images/compose_nested_flow_opaque.png +.. image:: ../../images/compose_nested_flow_opaque.png :align: center | @@ -103,7 +103,7 @@ used: If we look at usage of built-in components, and our custom components, there is no difference in usage as the code snippet below demonstrates. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#reuse +.. includecode:: ../code/docs/stream/CompositionDocTest.java#reuse Composing complex systems ------------------------- @@ -119,7 +119,7 @@ As a first example, let's look at a more complex layout: | -.. image:: ../images/compose_graph.png +.. image:: ../../images/compose_graph.png :align: center | @@ -129,12 +129,12 @@ can be materialized) that encapsulates a non-trivial stream processing network. directed and non-directed cycles. The ``runnable()`` method of the :class:`GraphDSL` factory object allows the creation of a general, closed, and runnable graph. For example the network on the diagram can be realized like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#complex-graph +.. includecode:: ../code/docs/stream/CompositionDocTest.java#complex-graph In the code above we used the implicit port numbering feature to make the graph more readable and similar to the diagram. It is possible to refer to the ports, so another version might look like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#complex-graph-alt +.. includecode:: ../code/docs/stream/CompositionDocTest.java#complex-graph-alt | @@ -145,14 +145,14 @@ from the previous example, what remains is a partial graph: | -.. image:: ../images/compose_graph_partial.png +.. image:: ../../images/compose_graph_partial.png :align: center | We can recreate a similar graph in code, using the DSL in a similar way than before: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#partial-graph +.. includecode:: ../code/docs/stream/CompositionDocTest.java#partial-graph The only new addition is the return value of the builder block, which is a :class:`Shape`. All graphs (including :class:`Source`, :class:`BidiFlow`, etc) have a shape, which encodes the *typed* ports of the module. In our example @@ -165,31 +165,31 @@ it is a good practice to give names to modules to help debugging. | -.. image:: ../images/compose_graph_shape.png +.. image:: ../../images/compose_graph_shape.png :align: center | Since our partial graph has the right shape, it can be already used in the simpler, linear DSL: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#partial-use +.. 
includecode:: ../code/docs/stream/CompositionDocTest.java#partial-use It is not possible to use it as a :class:`Flow` yet, though (i.e. we cannot call ``.filter()`` on it), but :class:`Flow` -has a ``wrap()`` method that just adds the DSL to a :class:`FlowShape`. There are similar methods on :class:`Source`, +has a ``fromGraph()`` method that just adds the DSL to a :class:`FlowShape`. There are similar methods on :class:`Source`, :class:`Sink` and :class:`BidiShape`, so it is easy to get back to the simpler DSL if a graph has the right shape. For convenience, it is also possible to skip the partial graph creation, and use one of the convenience creator methods. To demonstrate this, we will create the following graph: | -.. image:: ../images/compose_graph_flow.png +.. image:: ../../images/compose_graph_flow.png :align: center | The code version of the above closed graph might look like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#partial-flow-dsl +.. includecode:: ../code/docs/stream/CompositionDocTest.java#partial-flow-dsl .. note:: All graph builder sections check if the resulting graph has all ports connected except the exposed ones and will @@ -198,7 +198,7 @@ The code version of the above closed graph might look like this: We are still in debt of demonstrating that :class:`RunnableGraph` is a component just like any other, which can be embedded in graphs. In the following snippet we embed one closed graph in another: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#embed-closed +.. includecode:: ../code/docs/stream/CompositionDocTest.java#embed-closed The type of the imported module indicates that the imported module has a :class:`ClosedShape`, and so we are not able to wire it to anything else inside the enclosing closed graph. Nevertheless, this "island" is embedded properly, @@ -241,7 +241,7 @@ The propagation of the individual materialized values from the enclosed modules | -.. image:: ../images/compose_mat.png +.. image:: ../../images/compose_mat.png :align: center | @@ -250,29 +250,29 @@ To implement the above, first, we create a composite :class:`Source`, where the materialized type of :class:`Promise`. By using the combiner function ``Keep.left()``, the resulting materialized type is of the nested module (indicated by the color *red* on the diagram): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#mat-combine-1 +.. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-1 Next, we create a composite :class:`Flow` from two smaller components. Here, the second enclosed :class:`Flow` has a -materialized type of :class:`Future`, and we propagate this to the parent by using ``Keep.right()`` +materialized type of :class:`CompletionStage`, and we propagate this to the parent by using ``Keep.right()`` as the combiner function (indicated by the color *yellow* on the diagram): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#mat-combine-2 +.. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-2 As a third step, we create a composite :class:`Sink`, using our ``nestedFlow`` as a building block. 
In this snippet, both the enclosed :class:`Flow` and the folding :class:`Sink` has a materialized value that is interesting for us, so we use ``Keep.both()`` to get a :class:`Pair` of them as the materialized type of ``nestedSink`` (indicated by the color *blue* on the diagram) -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#mat-combine-3 +.. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-3 As the last example, we wire together ``nestedSource`` and ``nestedSink`` and we use a custom combiner function to create a yet another materialized type of the resulting :class:`RunnableGraph`. This combiner function just ignores -the :class:`Future` part, and wraps the other two values in a custom case class :class:`MyClass` +the :class:`CompletionStage` part, and wraps the other two values in a custom case class :class:`MyClass` (indicated by color *purple* on the diagram): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#mat-combine-4a +.. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-4a -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#mat-combine-4b +.. includecode:: ../code/docs/stream/CompositionDocTest.java#mat-combine-4b .. note:: The nested structure in the above example is not necessary for combining the materialized values, it just @@ -292,7 +292,7 @@ unless they override them with a custom value. The code below, a modification of an earlier example sets the ``inputBuffer`` attribute on certain modules, but not on others: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/CompositionDocTest.java#attributes-inheritance +.. includecode:: ../code/docs/stream/CompositionDocTest.java#attributes-inheritance The effect is, that each module inherits the ``inputBuffer`` attribute from its enclosing parent, unless it has the same attribute explicitly set. ``nestedSource`` gets the default attributes from the materializer itself. ``nestedSink`` @@ -301,7 +301,7 @@ except the ``map`` stage which has again an explicitly provided attribute overri | -.. image:: ../images/compose_attributes.png +.. image:: ../../images/compose_attributes.png :align: center | diff --git a/akka-docs-dev/rst/java/stream-cookbook.rst b/akka-docs/rst/java/stream/stream-cookbook.rst similarity index 81% rename from akka-docs-dev/rst/java/stream-cookbook.rst rename to akka-docs/rst/java/stream/stream-cookbook.rst index bb75ac44e1..4b32a314e3 100644 --- a/akka-docs-dev/rst/java/stream-cookbook.rst +++ b/akka-docs/rst/java/stream/stream-cookbook.rst @@ -32,12 +32,12 @@ Logging elements of a stream The simplest solution is to simply use a ``map`` operation and use ``println`` to print the elements received to the console. While this recipe is rather simplistic, it is often suitable for a quick debug session. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeLoggingElements.java#println-debug +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeLoggingElements.java#println-debug Another approach to logging is to use ``log()`` operation which allows configuring logging for elements flowing through the stream as well as completion and erroring. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeLoggingElements.java#log-custom +.. 
includecode:: ../code/docs/stream/javadsl/cookbook/RecipeLoggingElements.java#log-custom Flattening a stream of sequences -------------------------------- @@ -49,21 +49,21 @@ The ``mapConcat`` operation can be used to implement a one-to-many transformatio in the form of ``In -> List``. In this case we want to map a ``List`` of elements to the elements in the collection itself, so we can just call ``mapConcat(l -> l)``. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeFlattenList.java#flattening-lists +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeFlattenList.java#flattening-lists Draining a stream to a strict collection ---------------------------------------- -**Situation:** A finite sequence of elements is given as a stream, but a scala collection is needed instead. +**Situation:** A finite sequence of elements is given as a stream, but a Scala collection is needed instead. In this recipe we will use the ``grouped`` stream operation that groups incoming elements into a stream of limited size collections (it can be seen as the almost opposite version of the "Flattening a stream of sequences" recipe we showed before). By using a ``grouped(MAX_ALLOWED_SIZE)`` we create a stream of groups with maximum size of ``MaxAllowedSeqSize`` and then we take the first element of this stream by attaching a ``Sink.head()``. What we get is a -:class:`Future` containing a sequence with all the elements of the original up to ``MAX_ALLOWED_SIZE`` size (further +:class:`CompletionStage` containing a sequence with all the elements of the original up to ``MAX_ALLOWED_SIZE`` size (further elements are dropped). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeToStrict.java#draining-to-list +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeToStrict.java#draining-to-list Calculating the digest of a ByteString stream --------------------------------------------- @@ -83,9 +83,9 @@ we can emit further elements ``onPull`` is called again, and we see ``ctx.isFini source has been depleted already). Since we only want to emit a final element it is enough to call ``ctx.pushAndFinish`` passing the digest ByteString to be emitted. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeDigest.java#calculating-digest +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeDigest.java#calculating-digest -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeDigest.java#calculating-digest2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeDigest.java#calculating-digest2 .. _cookbook-parse-lines-java: @@ -98,7 +98,7 @@ needs to be parsed. The :class:`Framing` helper class contains a convenience method to parse messages from a stream of ``ByteStrings``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeParseLines.java#parse-lines +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeParseLines.java#parse-lines Implementing reduce-by-key -------------------------- @@ -113,7 +113,7 @@ we have a stream of streams, where every substream will serve identical words. To count the words, we need to process the stream of streams (the actual groups containing identical words). ``groupBy`` returns a :class:`SubSource`, which means that we transform the resulting substreams directly. 
In this case we use -the ``fold`` combinator to aggregate the word itself and the number of its +the ``reduce`` combinator to aggregate the word itself and the number of its occurrences within a :class:`Pair`. Each substream will then emit one final value—precisely such a pair—when the overall input completes. As a last step we merge back these values from the substreams into one single @@ -128,19 +128,19 @@ If the ``groupBy`` operator encounters more keys than this number then the stream cannot continue without violating its resource bound, in this case ``groupBy`` will terminate with a failure. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeReduceByKeyTest.java#word-count +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java#word-count By extracting the parts specific to *wordcount* into * a ``groupKey`` function that defines the groups -* a ``foldZero`` that defines the zero element used by the fold on the substream given the group key -* a ``fold`` function that does the actual reduction +* a ``map`` map each element to value that is used by the reduce on the substream +* a ``reduce`` function that does the actual reduction we get a generalized version below: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeReduceByKeyTest.java#reduce-by-key-general +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java#reduce-by-key-general -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeReduceByKeyTest.java#reduce-by-key-general2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeReduceByKeyTest.java#reduce-by-key-general2 .. note:: Please note that the reduce-by-key version we discussed above is sequential @@ -161,7 +161,7 @@ To achieve the desired result, we attack the problem in two steps: * Then we take this new stream of message topic pairs (containing a separate pair for each topic a given message belongs to) and feed it into groupBy, using the topic as the group key. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeMultiGroupByTest.java#multi-groupby +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeMultiGroupByTest.java#multi-groupby Working with Graphs =================== @@ -178,14 +178,14 @@ trigger signal arrives. This recipe solves the problem by simply zipping the stream of ``Message`` elments with the stream of ``Trigger`` signals. Since ``Zip`` produces pairs, we simply map the output stream selecting the first element of the pair. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeManualTrigger.java#manually-triggered-stream +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeManualTrigger.java#manually-triggered-stream Alternatively, instead of using a ``Zip``, and then using ``map`` to get the first element of the pairs, we can avoid creating the pairs in the first place by using ``ZipWith`` which takes a two argument function to produce the output element. If this function would return a pair of the two argument it would be exactly the behavior of ``Zip`` so ``ZipWith`` is a generalization of zipping. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeManualTrigger.java#manually-triggered-stream-zipwith +.. 
includecode:: ../code/docs/stream/javadsl/cookbook/RecipeManualTrigger.java#manually-triggered-stream-zipwith Balancing jobs to a fixed pool of workers @@ -202,9 +202,9 @@ The graph consists of a ``Balance`` node which is a special fan-out operation th downstream consumers. In a ``for`` loop we wire all of our desired workers as outputs of this balancer element, then we wire the outputs of these workers to a ``Merge`` element that will collect the results from the workers. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeWorkerPool.java#worker-pool +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java#worker-pool -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeWorkerPool.java#worker-pool2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeWorkerPool.java#worker-pool2 Working with rate ================= @@ -228,7 +228,7 @@ case this is ``i -> i`` so our folding state starts form the message itself. The special: given the aggregate value (the last message) and the new element (the freshest element) our aggregate state becomes simply the freshest element. This choice of functions results in a simple dropping operation. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeSimpleDrop.java#simple-drop +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeSimpleDrop.java#simple-drop Dropping broadcast ------------------ @@ -243,9 +243,9 @@ defining a dropping strategy instead of the default ``Backpressure``. This allow between the different consumers (the buffer smooths out small rate variances), but also allows faster consumers to progress by dropping from the buffer of the slow consumers if necessary. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeDroppyBroadcast.java#droppy-bcast +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java#droppy-bcast -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeDroppyBroadcast.java#droppy-bcast2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeDroppyBroadcast.java#droppy-bcast2 Collecting missed ticks ----------------------- @@ -265,7 +265,7 @@ We will use ``conflate`` to solve the problem. Conflate takes two functions: As a result, we have a flow of ``Int`` where the number represents the missed ticks. A number 0 means that we were able to consume the tick fast enough (i.e. zero means: 1 non-missed tick + 0 missed ticks) -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeMissedTicks.java#missed-ticks +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeMissedTicks.java#missed-ticks Create a stream processor that repeats the last element seen ------------------------------------------------------------ @@ -283,7 +283,7 @@ to feed the downstream if no upstream element is ready yet. In the ``onPush()`` of ``onPull()``). The downstream ``onPull`` handler is very similar, we immediately relieve the downstream by emitting ``currentValue``. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeHold.java#hold-version-1 +.. 
includecode:: ../code/docs/stream/javadsl/cookbook/RecipeHold.java#hold-version-1 While it is relatively simple, the drawback of the first version is that it needs an arbitrary initial element which is not always possible to provide. Hence, we create a second version where the downstream might need to wait in one single @@ -294,9 +294,9 @@ We introduce a boolean variable ``waitingFirstValue`` to denote whether the firs a null can be used with the same purpose). In the downstream ``onPull()`` handler the difference from the previous version is that we call ``holdDownstream()`` if the first element is not yet available and thus blocking our downstream. The upstream ``onPush()`` handler sets ``waitingFirstValue`` to false, and after checking if ``holdDownstream()`` has been called it -either releaves the upstream producer, or both the upstream producer and downstream consumer by calling ``pushAndPull()`` +either relieves the upstream producer, or both the upstream producer and downstream consumer by calling ``pushAndPull()`` -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeHold.java#hold-version-2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeHold.java#hold-version-2 Globally limiting the rate of a set of streams ---------------------------------------------- @@ -317,13 +317,13 @@ of the sender is added to a queue. Once the timer for replenishing the pending p message, we increment the pending permits counter and send a reply to each of the waiting senders. If there are more waiting senders than permits available we will stay in the ``closed`` state. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeGlobalRateLimit.java#global-limiter-actor +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java#global-limiter-actor To create a Flow that uses this global limiter actor we use the ``mapAsync`` function with the combination of the ``ask`` pattern. We also define a timeout, so if a reply is not received during the configured maximum wait period the returned future from ``ask`` will fail, which will fail the corresponding stream as well. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeGlobalRateLimit.java#global-limiter-flow +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeGlobalRateLimit.java#global-limiter-flow .. note:: The global actor used for limiting introduces a global bottleneck. You might want to assign a dedicated dispatcher @@ -349,9 +349,9 @@ which implements the following logic: Both ``onPush()`` and ``onPull()`` calls ``emitChunkOrPull()`` the only difference is that the push handler also stores the incoming chunk by appending to the end of the buffer. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeByteStrings.java#bytestring-chunker +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeByteStrings.java#bytestring-chunker -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeByteStrings.java#bytestring-chunker2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeByteStrings.java#bytestring-chunker2 Limit the number of bytes passing through a stream of ByteStrings ----------------------------------------------------------------- @@ -363,9 +363,9 @@ This recipe uses a :class:`PushStage` to implement the desired feature. 
In the o ``onPush()`` we just update a counter and see if it gets larger than ``maximumBytes``. If a violation happens we signal failure, otherwise we forward the chunk we have received. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeByteStrings.java#bytes-limiter +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeByteStrings.java#bytes-limiter -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeByteStrings.java#bytes-limiter2 +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeByteStrings.java#bytes-limiter2 Compact ByteStrings in a stream of ByteStrings ---------------------------------------------- @@ -377,7 +377,7 @@ chain we want to have clean copies that are no longer referencing the original B The recipe is a simple use of map, calling the ``compact()`` method of the :class:`ByteString` elements. This does copying of the underlying arrays, so this should be the last element of a long chain if used. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeByteStrings.java#compacting-bytestrings +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeByteStrings.java#compacting-bytestrings Injecting keep-alive messages into a stream of ByteStrings ---------------------------------------------------------- @@ -387,4 +387,4 @@ but only if this does not interfere with normal traffic. There is a built-in operation that allows to do this directly: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeKeepAlive.java#inject-keepalive +.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeKeepAlive.java#inject-keepalive diff --git a/akka-docs-dev/rst/java/stream-customize.rst b/akka-docs/rst/java/stream/stream-customize.rst similarity index 91% rename from akka-docs-dev/rst/java/stream-customize.rst rename to akka-docs/rst/java/stream/stream-customize.rst index b5cb0a57c0..19aca289fa 100644 --- a/akka-docs-dev/rst/java/stream-customize.rst +++ b/akka-docs/rst/java/stream/stream-customize.rst @@ -29,7 +29,7 @@ As a first motivating example, we will build a new :class:`Source` that will sim cancelled. To start, we need to define the "interface" of our stage, which is called *shape* in Akka Streams terminology (this is explained in more detail in the section :ref:`composition-java`). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#simple-source +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#simple-source As you see, in itself the :class:`GraphStage` only defines the ports of this stage and a shape that contains the ports. It also has a user implemented method called ``createLogic``. If you recall, stages are reusable in multiple @@ -50,7 +50,7 @@ that they are already usable in many situations, but do not provide the DSL meth ``Source.fromGraph`` (see :ref:`composition-java` for more details about graphs and DSLs). Now we can use the source as any other built-in one: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#simple-source-usage +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#simple-source-usage Port states, AbstractInHandler and AbstractOutHandler ----------------------------------------------------- @@ -84,7 +84,7 @@ in that state. | -.. image:: ../images/outport_transitions.png +.. 
image:: ../../images/outport_transitions.png :align: center | @@ -99,7 +99,7 @@ The following operations are available for *input* ports: The events corresponding to an *input* port can be received in an :class:`AbstractInHandler` instance registered to the input port using ``setHandler(in, handler)``. This handler has three callbacks: -* ``onPush()`` is called when the output port has now a new element. Now it is possible to aquire this element using +* ``onPush()`` is called when the output port has now a new element. Now it is possible to acquire this element using ``grab(in)`` and/or call ``pull(in)`` on the port to request the next element. It is not mandatory to grab the element, but if it is pulled while the element has not been grabbed it will drop the buffered element. * ``onUpstreamFinish()`` is called once the upstream has completed and no longer can be pulled for new elements. @@ -120,7 +120,7 @@ in that state. | -.. image:: ../images/inport_transitions.png +.. image:: ../../images/inport_transitions.png :align: center | @@ -165,7 +165,7 @@ flowing downstream. | -.. image:: ../images/graph_stage_conceptual.png +.. image:: ../../images/graph_stage_conceptual.png :align: center :width: 500 @@ -176,7 +176,7 @@ To illustrate these concepts we create a small :class:`GraphStage` that implemen | -.. image:: ../images/graph_stage_map.png +.. image:: ../../images/graph_stage_map.png :align: center :width: 300 @@ -185,7 +185,7 @@ To illustrate these concepts we create a small :class:`GraphStage` that implemen Map calls ``push(out)`` from the ``onPush()`` handler and it also calls ``pull()`` from the ``onPull`` handler resulting in the conceptual wiring above, and fully expressed in code below: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#one-to-one +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#one-to-one Map is a typical example of a one-to-one transformation of a stream where demand is passed along upstream elements passed on downstream. @@ -195,7 +195,7 @@ filter. The conceptual wiring of ``Filter`` looks like this: | -.. image:: ../images/graph_stage_filter.png +.. image:: ../../images/graph_stage_filter.png :align: center :width: 300 @@ -207,14 +207,14 @@ we return the “ball” to our upstream so that we get the new element. This is example by adding a conditional in the ``onPush`` handler and decide between a ``pull(in)`` or ``push(out)`` call (and of course not having a mapping ``f`` function). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#many-to-one +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#many-to-one To complete the picture we define a one-to-many transformation as the next step. We chose a straightforward example stage that emits every upstream element twice downstream. The conceptual wiring of this stage looks like this: | -.. image:: ../images/graph_stage_duplicate.png +.. image:: ../../images/graph_stage_duplicate.png :align: center :width: 300 @@ -224,7 +224,7 @@ This is a stage that has state: an option with the last element it has seen indi has duplicated this last element already or not. We must also make sure to emit the extra element if the upstream completes. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#one-to-many +.. 
includecode:: ../code/docs/stream/GraphStageDocTest.java#one-to-many In this case a pull from downstream might be consumed by the stage itself rather than passed along upstream as the stage might contain an element it wants to @@ -237,7 +237,7 @@ This example can be simplified by replacing the usage of a mutable state with ca ``emitMultiple`` which will replace the handlers, emit each of multiple elements and then reinstate the original handlers: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#simpler-one-to-many +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#simpler-one-to-many Finally, to demonstrate all of the stages above, we put them together into a processing chain, which conceptually would correspond to the following structure: @@ -245,7 +245,7 @@ which conceptually would correspond to the following structure: | -.. image:: ../images/graph_stage_chain.png +.. image:: ../../images/graph_stage_chain.png :align: center :width: 700 @@ -253,7 +253,7 @@ which conceptually would correspond to the following structure: In code this is only a few lines, using the ``via`` use our custom stages in a stream: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#graph-stage-chain +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#graph-stage-chain If we attempt to draw the sequence of events, it shows that there is one "event token" in circulation in a potential chain of stages, just like our conceptual "railroad tracks" representation predicts. @@ -261,7 +261,7 @@ in circulation in a potential chain of stages, just like our conceptual "railroa | -.. image:: ../images/graph_stage_tracks_1.png +.. image:: ../../images/graph_stage_tracks_1.png :align: center :width: 700 @@ -299,7 +299,7 @@ In this sample the stage toggles between open and closed, where open means no el stage starts out as closed but as soon as an element is pushed downstream the gate becomes open for a duration of time during which it will consume and drop upstream messages: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#timed +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#timed Using asynchronous side-channels -------------------------------- @@ -318,7 +318,7 @@ Sharing the AsyncCallback from the constructor risks race conditions, therefore This example shows an asynchronous side channel graph stage that starts dropping elements when a future completes: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#async-side-channel +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#async-side-channel Integration with actors @@ -354,7 +354,7 @@ stage logic the materialized value must be provided In this sample the materialized value is a future containing the first element to go through the stream: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#materialized +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#materialized Using attributes to affect the behavior of a stage -------------------------------------------------- @@ -390,7 +390,7 @@ is seen from downstream. | -.. image:: ../images/graph_stage_detached_tracks_1.png +.. image:: ../../images/graph_stage_detached_tracks_1.png :align: center :width: 500 @@ -402,7 +402,7 @@ into the buffer stage. | -.. 
image:: ../images/graph_stage_detached_tracks_2.png +.. image:: ../../images/graph_stage_detached_tracks_2.png :align: center :width: 500 @@ -414,7 +414,7 @@ initialization. The buffer has demand for up to two elements without any downstr The following code example demonstrates a buffer class corresponding to the message sequence chart above. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphStageDocTest.java#detached +.. includecode:: ../code/docs/stream/GraphStageDocTest.java#detached Thread safety of custom processing stages ========================================= diff --git a/akka-docs-dev/rst/java/stream-error.rst b/akka-docs/rst/java/stream/stream-error.rst similarity index 69% rename from akka-docs-dev/rst/java/stream-error.rst rename to akka-docs/rst/java/stream/stream-error.rst index 5462ab4081..c7dbde6d2d 100644 --- a/akka-docs-dev/rst/java/stream-error.rst +++ b/akka-docs/rst/java/stream/stream-error.rst @@ -28,11 +28,11 @@ There are three ways to handle exceptions from application code: By default the stopping strategy is used for all exceptions, i.e. the stream will be completed with failure when an exception is thrown. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowErrorDocTest.java#stop +.. includecode:: ../code/docs/stream/FlowErrorDocTest.java#stop The default supervision strategy for a stream can be defined on the settings of the materializer. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowErrorDocTest.java#resume +.. includecode:: ../code/docs/stream/FlowErrorDocTest.java#resume Here you can see that all ``ArithmeticException`` will resume the processing, i.e. the elements that cause the division by zero are effectively dropped. @@ -44,12 +44,12 @@ elements that cause the division by zero are effectively dropped. The supervision strategy can also be defined for all operators of a flow. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowErrorDocTest.java#resume-section +.. includecode:: ../code/docs/stream/FlowErrorDocTest.java#resume-section ``Restart`` works in a similar way as ``Resume`` with the addition that accumulated state, if any, of the failing processing stage will be reset. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowErrorDocTest.java#restart-section +.. includecode:: ../code/docs/stream/FlowErrorDocTest.java#restart-section Errors from mapAsync ==================== @@ -61,19 +61,19 @@ discard those that cannot be found. We start with the tweet stream of authors: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#tweet-authors +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#tweet-authors Assume that we can lookup their email address using: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#email-address-lookup2 +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-address-lookup2 -The ``Future`` is completed with ``Failure`` if the email is not found. +The ``CompletionStage`` is completed normally if the email is not found. Transforming the stream of authors to a stream of email addresses by using the ``lookupEmail`` service can be done with ``mapAsync`` and we use ``Supervision.getResumingDecider`` to drop unknown email addresses: -.. 
includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#email-addresses-mapAsync-supervision +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-addresses-mapAsync-supervision If we would not use ``Resume`` the default stopping strategy would complete the stream -with failure on the first ``Future`` that was completed with ``Failure``. +with failure on the first ``CompletionStage`` that was completed exceptionally. diff --git a/akka-docs-dev/rst/java/stream-flows-and-basics.rst b/akka-docs/rst/java/stream/stream-flows-and-basics.rst similarity index 92% rename from akka-docs-dev/rst/java/stream-flows-and-basics.rst rename to akka-docs/rst/java/stream/stream-flows-and-basics.rst index 941112663e..1243e517e8 100644 --- a/akka-docs-dev/rst/java/stream-flows-and-basics.rst +++ b/akka-docs/rst/java/stream/stream-flows-and-basics.rst @@ -76,23 +76,23 @@ starting up Actors). Thanks to Flows being simply a description of the processin thread-safe, and freely shareable*, which means that it is for example safe to share and send them between actors, to have one actor prepare the work, and then have it be materialized at some completely different place in the code. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#materialization-in-steps +.. includecode:: ../code/docs/stream/FlowDocTest.java#materialization-in-steps After running (materializing) the ``RunnableGraph`` we get a special container object, the ``MaterializedMap``. Both sources and sinks are able to put specific objects into this map. Whether they put something in or not is implementation -dependent. For example a ``FoldSink`` will make a ``Future`` available in this map which will represent the result +dependent. For example a ``FoldSink`` will make a ``CompletionStage`` available in this map which will represent the result of the folding process over the stream. In general, a stream can expose multiple materialized values, but it is quite common to be interested in only the value of the Source or the Sink in the stream. For this reason there is a convenience method called ``runWith()`` available for ``Sink``, ``Source`` or ``Flow`` requiring, respectively, a supplied ``Source`` (in order to run a ``Sink``), a ``Sink`` (in order to run a ``Source``) or both a ``Source`` and a ``Sink`` (in order to run a ``Flow``, since it has neither attached yet). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#materialization-runWith +.. includecode:: ../code/docs/stream/FlowDocTest.java#materialization-runWith It is worth pointing out that since processing stages are *immutable*, connecting them returns a new processing stage, instead of modifying the existing instance, so while constructing long flows, remember to assign the new value to a variable or run it: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#source-immutable +.. includecode:: ../code/docs/stream/FlowDocTest.java#source-immutable .. note:: By default Akka Streams elements support **exactly one** downstream processing stage. @@ -105,10 +105,10 @@ of the given sink or source. Since a stream can be materialized multiple times, the ``MaterializedMap`` returned is different for each materialization. 
In the example below we create two running materialized instance of the stream that we described in the ``runnable`` -variable, and both materializations give us a different ``Future`` from the map even though we used the same ``sink`` +variable, and both materializations give us a different ``CompletionStage`` from the map even though we used the same ``sink`` to refer to the future: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#stream-reuse +.. includecode:: ../code/docs/stream/FlowDocTest.java#stream-reuse Defining sources, sinks and flows --------------------------------- @@ -116,18 +116,18 @@ Defining sources, sinks and flows The objects :class:`Source` and :class:`Sink` define various ways to create sources and sinks of elements. The following examples show some of the most useful constructs (refer to the API documentation for more details): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#source-sink +.. includecode:: ../code/docs/stream/FlowDocTest.java#source-sink There are various ways to wire up different parts of a stream, the following examples show some of the available options: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#flow-connecting +.. includecode:: ../code/docs/stream/FlowDocTest.java#flow-connecting Illegal stream elements ----------------------- In accordance to the Reactive Streams specification (`Rule 2.13 `_) Akka Streams do not allow ``null`` to be passed through the stream as an element. In case you want to model the concept -of absence of a value we recommend using ``akka.japi.Option`` (for Java 6 and 7) or ``java.util.Optional`` which is available since Java 8. +of absence of a value we recommend using ``java.util.Optional`` which is available since Java 8. .. _back-pressure-explained-java: @@ -240,13 +240,13 @@ consequences: The first point can be countered by pre-fusing and then reusing a stream blueprint as sketched below: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#explicit-fusing +.. includecode:: ../code/docs/stream/FlowDocTest.java#explicit-fusing In order to balance the effects of the second and third bullet points you will have to insert asynchronous boundaries manually into your flows and graphs by way of adding ``Attributes.asyncBoundary`` to pieces that shall communicate with the rest of the graph in an asynchronous fashion. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#flow-async +.. includecode:: ../code/docs/stream/FlowDocTest.java#flow-async In this example we create two regions within the flow which will be executed in one Actor each—assuming that adding and multiplying integers is an extremely costly operation this will lead to a performance gain since two CPUs can @@ -256,7 +256,7 @@ by adding information to the flow graph that has been constructed up to this poi | -.. image:: ../images/asyncBoundary.png +.. image:: ../../images/asyncBoundary.png :align: center :width: 700 @@ -287,7 +287,7 @@ to somehow express how these values should be composed to a final value when we many combinator methods have variants that take an additional argument, a function, that will be used to combine the resulting values. Some examples of using these combiners are illustrated in the example below. -.. 
includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowDocTest.java#flow-mat-combine +.. includecode:: ../code/docs/stream/FlowDocTest.java#flow-mat-combine .. note:: diff --git a/akka-docs-dev/rst/java/stream-graphs.rst b/akka-docs/rst/java/stream/stream-graphs.rst similarity index 87% rename from akka-docs-dev/rst/java/stream-graphs.rst rename to akka-docs/rst/java/stream/stream-graphs.rst index d3b3dfe4db..aca91f52f2 100644 --- a/akka-docs-dev/rst/java/stream-graphs.rst +++ b/akka-docs/rst/java/stream/stream-graphs.rst @@ -45,13 +45,13 @@ One of the goals of the GraphDSL DSL is to look similar to how one would draw a simple to translate a design from whiteboard to code and be able to relate those two. Let's illustrate this by translating the below hand drawn graph into Akka Streams: -.. image:: ../images/simple-graph-example.png +.. image:: ../../images/simple-graph-example.png Such graph is simple to translate to the Graph DSL since each linear element corresponds to a :class:`Flow`, and each circle corresponds to either a :class:`Junction` or a :class:`Source` or :class:`Sink` if it is beginning or ending a :class:`Flow`. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowGraphDocTest.java#simple-flow-graph +.. includecode:: ../code/docs/stream/FlowGraphDocTest.java#simple-flow-graph .. note:: Junction *reference equality* defines *graph node equality* (i.e. the same merge *instance* used in a GraphDSL @@ -75,7 +75,7 @@ In the example below we prepare a graph that consists of two parallel streams, in which we re-use the same instance of :class:`Flow`, yet it will properly be materialized as two connections between the corresponding Sources and Sinks: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowGraphDocTest.java#flow-graph-reusing-a-flow +.. includecode:: ../code/docs/stream/FlowGraphDocTest.java#flow-graph-reusing-a-flow .. _partial-flow-graph-java: @@ -97,7 +97,7 @@ Let's imagine we want to provide users with a specialized element that given 3 i the greatest int value of each zipped triple. We'll want to expose 3 input ports (unconnected sources) and one output port (unconnected sink). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamPartialFlowGraphDocTest.java#simple-partial-flow-graph +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocTest.java#simple-partial-flow-graph As you can see, first we construct the partial graph that describes how to compute the maximum of two input streams, then we reuse that twice while constructing the partial graph that extends this to three input streams, @@ -136,12 +136,12 @@ be attached before this Source can run”. Refer to the example below, in which we create a Source that zips together two numbers, to see this graph construction in action: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamPartialFlowGraphDocTest.java#source-from-partial-flow-graph +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocTest.java#source-from-partial-flow-graph Similarly the same can be done for a ``Sink`` using ``SinkShape.of`` in which case the provided value must be an ``Inlet``. For defining a ``Flow`` we need to expose both an undefined source and sink: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamPartialFlowGraphDocTest.java#flow-from-partial-flow-graph +.. 
includecode:: ../code/docs/stream/StreamPartialFlowGraphDocTest.java#flow-from-partial-flow-graph Combining Sources and Sinks with simplified API ----------------------------------------------- @@ -150,11 +150,11 @@ There is simplified API you can use to combine sources and sinks with junctions ``Merge`` and ``Concat`` without the need for using the Graph DSL. The combine method takes care of constructing the necessary graph underneath. In following example we combine two sources into one (fan-in): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamPartialFlowGraphDocTest.java#source-combine +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocTest.java#source-combine The same can be done for a ``Sink`` but in this case it will be fan-out: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamPartialFlowGraphDocTest.java#sink-combine +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocTest.java#sink-combine .. _bidi-flow-java: @@ -171,14 +171,14 @@ this purpose exists the special type :class:`BidiFlow` which is a graph that has exactly two open inlets and two open outlets. The corresponding shape is called :class:`BidiShape` and is defined like this: -.. includecode:: ../../../akka-stream/src/main/scala/akka/stream/Shape.scala +.. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/Shape.scala :include: bidi-shape :exclude: implementation-details-elided A bidirectional flow is defined just like a unidirectional :class:`Flow` as demonstrated for the codec mentioned above: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/BidiFlowDocTest.java +.. includecode:: ../code/docs/stream/BidiFlowDocTest.java :include: codec :exclude: implementation-details-elided @@ -187,7 +187,7 @@ case of a functional 1:1 transformation there is a concise convenience method as shown on the last line. The implementation of the two functions is not difficult either: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/BidiFlowDocTest.java#codec-impl +.. includecode:: ../code/docs/stream/BidiFlowDocTest.java#codec-impl In this way you could easily integrate any other serialization library that turns an object into a sequence of bytes. @@ -197,11 +197,11 @@ a framing protocol means that any received chunk of bytes may correspond to zero or more messages. This is best implemented using a :class:`GraphStage` (see also :ref:`graphstage-java`). -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/BidiFlowDocTest.java#framing +.. includecode:: ../code/docs/stream/BidiFlowDocTest.java#framing With these implementations we can build a protocol stack and test it: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/BidiFlowDocTest.java#compose +.. includecode:: ../code/docs/stream/BidiFlowDocTest.java#compose This example demonstrates how :class:`BidiFlow` subgraphs can be hooked together and also turned around with the ``.reversed()`` method. The test @@ -219,12 +219,12 @@ can be used in the graph as an ordinary source or outlet, and which will eventua If the materialized value is needed at more than one place, it is possible to call ``materializedValue`` any number of times to acquire the necessary number of outlets. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue +.. 
includecode:: ../code/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue Be careful not to introduce a cycle where the materialized value actually contributes to the materialized value. -The following example demonstrates a case where the materialized ``Future`` of a fold is fed back to the fold itself. +The following example demonstrates a case where the materialized ``CompletionStage`` of a fold is fed back to the fold itself. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue-cycle +.. includecode:: ../code/docs/stream/FlowGraphDocTest.java#flow-graph-matvalue-cycle .. _graph-cycles-java: @@ -240,7 +240,7 @@ The graph takes elements from the source, prints them, then broadcasts those ele to a consumer (we just used ``Sink.ignore`` for now) and to a feedback arc that is merged back into the main via a ``Merge`` junction. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphCyclesDocTest.java#deadlocked +.. includecode:: ../code/docs/stream/GraphCyclesDocTest.java#deadlocked Running this we observe that after a few numbers have been printed, no more elements are logged to the console - all processing stops after some time. After some investigation we observe that: @@ -258,7 +258,7 @@ If we modify our feedback loop by replacing the ``Merge`` junction with a ``Merg before trying the other lower priority input ports. Since we feed back through the preferred port it is always guaranteed that the elements in the cycles can flow. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphCyclesDocTest.java#unfair +.. includecode:: ../code/docs/stream/GraphCyclesDocTest.java#unfair If we run the example we see that the same sequence of numbers are printed over and over again, but the processing does not stop. Hence, we avoided the deadlock, but ``source`` is still @@ -273,7 +273,7 @@ of initial elements from ``source``. To make our cycle both live (not deadlocking) and fair we can introduce a dropping element on the feedback arc. In this case we chose the ``buffer()`` operation giving it a dropping strategy ``OverflowStrategy.dropHead``. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphCyclesDocTest.java#dropping +.. includecode:: ../code/docs/stream/GraphCyclesDocTest.java#dropping If we run this example we see that @@ -292,7 +292,7 @@ the beginning instead. To achieve this we modify our first graph by replacing th Since ``ZipWith`` takes one element from ``source`` *and* from the feedback arc to inject one element into the cycle, we maintain the balance of elements. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphCyclesDocTest.java#zipping-dead +.. includecode:: ../code/docs/stream/GraphCyclesDocTest.java#zipping-dead Still, when we try to run the example it turns out that no element is printed at all! After some investigation we realize that: @@ -304,7 +304,7 @@ These two conditions are a typical "chicken-and-egg" problem. The solution is to element into the cycle that is independent from ``source``. We do this by using a ``Concat`` junction on the backwards arc that injects a single element using ``Source.single``. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/GraphCyclesDocTest.java#zipping-live +.. 
includecode:: ../code/docs/stream/GraphCyclesDocTest.java#zipping-live When we run the above example we see that processing starts and never stops. The important takeaway from this example is that balanced cycles often need an initial "kick-off" element to be injected into the cycle. diff --git a/akka-docs-dev/rst/java/stream-integrations.rst b/akka-docs/rst/java/stream/stream-integrations.rst similarity index 79% rename from akka-docs-dev/rst/java/stream-integrations.rst rename to akka-docs/rst/java/stream/stream-integrations.rst index 3383f57eb1..2615b9c406 100644 --- a/akka-docs-dev/rst/java/stream-integrations.rst +++ b/akka-docs/rst/java/stream/stream-integrations.rst @@ -73,7 +73,7 @@ stream publisher that keeps track of the subscription life cycle and requested e Here is an example of such an actor. It dispatches incoming jobs to the attached subscriber: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ActorPublisherDocTest.java#job-manager +.. includecode:: ../code/docs/stream/ActorPublisherDocTest.java#job-manager You send elements to the stream by calling ``onNext``. You are allowed to send as many elements as have been requested by the stream subscriber. This amount can be inquired with @@ -105,10 +105,10 @@ More detailed information can be found in the API documentation. This is how it can be used as input :class:`Source` to a :class:`Flow`: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ActorPublisherDocTest.java#actor-publisher-usage +.. includecode:: ../code/docs/stream/ActorPublisherDocTest.java#actor-publisher-usage You can only attach one subscriber to this publisher. Use a ``Broadcast``-element or -attach a ``Sink.asPublisher(true)`` to enable multiple subscribers. +attach a ``Sink.asPublisher(AsPublisher.WITH_FANOUT)`` to enable multiple subscribers. ActorSubscriber ^^^^^^^^^^^^^^^ @@ -120,7 +120,7 @@ messages from the stream. It can also receive other, non-stream messages, in the Here is an example of such an actor. It dispatches incoming jobs to child worker actors: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ActorSubscriberDocTest.java#worker-pool +.. includecode:: ../code/docs/stream/ActorSubscriberDocTest.java#worker-pool Subclass must define the ``RequestStrategy`` to control stream back pressure. After each incoming message the ``AbstractActorSubscriber`` will automatically invoke @@ -138,7 +138,7 @@ More detailed information can be found in the API documentation. This is how it can be used as output :class:`Sink` to a :class:`Flow`: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ActorSubscriberDocTest.java#actor-subscriber-usage +.. includecode:: ../code/docs/stream/ActorSubscriberDocTest.java#actor-subscriber-usage Integrating with External Services ================================== @@ -149,27 +149,27 @@ performed with ``mapAsync`` or ``mapAsyncUnordered``. For example, sending emails to the authors of selected tweets using an external email service: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#email-server-send +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-server-send We start with the tweet stream of authors: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#tweet-authors +.. 
includecode:: ../code/docs/stream/IntegrationDocTest.java#tweet-authors Assume that we can look up their email address using: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#email-address-lookup +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-address-lookup Transforming the stream of authors to a stream of email addresses by using the ``lookupEmail`` service can be done with ``mapAsync``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#email-addresses-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#email-addresses-mapAsync Finally, sending the emails: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#send-emails +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#send-emails ``mapAsync`` applies the given function, which calls out to the external service, to -each of the elements as they pass through this processing step. The function returns a :class:`Future` +each of the elements as they pass through this processing step. The function returns a :class:`CompletionStage` and the value of that future will be emitted downstream. The number of Futures that shall run in parallel is given as the first argument to ``mapAsync``. These Futures may complete in any order, but the elements that are emitted @@ -188,23 +188,23 @@ result stream onwards for further processing or storage. Note that ``mapAsync`` preserves the order of the stream elements. In this example the order is not important, so we can use the more efficient ``mapAsyncUnordered``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#external-service-mapAsyncUnordered +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#external-service-mapAsyncUnordered -In the above example the services conveniently returned a :class:`Future` of the result. -If that is not the case you need to wrap the call in a :class:`Future`. If the service call +In the above example the services conveniently returned a :class:`CompletionStage` of the result. +If that is not the case you need to wrap the call in a :class:`CompletionStage`. If the service call involves blocking you must also make sure that you run it on a dedicated execution context, to avoid starvation and disturbance of other tasks in the system. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#blocking-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#blocking-mapAsync The configuration of the ``"blocking-dispatcher"`` may look something like: -.. includecode:: ../scala/code/docs/stream/IntegrationDocSpec.scala#blocking-dispatcher-config +.. includecode:: ../../scala/code/docs/stream/IntegrationDocSpec.scala#blocking-dispatcher-config An alternative for blocking calls is to perform them in a ``map`` operation, still using a dedicated dispatcher for that operation. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#blocking-map +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#blocking-map However, that is not exactly the same as ``mapAsync``, since ``mapAsync`` may run several calls concurrently, but ``map`` performs them one at a time. 
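To make the blocking alternatives above concrete, here is a minimal sketch of the ``mapAsync`` variant; it is not taken from the linked sample sources, and the ``blockingLookup`` method, the class name and the ``blocking-dispatcher`` configuration path are assumptions made only for this illustration:

.. code-block:: java

   import java.util.Arrays;
   import java.util.concurrent.CompletableFuture;
   import java.util.concurrent.Executor;

   import akka.actor.ActorSystem;
   import akka.stream.ActorMaterializer;
   import akka.stream.Materializer;
   import akka.stream.javadsl.Sink;
   import akka.stream.javadsl.Source;

   public class BlockingMapAsyncSketch {
     // stand-in for a blocking call to an external service (an assumption for this sketch)
     static String blockingLookup(String name) {
       return name + "@example.com";
     }

     public static void main(String[] args) {
       final ActorSystem system = ActorSystem.create("integration-sketch");
       final Materializer mat = ActorMaterializer.create(system);

       // dedicated thread pool for the blocking work; the name is an assumed
       // configuration path in the spirit of the "blocking-dispatcher" settings above
       final Executor blockingEc = system.dispatchers().lookup("blocking-dispatcher");

       Source.from(Arrays.asList("alice", "bob", "carol"))
         // wrap the blocking call in a CompletionStage running on the dedicated pool;
         // up to 4 lookups are in flight at a time, emitted results keep their upstream order
         .mapAsync(4, name ->
           CompletableFuture.supplyAsync(() -> blockingLookup(name), blockingEc))
         .runWith(Sink.<String>foreach(System.out::println), mat);
     }
   }

Replacing the ``mapAsync`` stage with a plain ``map`` that calls ``blockingLookup`` directly would still work, but the lookups would then be performed strictly one after the other, which is exactly the difference pointed out above.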
@@ -212,10 +212,10 @@ several calls concurrently, but ``map`` performs them one at a time. For a service that is exposed as an actor, or if an actor is used as a gateway in front of an external service, you can use ``ask``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#save-tweets +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#save-tweets Note that if the ``ask`` is not completed within the given timeout the stream is completed with failure. -If that is not desired outcome you can use ``recover`` on the ``ask`` :class:`Future`. +If that is not desired outcome you can use ``recover`` on the ``ask`` :class:`CompletionStage`. Illustrating ordering and parallelism ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -241,14 +241,14 @@ successive calls as long as there is downstream demand of several elements. Here is a fictive service that we can use to illustrate these aspects. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#sometimes-slow-service +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#sometimes-slow-service Elements starting with a lower case character are simulated to take longer time to process. Here is how we can use it with ``mapAsync``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#sometimes-slow-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#sometimes-slow-mapAsync The output may look like this: @@ -305,7 +305,7 @@ calls are limited by the buffer size (4) of the :class:`ActorMaterializerSetting Here is how we can use the same service with ``mapAsyncUnordered``: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/IntegrationDocTest.java#sometimes-slow-mapAsyncUnordered +.. includecode:: ../code/docs/stream/IntegrationDocTest.java#sometimes-slow-mapAsyncUnordered The output may look like this: @@ -383,19 +383,19 @@ An incomplete list of other implementations: The two most important interfaces in Reactive Streams are the :class:`Publisher` and :class:`Subscriber`. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#imports +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#imports Let us assume that a library provides a publisher of tweets: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#tweets-publisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#tweets-publisher and another library knows how to store author handles in a database: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#author-storage-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#author-storage-subscriber Using an Akka Streams :class:`Flow` we can transform the stream and connect those: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java :include: authors,connect-all The :class:`Publisher` is used as an input :class:`Source` to the flow and the @@ -405,24 +405,24 @@ A :class:`Flow` can also be also converted to a :class:`RunnableGraph[Processor[ materializes to a :class:`Processor` when ``run()`` is called. 
``run()`` itself can be called multiple times, resulting in a new :class:`Processor` instance each time. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#flow-publisher-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#flow-publisher-subscriber A publisher can be connected to a subscriber with the ``subscribe`` method. It is also possible to expose a :class:`Source` as a :class:`Publisher` by using the Publisher-:class:`Sink`: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#source-publisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#source-publisher -A publisher that is created with ``Sink.asPublisher(false)`` supports only a single subscription. +A publisher that is created with ``Sink.asPublisher(AsPublisher.WITHOUT_FANOUT)`` supports only a single subscription. Additional subscription attempts will be rejected with an :class:`IllegalStateException`. A publisher that supports multiple subscribers using fan-out/broadcasting is created as follows: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java :include: author-alert-subscriber,author-storage-subscriber -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#source-fanoutPublisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#source-fanoutPublisher The input buffer size of the stage controls how far apart the slowest subscriber can be from the fastest subscriber before slowing down the stream. @@ -430,11 +430,11 @@ To make the picture complete, it is also possible to expose a :class:`Sink` as a :class:`Subscriber` by using the Subscriber-:class:`Source`: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#sink-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#sink-subscriber It is also possible to re-wrap :class:`Processor` instances as a :class:`Flow` by passing a factory function that will create the :class:`Processor` instances: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/ReactiveStreamsDocTest.java#use-processor +.. includecode:: ../code/docs/stream/ReactiveStreamsDocTest.java#use-processor Please note that a factory is necessary to achieve reusability of the resulting :class:`Flow`. 
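As a compact illustration of the interop described in this section, the following sketch (class and value names are assumptions, and the trivial number stream stands in for the tweet examples) exposes a :class:`Source` as a fan-out capable :class:`Publisher` and a :class:`Sink` as a :class:`Subscriber`, and wires them together using nothing but the Reactive Streams API:

.. code-block:: java

   import org.reactivestreams.Publisher;
   import org.reactivestreams.Subscriber;

   import akka.actor.ActorSystem;
   import akka.stream.ActorMaterializer;
   import akka.stream.Materializer;
   import akka.stream.javadsl.AsPublisher;
   import akka.stream.javadsl.Sink;
   import akka.stream.javadsl.Source;

   public class ReactiveStreamsInteropSketch {
     public static void main(String[] args) {
       final ActorSystem system = ActorSystem.create("interop-sketch");
       final Materializer mat = ActorMaterializer.create(system);

       // expose an Akka Streams Source as a Reactive Streams Publisher,
       // with fan-out so that more than one subscriber may attach later
       final Publisher<Integer> numbers =
         Source.range(1, 10)
           .runWith(Sink.asPublisher(AsPublisher.WITH_FANOUT), mat);

       // expose an Akka Streams Sink as a Reactive Streams Subscriber
       final Subscriber<Integer> printer =
         Source.<Integer>asSubscriber()
           .to(Sink.<Integer>foreach(System.out::println))
           .run(mat);

       // connect the two through the plain Reactive Streams contract
       numbers.subscribe(printer);
     }
   }

Because the publisher is created with ``AsPublisher.WITH_FANOUT``, further subscribers could be attached in the same way; with ``AsPublisher.WITHOUT_FANOUT`` a second ``subscribe`` call would be rejected as described above.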
diff --git a/akka-docs-dev/rst/java/stream-introduction.rst b/akka-docs/rst/java/stream/stream-introduction.rst similarity index 100% rename from akka-docs-dev/rst/java/stream-introduction.rst rename to akka-docs/rst/java/stream/stream-introduction.rst diff --git a/akka-docs-dev/rst/java/stream-io.rst b/akka-docs/rst/java/stream/stream-io.rst similarity index 89% rename from akka-docs-dev/rst/java/stream-io.rst rename to akka-docs/rst/java/stream/stream-io.rst index 24715b03fa..17f2bdc12a 100644 --- a/akka-docs-dev/rst/java/stream-io.rst +++ b/akka-docs/rst/java/stream/stream-io.rst @@ -16,10 +16,10 @@ Streaming TCP Accepting connections: Echo Server ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -In order to implement a simple EchoServer we ``bind`` to a given address, which returns a ``Source>``, +In order to implement a simple EchoServer we ``bind`` to a given address, which returns a ``Source>``, which will emit an :class:`IncomingConnection` element for each new connection that the Server should handle: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamTcpDocTest.java#echo-server-simple-bind +.. includecode:: ../code/docs/stream/io/StreamTcpDocTest.java#echo-server-simple-bind Next, we simply handle *each* incoming connection using a :class:`Flow` which will be used as the processing stage to handle and emit ByteStrings from and to the TCP Socket. Since one :class:`ByteString` does not have to necessarily @@ -28,7 +28,7 @@ helper Flow from ``akka.stream.io.Framing`` to chunk the inputs up into actual l argument indicates that we require an explicit line ending even for the last message before the connection is closed. In this example we simply add exclamation marks to each incoming text message and push it through the flow: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamTcpDocTest.java#echo-server-simple-handle +.. includecode:: ../code/docs/stream/io/StreamTcpDocTest.java#echo-server-simple-handle Notice that while most building blocks in Akka Streams are reusable and freely shareable, this is *not* the case for the incoming connection Flow, since it directly corresponds to an existing, already accepted connection its handling can @@ -52,7 +52,7 @@ Let's say we know a server has exposed a simple command line interface over TCP, and would like to interact with it using Akka Streams over TCP. To open an outgoing connection socket we use the ``outgoingConnection`` method: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamTcpDocTest.java#repl-client +.. includecode:: ../code/docs/stream/io/StreamTcpDocTest.java#repl-client The ``repl`` flow we use to handle the server interaction first prints the servers response, then awaits on input from the command line (this blocking call is used here just for the sake of simplicity) and converts it to a @@ -84,7 +84,7 @@ Thankfully in most situations finding the right spot to start the conversation i to the protocol we are trying to implement using Streams. In chat-like applications, which our examples resemble, it makes sense to make the Server initiate the conversation by emitting a "hello" message: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamTcpDocTest.java#welcome-banner-chat-server +.. 
includecode:: ../code/docs/stream/io/StreamTcpDocTest.java#welcome-banner-chat-server The way we constructed a :class:`Flow` using the :class:`GraphDSL` is explained in detail in :ref:`constructing-sources-sinks-flows-from-partial-graphs-java`, however the basic concepts is rather simple– @@ -113,7 +113,7 @@ on files. Streaming data from a file is as easy as creating a `FileIO.fromFile` given a target file, and an optional ``chunkSize`` which determines the buffer size determined as one "element" in such stream: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamFileDocTest.java#file-source +.. includecode:: ../code/docs/stream/io/StreamFileDocTest.java#file-source Please note that these processing stages are backed by Actors and by default are configured to run on a pre-configured threadpool-backed dispatcher dedicated for File IO. This is very important as it isolates the blocking file IO operations from the rest @@ -121,4 +121,4 @@ of the ActorSystem allowing each dispatcher to be utilised in the most efficient dispatcher for file IO operations globally, you can do so by changing the ``akka.stream.blocking-io-dispatcher``, or for a specific stage by specifying a custom Dispatcher in code, like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/io/StreamFileDocTest.java#custom-dispatcher-code +.. includecode:: ../code/docs/stream/io/StreamFileDocTest.java#custom-dispatcher-code diff --git a/akka-docs-dev/rst/java/stream-parallelism.rst b/akka-docs/rst/java/stream/stream-parallelism.rst similarity index 92% rename from akka-docs-dev/rst/java/stream-parallelism.rst rename to akka-docs/rst/java/stream/stream-parallelism.rst index 812798895b..f8a4b07647 100644 --- a/akka-docs-dev/rst/java/stream-parallelism.rst +++ b/akka-docs/rst/java/stream/stream-parallelism.rst @@ -23,7 +23,7 @@ are two pancakes being cooked at the same time, one being cooked on its first si completion. This is how this setup would look like implemented as a stream: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowParallelismDocTest.java#pipelining +.. includecode:: ../code/docs/stream/FlowParallelismDocTest.java#pipelining The two ``map`` stages in sequence (encapsulated in the "frying pan" flows) will be executed in a pipelined way, basically doing the same as Roland with his frying pans: @@ -51,7 +51,7 @@ the results on a shared plate. Whenever a pan becomes empty, he takes the next s In essence he parallelizes the same process over multiple pans. This is how this setup will look like if implemented using streams: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowParallelismDocTest.java#parallelism +.. includecode:: ../code/docs/stream/FlowParallelismDocTest.java#parallelism The benefit of parallelizing is that it is easy to scale. In the pancake example it is easy to add a third frying pan with Patrik's method, but Roland cannot add a third frying pan, @@ -74,7 +74,7 @@ First, let's look at how we can parallelize pipelined processing stages. In the will employ two chefs, each working using Roland's pipelining method, but we use the two chefs in parallel, just like Patrik used the two frying pans. This is how it looks like if expressed as streams: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowParallelismDocTest.java#parallel-pipeline +.. 
includecode:: ../code/docs/stream/FlowParallelismDocTest.java#parallel-pipeline The above pattern works well if there are many independent jobs that do not depend on the results of each other, but the jobs themselves need multiple processing steps where each step builds on the result of @@ -91,7 +91,7 @@ It is also possible to organize parallelized stages into pipelines. This would m This is again straightforward to implement with the streams API: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/FlowParallelismDocTest.java#pipelined-parallel +.. includecode:: ../code/docs/stream/FlowParallelismDocTest.java#pipelined-parallel This usage pattern is less common but might be usable if a certain step in the pipeline might take wildly different times to finish different jobs. The reason is that there are more balance-merge steps in this pattern diff --git a/akka-docs-dev/rst/java/stream-quickstart.rst b/akka-docs/rst/java/stream/stream-quickstart.rst similarity index 80% rename from akka-docs-dev/rst/java/stream-quickstart.rst rename to akka-docs/rst/java/stream/stream-quickstart.rst index 2ec964d4bf..cacdc00a96 100644 --- a/akka-docs-dev/rst/java/stream-quickstart.rst +++ b/akka-docs/rst/java/stream/stream-quickstart.rst @@ -15,7 +15,7 @@ allow to control what should happen in such scenarios. Here's the data model we'll be working with throughout the quickstart examples: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#model +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#model .. note:: @@ -31,7 +31,7 @@ like for example finding all twitter handles of users who tweet about ``#akka``. In order to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, which will be responsible for materializing and running the streams we are about to create: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#materializer-setup +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#materializer-setup The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define materialization properties, such as default buffer sizes (see also :ref:`stream-buffers-java`), the dispatcher to @@ -39,7 +39,7 @@ be used by the pipeline etc. These can be overridden with ``withAttributes`` on Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source`: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#tweet-source +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweet-source Streams always start flowing from a ``Source`` then can continue through ``Flow`` elements or more advanced graph elements to finally be consumed by a ``Sink``. @@ -53,7 +53,7 @@ The operations should look familiar to anyone who has used the Scala Collections however they operate on streams and not collections of data (which is a very important distinction, as some operations only make sense in streaming and vice versa): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#authors-filter-map +.. 
includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-filter-map Finally in order to :ref:`materialize ` and run the stream computation we need to attach the Flow to a ``Sink`` that will get the Flow running. The simplest way to do this is to call @@ -61,18 +61,18 @@ the Flow to a ``Sink`` that will get the Flow running. The simplest way to the `Sink class `_. For now let's simply print each author: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreachsink-println +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreachsink-println or by using the shorthand version (which are defined only for the most popular Sinks such as :class:`Sink.fold` and :class:`Sink.foreach`): -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreach-println +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#authors-foreach-println Materializing and running a stream always requires a :class:`Materializer` to be passed in explicitly, like this: ``.run(mat)``. The complete snippet looks like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#first-sample +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#first-sample Flattening sequences in streams ------------------------------- @@ -81,7 +81,7 @@ we might want to map from one element to a number of elements and receive a "fla works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` combinator: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#hashtags-mapConcat +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#hashtags-mapConcat .. note:: The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. @@ -109,7 +109,7 @@ at the expense of not reading as familiarly as collection transformations. Graphs are constructed using :class:`GraphDSL` like this: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#flow-graph-broadcast +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#flow-graph-broadcast As you can see, we use graph builder ``b`` to construct the graph using ``UniformFanOutShape`` and ``Flow`` s. @@ -121,7 +121,7 @@ The runnable graph can then be ``run()`` to materialize a stream out of it. Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports -expresses a grapth that is a *partial graph*. Concepts around composing and nesting graphs in large structures are +expresses a graph that is a *partial graph*. Concepts around composing and nesting graphs in large structures are explained in detail in :ref:`composition-java`. It is also possible to wrap complex computation graphs as Flows, Sinks or Sources, which will be explained in detail in :ref:`partial-flow-graph-java`. @@ -140,7 +140,7 @@ in either ``OutOfMemoryError`` s or other severe degradations of service respons and must be handled explicitly. 
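Because the quickstart snippets also live in external test files, the following compact sketch illustrates the transformations discussed above (``filter``/``map`` for authors, ``mapConcat`` for hashtags). The ``Tweet`` and ``Author`` classes here are simplified, hypothetical stand-ins for the data model defined in the referenced snippet files, not the actual model::

   import akka.NotUsed;
   import akka.actor.ActorSystem;
   import akka.stream.ActorMaterializer;
   import akka.stream.Materializer;
   import akka.stream.javadsl.Sink;
   import akka.stream.javadsl.Source;

   import java.util.Arrays;
   import java.util.List;
   import java.util.stream.Collectors;

   public class TweetPipelineSketch {
     // Hypothetical, simplified stand-ins for the documented data model.
     static final class Author {
       final String handle;
       Author(String handle) { this.handle = handle; }
     }

     static final class Tweet {
       final Author author;
       final String body;
       Tweet(Author author, String body) { this.author = author; this.body = body; }
       List<String> hashtags() {
         // naive extraction of #tags from the tweet body
         return Arrays.stream(body.split(" "))
           .filter(word -> word.startsWith("#"))
           .collect(Collectors.toList());
       }
     }

     public static void main(String[] args) {
       final ActorSystem system = ActorSystem.create("quickstart-sketch");
       final Materializer mat = ActorMaterializer.create(system);

       final Source<Tweet, NotUsed> tweets = Source.from(Arrays.asList(
         new Tweet(new Author("rolandkuhn"), "#akka rocks"),
         new Tweet(new Author("patriknw"), "#scala and #akka")));

       // filter + map: print the authors of all #akka tweets
       tweets
         .filter(t -> t.hashtags().contains("#akka"))
         .map(t -> t.author)
         .runWith(Sink.foreach(a -> System.out.println(a.handle)), mat);

       // mapConcat: flatten the stream of tweets into a stream of hashtags
       tweets
         .mapConcat((Tweet t) -> t.hashtags())
         .runWith(Sink.foreach(tag -> System.out.println(tag)), mat);
     }
   }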
For example, if we are only interested in the "*most recent tweets, with a buffer of 10 elements*" this can be expressed using the ``buffer`` element: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-slow-consumption-dropHead +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-slow-consumption-dropHead The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), @@ -159,25 +159,25 @@ but in general it is possible to deal with finite streams and come up with a nic First, let's write such an element counter using ``Flow.of(Class)`` and ``Sink.fold`` to see how the types look like: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in order to combine those with a ``Sink.fold`` that will sum all ``Integer`` elements of the stream and make its result available as -a ``Future``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously +a ``CompletionStage``. Next we connect the ``tweets`` stream to ``count`` with ``via``. Finally we connect the Flow to the previously prepared Sink using ``toMat``. Remember those mysterious ``Mat`` type parameters on ``Source``, ``Flow`` and ``Sink``? They represent the type of values these processing parts return when materialized. When you chain these together, you can explicitly combine their materialized values: in our example we used the ``Keep.right`` predefined function, which tells the implementation to only care about the materialized type of the stage currently appended to the right. -The materialized type of ``sumSink`` is ``Future`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` -has also a type parameter of ``Future``. +The materialized type of ``sumSink`` is ``CompletionStage`` and because of using ``Keep.right``, the resulting :class:`RunnableGraph` +has also a type parameter of ``CompletionStage``. This step does *not* yet materialize the processing pipeline, it merely prepares the description of the Flow, which is now connected to a Sink, and therefore can -be ``run()``, as indicated by its type: ``RunnableGraph>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` +be ``run()``, as indicated by its type: ``RunnableGraph>``. Next we call ``run()`` which uses the :class:`ActorMaterializer` to materialize and run the Flow. The value returned by calling ``run()`` on a ``RunnableGraph`` is of type ``T``. -In our case this type is ``Future`` which, when completed, will contain the total length of our tweets stream. +In our case this type is ``CompletionStage`` which, when completed, will contain the total length of our tweets stream. In case of the stream failing, this future would complete with a Failure. 
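To make the types in the preceding paragraphs concrete, here is a hedged, self-contained sketch of the counter described above. It reuses the ``tweets``, ``count`` and ``sumSink`` names from the text but substitutes plain ``String`` elements for tweets, and it assumes the 2.4 Java DSL in which ``Sink.fold`` materializes to a ``CompletionStage``::

   import akka.NotUsed;
   import akka.actor.ActorSystem;
   import akka.stream.ActorMaterializer;
   import akka.stream.Materializer;
   import akka.stream.javadsl.Flow;
   import akka.stream.javadsl.Keep;
   import akka.stream.javadsl.RunnableGraph;
   import akka.stream.javadsl.Sink;
   import akka.stream.javadsl.Source;

   import java.util.Arrays;
   import java.util.concurrent.CompletionStage;

   public class FoldCountSketch {
     public static void main(String[] args) {
       final ActorSystem system = ActorSystem.create("fold-count-sketch");
       final Materializer mat = ActorMaterializer.create(system);

       final Source<String, NotUsed> tweets = Source.from(Arrays.asList("t1", "t2", "t3"));

       // turn every incoming element into the integer 1 ...
       final Flow<String, Integer, NotUsed> count = Flow.of(String.class).map(t -> 1);

       // ... and sum them up; this Sink materializes to a CompletionStage of the total
       final Sink<Integer, CompletionStage<Integer>> sumSink =
         Sink.fold(0, (acc, elem) -> acc + elem);

       // Keep.right keeps the materialized value of the stage appended on the right (the Sink)
       final RunnableGraph<CompletionStage<Integer>> counterGraph =
         tweets.via(count).toMat(sumSink, Keep.right());

       // run() materializes the blueprint; its result is the Sink's CompletionStage
       final CompletionStage<Integer> sum = counterGraph.run(mat);
       sum.thenAccept(total -> System.out.println("Total tweets processed: " + total));
     }
   }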
A :class:`RunnableGraph` may be reused @@ -185,13 +185,13 @@ and materialized multiple times, because it is just the "blueprint" of the strea for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations will be different, as illustrated by this example: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-runnable-flow-materialized-twice +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-runnable-flow-materialized-twice Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or steering these elements which will be discussed in detail in :ref:`stream-materialization-java`. Summing up this section, now we know what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count-oneline +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocTest.java#tweets-fold-count-oneline .. note:: ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except diff --git a/akka-docs-dev/rst/java/stream-rate.rst b/akka-docs/rst/java/stream/stream-rate.rst similarity index 80% rename from akka-docs-dev/rst/java/stream-rate.rst rename to akka-docs/rst/java/stream/stream-rate.rst index 1943bd6884..ae815a8743 100644 --- a/akka-docs-dev/rst/java/stream-rate.rst +++ b/akka-docs/rst/java/stream/stream-rate.rst @@ -8,7 +8,7 @@ Akka Streams processing stages are asynchronous and pipelined by default which m an element to its downstream consumer is able to immediately process the next message. To demonstrate what we mean by this, let's take a look at the following example: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#pipelining +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#pipelining Running the above example, one of the possible outputs looks like this: @@ -64,16 +64,16 @@ to a level suitable for the throughput requirements of the application. Default Alternatively they can be set by passing a :class:`ActorMaterializerSettings` to the materializer: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#materializer-buffer +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#materializer-buffer If the buffer size needs to be set for segments of a :class:`Flow` only, it is possible by defining a separate :class:`Flow` with these attributes: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#section-buffer +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#section-buffer Here is an example of a code that demonstrate some of the issues caused by internal buffers: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#buffering-abstraction-leak +.. 
includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#buffering-abstraction-leak Running the above example one would expect the number *3* to be printed in every 3 seconds (the ``conflate`` step here is configured so that it counts the number of elements received before the downstream ``ZipWith`` consumes them). What @@ -97,7 +97,7 @@ pipeline of an application. The example below will ensure that 1000 jobs (but not more) are dequeued from an external (imaginary) system and stored locally in memory - relieving the external system: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-backpressure +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-backpressure The next example will also queue up 1000 jobs locally, but if there are more jobs waiting in the imaginary external systems, it makes space for the new element by @@ -105,12 +105,12 @@ dropping one element from the *tail* of the buffer. Dropping from the tail is a it must be noted that this will drop the *youngest* waiting job. If some "fairness" is desired in the sense that we want to be nice to jobs that has been waiting for long, then this option can be useful. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-droptail +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-droptail Instead of dropping the youngest element from the tail of the buffer a new element can be dropped without enqueueing it to the buffer at all. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-dropnew +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-dropnew Here is another example with a queue of 1000 jobs, but it makes space for the new element by dropping one element from the *head* of the buffer. This is the *oldest* @@ -119,13 +119,13 @@ resent if not processed in a certain period. The oldest element will be retransmitted soon, (in fact a retransmitted duplicate might be already in the queue!) so it makes sense to drop it first. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-drophead +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-drophead Compared to the dropping strategies above, dropBuffer drops all the 1000 jobs it has enqueued once the buffer gets full. This aggressive strategy is useful when dropping jobs is preferred to delaying jobs. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-dropbuffer +.. includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-dropbuffer If our imaginary external job provider is a client using our API, we might want to enforce that the client cannot have more than 1000 queued jobs @@ -133,7 +133,7 @@ otherwise we consider it flooding and terminate the connection. This is easily achievable by the error strategy which simply fails the stream once the buffer gets full. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-fail +.. 
includecode:: ../code/docs/stream/StreamBuffersRateDocTest.java#explicit-buffers-fail Rate transformation =================== @@ -145,13 +145,13 @@ When a fast producer can not be informed to slow down by backpressure or some ot Below is an example snippet that summarizes fast stream of elements to a standart deviation, mean and count of elements that have arrived while the stats have been calculated. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/RateTransformationDocTest.java#conflate-summarize +.. includecode:: ../code/docs/stream/RateTransformationDocTest.java#conflate-summarize This example demonstrates that such flow's rate is decoupled. The element rate at the start of the flow can be much higher that the element rate at the end of the flow. Another possible use of ``conflate`` is to not consider all elements for summary when producer starts getting too fast. Example below demonstrates how ``conflate`` can be used to implement random drop of elements when consumer is not able to keep up with the producer. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/RateTransformationDocTest.java#conflate-sample +.. includecode:: ../code/docs/stream/RateTransformationDocTest.java#conflate-sample Understanding expand -------------------- @@ -160,10 +160,10 @@ Expand helps to deal with slow producers which are unable to keep up with the de As a simple use of ``expand`` here is a flow that sends the same element to consumer when producer does not send any new elements. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/RateTransformationDocTest.java#expand-last +.. includecode:: ../code/docs/stream/RateTransformationDocTest.java#expand-last Expand also allows to keep some state between demand requests from the downstream. Leveraging this, here is a flow that tracks and reports a drift between fast consumer and slow producer. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/RateTransformationDocTest.java#expand-drift +.. includecode:: ../code/docs/stream/RateTransformationDocTest.java#expand-drift Note that all of the elements coming from upstream will go through ``expand`` at least once. This means that the output of this flow is going to report a drift of zero if producer is fast enough, or a larger drift otherwise. diff --git a/akka-docs-dev/rst/java/stream-testkit.rst b/akka-docs/rst/java/stream/stream-testkit.rst similarity index 70% rename from akka-docs-dev/rst/java/stream-testkit.rst rename to akka-docs/rst/java/stream/stream-testkit.rst index aa8cfc97f7..6f863f0810 100644 --- a/akka-docs-dev/rst/java/stream-testkit.rst +++ b/akka-docs/rst/java/stream/stream-testkit.rst @@ -25,20 +25,20 @@ elements from a predefined collection, running a constructed test flow and asserting on the results that sink produced. Here is an example of a test for a sink: -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#strict-collection +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#strict-collection The same strategy can be applied for sources as well. In the next example we have a source that produces an infinite stream of elements. Such source can be tested by asserting that first arbitrary number of elements hold some condition. Here the ``grouped`` combinator and ``Sink.head`` are very useful. -.. 
includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#grouped-infinite +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#grouped-infinite When testing a flow we need to attach a source and a sink. As both stream ends are under our control, we can choose sources that tests various edge cases of the flow and sinks that ease assertions. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#folded-stream +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#folded-stream TestKit ======= @@ -48,10 +48,10 @@ used for writing stream tests that use familiar :class:`TestProbe` from the :mod:`akka-testkit` API. One of the more straightforward tests would be to materialize stream to a -:class:`Future` and then use ``pipe`` pattern to pipe the result of that future +:class:`CompletionStage` and then use ``PatternsCS.pipe`` pattern to pipe the result of that future to the probe. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#pipeto-testprobe +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#pipeto-testprobe Instead of materializing to a future, we can use a :class:`Sink.actorRef` that sends all incoming elements to the given :class:`ActorRef`. Now we can use @@ -59,13 +59,13 @@ assertion methods on :class:`TestProbe` and expect elements one by one as they arrive. We can also assert stream completion by expecting for ``onCompleteMessage`` which was given to :class:`Sink.actorRef`. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#sink-actorref +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#sink-actorref Similarly to :class:`Sink.actorRef` that provides control over received elements, we can use :class:`Source.actorRef` and have full control over elements to be sent. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#source-actorref +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#source-actorref Streams TestKit =============== @@ -83,18 +83,18 @@ provide sources and sinks that materialize to probes that allow fluent API. A sink returned by ``TestSink.probe`` allows manual control over demand and assertions over elements coming downstream. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#test-sink-probe +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#test-sink-probe A source returned by ``TestSource.probe`` can be used for asserting demand or controlling when stream is completed or ended with an error. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#test-source-probe +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#test-source-probe You can also inject exceptions and test sink behaviour on error conditions. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#injecting-failure +.. includecode:: ../code/docs/stream/StreamTestKitDocTest.java#injecting-failure Test source and sink can be used together in combination when testing flows. -.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/StreamTestKitDocTest.java#test-source-and-sink +.. 
includecode:: ../code/docs/stream/StreamTestKitDocTest.java#test-source-and-sink diff --git a/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst b/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst index 4dcde879c3..031a7915df 100644 --- a/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst +++ b/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst @@ -673,9 +673,12 @@ Be careful to not do any operations on the ``Future[Terminated]`` using the ``sy as ``ExecutionContext`` as it will be shut down with the ``ActorSystem``, instead use for example the Scala standard library context from ``scala.concurrent.ExecutionContext.global``. +:: + // import system.dispatcher <- this would not work import scala.concurrent.ExecutionContext.Implicits.global system.terminate().foreach { _ => println("Actor system was shut down") } + diff --git a/akka-docs/rst/scala.rst b/akka-docs/rst/scala.rst index 473b199f76..bce3187bef 100644 --- a/akka-docs/rst/scala.rst +++ b/akka-docs/rst/scala.rst @@ -12,6 +12,8 @@ Scala Documentation scala/index-futures scala/index-network scala/index-utilities + scala/stream/index + scala/http/index scala/howto experimental/index dev/index diff --git a/akka-docs/rst/scala/cluster-sharding.rst b/akka-docs/rst/scala/cluster-sharding.rst index 0d1b5f29d4..9160e40f3b 100644 --- a/akka-docs/rst/scala/cluster-sharding.rst +++ b/akka-docs/rst/scala/cluster-sharding.rst @@ -217,7 +217,7 @@ If the state of the entities are persistent you may stop entities that are not u reduce memory consumption. This is done by the application specific implementation of the entity actors for example by defining receive timeout (``context.setReceiveTimeout``). If a message is already enqueued to the entity when it stops itself the enqueued message -in the mailbox will be dropped. To support graceful passivation without loosing such +in the mailbox will be dropped. To support graceful passivation without losing such messages the entity actor can send ``ShardRegion.Passivate`` to its parent ``Shard``. The specified wrapped message in ``Passivate`` will be sent back to the entity, which is then supposed to stop itself. Incoming messages will be buffered by the ``Shard`` @@ -358,4 +358,4 @@ a ``ClusterShard.ClusterShardingStats`` containing the identifiers of the shards of entities that are alive in each shard. The purpose of these messages is testing and monitoring, they are not provided to give access to -directly sending messages to the individual entities. \ No newline at end of file +directly sending messages to the individual entities. diff --git a/akka-docs/rst/scala/cluster-usage.rst b/akka-docs/rst/scala/cluster-usage.rst index 336e0987f2..cfa6bff926 100644 --- a/akka-docs/rst/scala/cluster-usage.rst +++ b/akka-docs/rst/scala/cluster-usage.rst @@ -29,8 +29,8 @@ settings, but with ``akka.cluster.ClusterActorRefProvider``. The ``akka.cluster.seed-nodes`` should normally also be added to your ``application.conf`` file. .. note:: - If you are using Docker or the nodes for some other reason have separate internal and external ip addresses - you must configure remoting according to :ref:`remote-configuration-nat` + If you are running Akka in a Docker container or the nodes for some other reason have separate internal and + external ip addresses you must configure remoting according to :ref:`remote-configuration-nat` The seed nodes are configured contact points for initial, automatic, join of the cluster. 
diff --git a/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala index 95406a9bbd..ae2bbc6796 100644 --- a/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/ActorDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor @@ -24,6 +24,7 @@ import scala.concurrent.Await //#my-actor class MyActor extends Actor { val log = Logging(context.system, this) + def receive = { case "test" => log.info("received test") case _ => log.info("received unknown message") @@ -54,6 +55,7 @@ class DemoActorWrapper extends Actor { object DemoActor { /** * Create Props for an actor of this type. + * * @param magicNumber The magic number to be passed to this actor’s constructor. * @return a Props for creating this actor, which can then be further configured * (e.g. calling `.withDispatcher()` on it) @@ -257,7 +259,10 @@ final case class Give(thing: Any) //#receive-orElse -class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { +class ActorDocSpec extends AkkaSpec(""" + akka.loglevel = INFO + akka.loggers = [] + """) { "import context" in { new AnyRef { diff --git a/akka-docs/rst/scala/code/docs/actor/FSMDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/FSMDocSpec.scala index d77a4279ad..5901e256c2 100644 --- a/akka-docs/rst/scala/code/docs/actor/FSMDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/FSMDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSample.scala b/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSample.scala index 6fb5f409b6..524df6d7b8 100644 --- a/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSample.scala +++ b/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSpec.scala index d0ff6cdda8..ed4e659602 100644 --- a/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/FaultHandlingDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/InitializationDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/InitializationDocSpec.scala index e9727a5748..d4444f8abb 100644 --- a/akka-docs/rst/scala/code/docs/actor/InitializationDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/InitializationDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala index bc6c67ea01..b3638da0cb 100644 --- a/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/TypedActorDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/TypedActorDocSpec.scala index 0c6095e950..22a79d0b07 100644 --- a/akka-docs/rst/scala/code/docs/actor/TypedActorDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/actor/TypedActorDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/actor/UnnestedReceives.scala b/akka-docs/rst/scala/code/docs/actor/UnnestedReceives.scala index c4f0fbf65f..b5c24db690 100644 --- a/akka-docs/rst/scala/code/docs/actor/UnnestedReceives.scala +++ b/akka-docs/rst/scala/code/docs/actor/UnnestedReceives.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.actor diff --git a/akka-docs/rst/scala/code/docs/agent/AgentDocSpec.scala b/akka-docs/rst/scala/code/docs/agent/AgentDocSpec.scala index 642667336c..bdc4448234 100644 --- a/akka-docs/rst/scala/code/docs/agent/AgentDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/agent/AgentDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.agent diff --git a/akka-docs/rst/scala/code/docs/akka/typed/IntroSpec.scala b/akka-docs/rst/scala/code/docs/akka/typed/IntroSpec.scala index f819f51189..0a77a56b1b 100644 --- a/akka-docs/rst/scala/code/docs/akka/typed/IntroSpec.scala +++ b/akka-docs/rst/scala/code/docs/akka/typed/IntroSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.akka.typed @@ -78,7 +78,7 @@ class IntroSpec extends TypedSpec { def `must say hello`(): Unit = { //#hello-world import HelloWorld._ - // using global pool since we want to run tasks after system shutdown + // using global pool since we want to run tasks after system.terminate import scala.concurrent.ExecutionContext.Implicits.global val system: ActorSystem[Greet] = ActorSystem("hello", Props(greeter)) diff --git a/akka-docs/rst/scala/code/docs/camel/Consumers.scala b/akka-docs/rst/scala/code/docs/camel/Consumers.scala index d032cf8ae1..8a95e57ec9 100644 --- a/akka-docs/rst/scala/code/docs/camel/Consumers.scala +++ b/akka-docs/rst/scala/code/docs/camel/Consumers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.camel diff --git a/akka-docs/rst/scala/code/docs/camel/CustomRoute.scala b/akka-docs/rst/scala/code/docs/camel/CustomRoute.scala index ea6016f0f5..f09e39fee0 100644 --- a/akka-docs/rst/scala/code/docs/camel/CustomRoute.scala +++ b/akka-docs/rst/scala/code/docs/camel/CustomRoute.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.camel diff --git a/akka-docs/rst/scala/code/docs/cluster/ClusterDocSpec.scala b/akka-docs/rst/scala/code/docs/cluster/ClusterDocSpec.scala index d124fc6c14..6c01d592c4 100644 --- a/akka-docs/rst/scala/code/docs/cluster/ClusterDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/cluster/ClusterDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.cluster diff --git a/akka-docs/rst/scala/code/docs/ddata/DistributedDataDocSpec.scala b/akka-docs/rst/scala/code/docs/ddata/DistributedDataDocSpec.scala index fdf37090e0..b423ff0ed1 100644 --- a/akka-docs/rst/scala/code/docs/ddata/DistributedDataDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/ddata/DistributedDataDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata diff --git a/akka-docs/rst/scala/code/docs/ddata/TwoPhaseSet.scala b/akka-docs/rst/scala/code/docs/ddata/TwoPhaseSet.scala index 48b5e71e37..5a4182ce92 100644 --- a/akka-docs/rst/scala/code/docs/ddata/TwoPhaseSet.scala +++ b/akka-docs/rst/scala/code/docs/ddata/TwoPhaseSet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata diff --git a/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer.scala b/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer.scala index 98e67f62ad..7279de0694 100644 --- a/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer.scala +++ b/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata.protobuf diff --git a/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala b/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala index a625ca5261..b12c5aa689 100644 --- a/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala +++ b/akka-docs/rst/scala/code/docs/ddata/protobuf/TwoPhaseSetSerializer2.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.ddata.protobuf diff --git a/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala b/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala index 510e8981ff..d87fce3377 100644 --- a/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.dispatcher diff --git a/akka-docs/rst/scala/code/docs/dispatcher/MyUnboundedMailbox.scala b/akka-docs/rst/scala/code/docs/dispatcher/MyUnboundedMailbox.scala index 58eaa5ba0f..ff96d4bad8 100644 --- a/akka-docs/rst/scala/code/docs/dispatcher/MyUnboundedMailbox.scala +++ b/akka-docs/rst/scala/code/docs/dispatcher/MyUnboundedMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.dispatcher diff --git a/akka-docs/rst/scala/code/docs/event/EventBusDocSpec.scala b/akka-docs/rst/scala/code/docs/event/EventBusDocSpec.scala index 7ba0be4b20..39a9e6a610 100644 --- a/akka-docs/rst/scala/code/docs/event/EventBusDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/event/EventBusDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.event diff --git a/akka-docs/rst/scala/code/docs/event/LoggingDocSpec.scala b/akka-docs/rst/scala/code/docs/event/LoggingDocSpec.scala index d60e757d80..dc8ba5ba82 100644 --- a/akka-docs/rst/scala/code/docs/event/LoggingDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/event/LoggingDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.event diff --git a/akka-docs/rst/scala/code/docs/extension/ExtensionDocSpec.scala b/akka-docs/rst/scala/code/docs/extension/ExtensionDocSpec.scala index 61ecc30c89..6f20b742fb 100644 --- a/akka-docs/rst/scala/code/docs/extension/ExtensionDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/extension/ExtensionDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.extension diff --git a/akka-docs/rst/scala/code/docs/extension/SettingsExtensionDocSpec.scala b/akka-docs/rst/scala/code/docs/extension/SettingsExtensionDocSpec.scala index 2f44c075ea..b432b9b732 100644 --- a/akka-docs/rst/scala/code/docs/extension/SettingsExtensionDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/extension/SettingsExtensionDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.extension diff --git a/akka-docs/rst/scala/code/docs/future/FutureDocSpec.scala b/akka-docs/rst/scala/code/docs/future/FutureDocSpec.scala index b0cdb05178..ac3f926c05 100644 --- a/akka-docs/rst/scala/code/docs/future/FutureDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/future/FutureDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.future diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala similarity index 90% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala index 69075c8d5f..110711cac6 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpClientExampleSpec.scala @@ -1,10 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl import akka.actor.{ ActorLogging, ActorSystem } +import akka.stream.{ ActorMaterializerSettings } import akka.util.ByteString import org.scalatest.{ Matchers, WordSpec } @@ -78,15 +79,17 @@ class HttpClientExampleSpec extends WordSpec with Matchers { import akka.actor.Actor import akka.http.scaladsl.Http import akka.http.scaladsl.model._ - import akka.stream.scaladsl.ImplicitMaterializer + import akka.stream.ActorMaterializer + import akka.stream.ActorMaterializerSettings class Myself extends Actor - with ImplicitMaterializer with ActorLogging { import akka.pattern.pipe import context.dispatcher + final implicit val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(context.system)) + val http = Http(context.system) override def preStart() = { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala index e677ecb5dc..2733e221ce 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/HttpServerExampleSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl @@ -281,7 +281,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers { Console.readLine() // for the future transformations bindingFuture .flatMap(_.unbind()) // trigger unbinding from the port - .onComplete(_ ⇒ system.shutdown()) // and shutdown when done + .onComplete(_ ⇒ system.terminate()) // and shutdown when done } } diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala similarity index 95% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala index 4e993510d6..447fe639ce 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/MarshalSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/ModelSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/ModelSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/ModelSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/ModelSpec.scala index 6b6db2a189..94d982d29a 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/ModelSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/ModelSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala similarity index 95% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala index 6b224d30e5..6da1837a8a 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/SprayJsonExampleSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala similarity index 92% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala index 693460688d..dd5df68606 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/UnmarshalSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala index bede052ddb..84d957f2ea 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/CaseClassExtractionExamplesSpec.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server + + /* import org.scalatest.Inside import akka.http.scaladsl.server._ diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala index efbeafe9de..70602370e6 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/DirectiveExamplesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala index f0117e278f..52f8073e27 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/ExceptionHandlerExamplesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala similarity index 92% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala index 4963a2a0c2..689dac3e9b 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/FileUploadExamplesSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl.server import java.io.File +import akka.Done import akka.actor.ActorRef import akka.http.scaladsl.model.Multipart.FormData.BodyPart import akka.stream.io.{ Framing } @@ -73,18 +74,18 @@ class FileUploadExamplesSpec extends RoutingSpec { val csvUploads = path("metadata" / LongNumber) { id => entity(as[Multipart.FormData]) { formData => - val done = formData.parts.mapAsync(1) { + val done: Future[Done] = formData.parts.mapAsync(1) { case b: BodyPart if b.filename.exists(_.endsWith(".csv")) => b.entity.dataBytes .via(splitLines) .map(_.utf8String.split(",").toVector) .runForeach(csv => metadataActor ! MetadataActor.Entry(id, csv)) - case _ => Future.successful(Unit) + case _ => Future.successful(Done) }.runWith(Sink.ignore) // when processing have finished create a response for the user - onSuccess(done) { + onSuccess(done) { _ => complete { "ok!" } diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala similarity index 95% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala index 457557187e..d0b551c8fb 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/FullTestKitExampleSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala index 00c7551c82..427afd02fb 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/RejectionHandlerExamplesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala similarity index 81% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala index fcc3c5b194..283ae63327 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/RoutingSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl.server diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/WebsocketExampleSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala similarity index 87% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/WebsocketExampleSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala index d2fc0cab49..383113ebd3 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/WebsocketExampleSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.scaladsl.server @@ -8,7 +8,7 @@ import akka.http.scaladsl.model.ws.BinaryMessage import akka.stream.scaladsl.Sink import org.scalatest.{ Matchers, WordSpec } -class WebsocketExampleSpec extends WordSpec with Matchers { +class WebSocketExampleSpec extends WordSpec with Matchers { "core-example" in { pending // compile-time only test //#websocket-example-using-core @@ -16,7 +16,7 @@ class WebsocketExampleSpec extends WordSpec with Matchers { import akka.stream.ActorMaterializer import akka.stream.scaladsl.{ Source, Flow } import akka.http.scaladsl.Http - import akka.http.scaladsl.model.ws.UpgradeToWebsocket + import akka.http.scaladsl.model.ws.UpgradeToWebSocket import akka.http.scaladsl.model.ws.{ TextMessage, Message } import akka.http.scaladsl.model.{ HttpResponse, Uri, HttpRequest } import akka.http.scaladsl.model.HttpMethods._ @@ -27,7 +27,7 @@ class WebsocketExampleSpec extends WordSpec with Matchers { //#websocket-handler // The Greeter WebSocket Service expects a "name" per message and // returns a greeting message for that name - val greeterWebsocketService = + val greeterWebSocketService = Flow[Message] .mapConcat { // we match but don't actually consume the text message here, @@ -45,8 +45,8 @@ class WebsocketExampleSpec extends WordSpec with Matchers { //#websocket-request-handling val requestHandler: HttpRequest ⇒ HttpResponse = { case req @ HttpRequest(GET, Uri.Path("/greeter"), _, _, _) ⇒ - req.header[UpgradeToWebsocket] match { - case Some(upgrade) ⇒ upgrade.handleMessages(greeterWebsocketService) + req.header[UpgradeToWebSocket] match { + case Some(upgrade) ⇒ upgrade.handleMessages(greeterWebSocketService) case None ⇒ HttpResponse(400, entity = "Not a valid websocket request!") } case _: HttpRequest ⇒ HttpResponse(404, entity = "Unknown resource!") @@ -62,7 +62,7 @@ class WebsocketExampleSpec extends WordSpec with Matchers { import system.dispatcher // for the future transformations bindingFuture .flatMap(_.unbind()) // trigger unbinding from the port - .onComplete(_ ⇒ system.shutdown()) // and shutdown when done + .onComplete(_ ⇒ system.terminate()) // and shutdown when done } "routing-example" in { pending // compile-time only test @@ -80,7 +80,7 @@ class WebsocketExampleSpec extends WordSpec with Matchers { // The Greeter WebSocket Service expects a "name" per message and // returns a greeting message for that name - val greeterWebsocketService = + val greeterWebSocketService = Flow[Message] .collect { case tm: TextMessage ⇒ TextMessage(Source.single("Hello ") ++ tm.textStream) @@ -91,7 +91,7 @@ class WebsocketExampleSpec extends WordSpec with Matchers { val route = path("greeter") { get { - handleWebsocketMessages(greeterWebsocketService) + handleWebSocketMessages(greeterWebSocketService) } } //#websocket-routing @@ -104,6 +104,6 @@ class 
WebsocketExampleSpec extends WordSpec with Matchers { import system.dispatcher // for the future transformations bindingFuture .flatMap(_.unbind()) // trigger unbinding from the port - .onComplete(_ ⇒ system.shutdown()) // and shutdown when done + .onComplete(_ ⇒ system.terminate()) // and shutdown when done } } diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala index 7030faaec0..e4271de6aa 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/BasicDirectivesExamplesSpec.scala @@ -1,9 +1,8 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import java.io.File @@ -13,9 +12,11 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.{ Server, RawHeader } import akka.http.scaladsl.server.RouteResult.{ Complete, Rejected } import akka.http.scaladsl.server._ +import akka.http.scaladsl.settings.RoutingSettings import akka.stream.ActorMaterializer import akka.stream.scaladsl.{ FileIO, Sink, Source } import akka.util.ByteString +import docs.http.scaladsl.server.RoutingSpec import scala.concurrent.Future import scala.util.control.NonFatal @@ -105,9 +106,9 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { def sample() = path("sample") { - extractExecutionContext { implicit ec => + extractExecutionContext { implicit executor => complete { - Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext + Future(s"Run on ${executor.##}!") // uses the `executor` ExecutionContext } } } @@ -132,9 +133,9 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { //#extractExecutionContext-0 def sample() = path("sample") { - extractExecutionContext { implicit ec => + extractExecutionContext { implicit executor => complete { - Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext + Future(s"Run on ${executor.##}!") // uses the `executor` ExecutionContext } } } @@ -183,7 +184,7 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { } "withSettings-0" in compileOnlySpec { //#withSettings-0 - val special = RoutingSettings(system).copy(fileIODispatcher = "special-io-dispatcher") + val special = RoutingSettings(system).withFileIODispatcher("special-io-dispatcher") def sample() = path("sample") { @@ -700,7 +701,7 @@ class BasicDirectivesExamplesSpec extends RoutingSpec { "mapSettings-examples" in { //#mapSettings-examples val tunedSettings = mapSettings { settings => - settings.copy(fileGetConditional = false) + settings.withFileGetConditional(false) } val route = diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala index 
7276cac7f9..2207a5ebb9 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CodingDirectivesExamplesSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.coding._ +import docs.http.scaladsl.server.RoutingSpec import akka.http.scaladsl.model.{ HttpResponse, StatusCodes } import akka.http.scaladsl.model.headers.{ HttpEncodings, HttpEncoding, `Accept-Encoding`, `Content-Encoding` } import akka.http.scaladsl.model.headers.HttpEncodings._ diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala similarity index 92% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala index 090a19dd4f..d96a7b0e4b 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CookieDirectivesExamplesSpec.scala @@ -1,12 +1,12 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.server._ import akka.http.scaladsl.model.headers.{ HttpCookie, Cookie, `Set-Cookie` } +import docs.http.scaladsl.server.RoutingSpec import akka.http.scaladsl.model.DateTime class CookieDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala index c77709484c..b33c78dcc3 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/CustomDirectivesExamplesSpec.scala @@ -1,8 +1,7 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.server.{ Directive1, Directive } import docs.http.scaladsl.server.RoutingSpec diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala index 7d0ba017ca..aab7443b41 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/DebuggingDirectivesExamplesSpec.scala @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.event.Logging import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.http.scaladsl.server.directives.{ DebuggingDirectives, LogEntry, LoggingMagnet } +import docs.http.scaladsl.server.RoutingSpec class DebuggingDirectivesExamplesSpec extends RoutingSpec { "logRequest-0" in { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala index daafbdd7e6..5713199804 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ExecutionDirectivesExamplesSpec.scala @@ -1,12 +1,12 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server._ +import docs.http.scaladsl.server.RoutingSpec class ExecutionDirectivesExamplesSpec extends RoutingSpec { "handleExceptions" in { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala index 441e1a841d..31b00c4a38 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileAndResourceDirectivesExamplesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl.server.directives @@ -9,6 +9,7 @@ import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.directives.DirectoryListing import akka.http.scaladsl.server.directives.FileAndResourceDirectives.DirectoryRenderer import docs.http.scaladsl.server.RoutingSpec +import docs.http.scaladsl.server.RoutingSpec class FileAndResourceDirectivesExamplesSpec extends RoutingSpec { "getFromFile-examples" in compileOnlySpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala index 94b2a200fd..1a7d938196 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FileUploadDirectivesExamplesSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.http.scaladsl.server.directives diff --git a/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala new file mode 100644 index 0000000000..13827a4123 --- /dev/null +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package docs.http.scaladsl.server.directives + +import akka.http.scaladsl.server.Route +import akka.http.scaladsl.model._ +import docs.http.scaladsl.server.RoutingSpec + +class FormFieldDirectivesExamplesSpec extends RoutingSpec { + "formFields" in { + val route = + formFields('color, 'age.as[Int]) { (color, age) => + complete(s"The color is '$color' and the age ten years ago was ${age - 10}") + } + + // tests: + Post("/", FormData("color" -> "blue", "age" -> "68")) ~> route ~> check { + responseAs[String] shouldEqual "The color is 'blue' and the age ten years ago was 58" + } + + Get("/") ~> Route.seal(route) ~> check { + status shouldEqual StatusCodes.BadRequest + responseAs[String] shouldEqual "Request is missing required form field 'color'" + } + } + "formField" in { + val route = + formField('color) { color => + complete(s"The color is '$color'") + } ~ + formField('id.as[Int]) { id => + complete(s"The id is '$id'") + } + + // tests: + Post("/", FormData("color" -> "blue")) ~> route ~> check { + responseAs[String] shouldEqual "The color is 'blue'" + } + + Get("/") ~> Route.seal(route) ~> check { + status shouldEqual StatusCodes.BadRequest + responseAs[String] shouldEqual "Request is missing required form field 'color'" + } + } + "formFieldMap" in { + val route = + formFieldMap { fields => + def formFieldString(formField: (String, String)): String = + s"""${formField._1} = '${formField._2}'""" + complete(s"The form fields are ${fields.map(formFieldString).mkString(", ")}") + } + + // tests: + Post("/", FormData("color" -> "blue", "count" -> "42")) ~> route ~> check { + responseAs[String] shouldEqual "The form fields are color = 'blue', count = '42'" + } + Post("/", FormData("x" -> "1", "x" -> "5")) ~> route ~> check { + 
responseAs[String] shouldEqual "The form fields are x = '5'" + } + } + "formFieldMultiMap" in { + val route = + formFieldMultiMap { fields => + complete("There are " + + s"form fields ${fields.map(x => x._1 + " -> " + x._2.size).mkString(", ")}") + } + + // tests: + Post("/", FormData("color" -> "blue", "count" -> "42")) ~> route ~> check { + responseAs[String] shouldEqual "There are form fields color -> 1, count -> 1" + } + Post("/", FormData("x" -> "23", "x" -> "4", "x" -> "89")) ~> route ~> check { + responseAs[String] shouldEqual "There are form fields x -> 3" + } + } + "formFieldSeq" in { + val route = + formFieldSeq { fields => + def formFieldString(formField: (String, String)): String = + s"""${formField._1} = '${formField._2}'""" + complete(s"The form fields are ${fields.map(formFieldString).mkString(", ")}") + } + + // tests: + Post("/", FormData("color" -> "blue", "count" -> "42")) ~> route ~> check { + responseAs[String] shouldEqual "The form fields are color = 'blue', count = '42'" + } + Post("/", FormData("x" -> "23", "x" -> "4", "x" -> "89")) ~> route ~> check { + responseAs[String] shouldEqual "The form fields are x = '23', x = '4', x = '89'" + } + } + +} diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala index 6beb509d1b..fc8ed90774 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/FutureDirectivesExamplesSpec.scala @@ -1,11 +1,12 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import java.util.concurrent.TimeUnit +import docs.http.scaladsl.server.RoutingSpec + import scala.concurrent.Future import scala.util.{ Success, Failure } import akka.http.scaladsl.server.ExceptionHandler diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala index 38c0fdcb76..3e258fb517 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HeaderDirectivesExamplesSpec.scala @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.server.MissingHeaderRejection import akka.http.scaladsl.server.Route +import docs.http.scaladsl.server.RoutingSpec import headers._ import StatusCodes._ import org.scalatest.Inside diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala similarity index 92% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala index f3e3d5ecea..120463508c 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/HostDirectivesExamplesSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ +import docs.http.scaladsl.server.RoutingSpec import headers._ import StatusCodes._ diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala index 1581c09655..cf156d21a7 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MarshallingDirectivesExamplesSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport +import docs.http.scaladsl.server.RoutingSpec import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.model._ import spray.json.DefaultJsonProtocol diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala similarity index 95% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala index b9b1efb515..673675003c 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MethodDirectivesExamplesSpec.scala @@ -1,12 +1,12 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Route +import docs.http.scaladsl.server.RoutingSpec class MethodDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala index eacabc5f86..c888383064 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/MiscDirectivesExamplesSpec.scala @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.server._ import headers._ +import docs.http.scaladsl.server.RoutingSpec class MiscDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala index 6824e3c03e..64ba394a7c 100755 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/ParameterDirectivesExamplesSpec.scala @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Route import akka.http.scaladsl.unmarshalling.PredefinedFromStringUnmarshallers +import docs.http.scaladsl.server.RoutingSpec class ParameterDirectivesExamplesSpec extends RoutingSpec with PredefinedFromStringUnmarshallers { "example-1" in { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala index ad71b04d59..694325c50d 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/PathDirectivesExamplesSpec.scala @@ -1,12 +1,12 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.server._ +import docs.http.scaladsl.server.RoutingSpec class PathDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala similarity index 91% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala index c2933e208c..5a25c1b3ec 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala @@ -1,9 +1,8 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import com.typesafe.config.{ ConfigFactory, Config } @@ -12,6 +11,7 @@ import akka.util.ByteString import headers._ import scala.concurrent.Await import scala.concurrent.duration._ +import docs.http.scaladsl.server.RoutingSpec class RangeDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala index ff0b71619d..e3b452643d 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RespondWithDirectivesExamplesSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model.headers._ +import docs.http.scaladsl.server.RoutingSpec class RespondWithDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala index 74bbaac0ca..0c7fe5ff02 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RouteDirectivesExamplesSpec.scala @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.server.{ Route, ValidationRejection } import akka.testkit.EventFilter +import docs.http.scaladsl.server.RoutingSpec class RouteDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala similarity index 87% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala index d31a79b29a..1e1d2b696b 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SchemeDirectivesExamplesSpec.scala @@ -1,9 +1,9 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives +import docs.http.scaladsl.server.RoutingSpec class SchemeDirectivesExamplesSpec extends RoutingSpec { "example-1" in { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala index d0de7d3954..a6079e7987 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/SecurityDirectivesExamplesSpec.scala @@ -1,9 +1,8 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ -package docs.http.scaladsl.server -package directives +package docs.http.scaladsl.server.directives import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ @@ -11,6 +10,7 @@ import akka.http.scaladsl.server.Route import akka.http.scaladsl.server.directives.Credentials import scala.concurrent.Future +import docs.http.scaladsl.server.RoutingSpec class SecurityDirectivesExamplesSpec extends RoutingSpec { diff --git a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala similarity index 84% rename from akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala rename to akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala index ab807fba90..c1a72efa56 100644 --- a/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala +++ b/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.http.scaladsl.server.directives @@ -12,11 +12,10 @@ import akka.stream.OverflowStrategy import akka.stream.scaladsl.{ Sink, Source, Flow } import docs.http.scaladsl.server.RoutingSpec - import akka.http.scaladsl.model.ws.{ TextMessage, Message, BinaryMessage } import akka.http.scaladsl.testkit.WSProbe -class WebsocketDirectivesExamplesSpec extends RoutingSpec { +class WebSocketDirectivesExamplesSpec extends RoutingSpec { "greeter-service" in { def greeter: Flow[Message, Message, Any] = Flow[Message].mapConcat { @@ -29,18 +28,18 @@ class WebsocketDirectivesExamplesSpec extends RoutingSpec { } val websocketRoute = path("greeter") { - handleWebsocketMessages(greeter) + handleWebSocketMessages(greeter) } // tests: // create a testing probe representing the client-side val wsClient = WSProbe() - // WS creates a Websocket request for testing + // WS creates a WebSocket request for testing WS("/greeter", wsClient.flow) ~> websocketRoute ~> check { // check response for WS Upgrade headers - isWebsocketUpgrade shouldEqual true + isWebSocketUpgrade shouldEqual true // manually run a WS conversation wsClient.sendMessage("Peter") @@ -75,18 +74,18 @@ class WebsocketDirectivesExamplesSpec extends RoutingSpec { def websocketMultipleProtocolRoute = path("services") { - handleWebsocketMessagesForProtocol(greeterService, "greeter") ~ - handleWebsocketMessagesForProtocol(echoService, "echo") + handleWebSocketMessagesForProtocol(greeterService, "greeter") ~ + handleWebSocketMessagesForProtocol(echoService, "echo") } // tests: val wsClient = WSProbe() - // WS creates a Websocket request for testing + // WS creates a WebSocket request for testing WS("/services", wsClient.flow, List("other", "echo")) ~> websocketMultipleProtocolRoute ~> check { - expectWebsocketUpgradeWithProtocol { protocol ⇒ + expectWebSocketUpgradeWithProtocol { protocol ⇒ protocol shouldEqual "echo" wsClient.sendMessage("Peter") diff --git a/akka-docs/rst/scala/code/docs/io/EchoServer.scala b/akka-docs/rst/scala/code/docs/io/EchoServer.scala index 046186540c..6d8253064e 100644 --- a/akka-docs/rst/scala/code/docs/io/EchoServer.scala +++ b/akka-docs/rst/scala/code/docs/io/EchoServer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io diff --git a/akka-docs/rst/scala/code/docs/io/IODocSpec.scala b/akka-docs/rst/scala/code/docs/io/IODocSpec.scala index 60aa3fe5f2..e667258a39 100644 --- a/akka-docs/rst/scala/code/docs/io/IODocSpec.scala +++ b/akka-docs/rst/scala/code/docs/io/IODocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io diff --git a/akka-docs/rst/scala/code/docs/io/ReadBackPressure.scala b/akka-docs/rst/scala/code/docs/io/ReadBackPressure.scala index 075865ac9c..276218c890 100644 --- a/akka-docs/rst/scala/code/docs/io/ReadBackPressure.scala +++ b/akka-docs/rst/scala/code/docs/io/ReadBackPressure.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io diff --git a/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticast.scala b/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticast.scala index a617a79277..cc8c616ca5 100644 --- a/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticast.scala +++ b/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticast.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.io diff --git a/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticastSpec.scala b/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticastSpec.scala index 3b72555169..07d250142a 100644 --- a/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticastSpec.scala +++ b/akka-docs/rst/scala/code/docs/io/ScalaUdpMulticastSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.io @@ -19,6 +19,8 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec") "listener" should { "send message back to sink" in { + // TODO make this work consistently on all platforms + pending def okInterfaceToUse(iface: NetworkInterface): Boolean = { iface.getInetAddresses.exists(_.isInstanceOf[Inet6Address]) && @@ -41,6 +43,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec") val listener = system.actorOf(Props(classOf[Listener], iface, group, port, sink)) expectMsgType[Udp.Bound] val sender = system.actorOf(Props(classOf[Sender], iface, group, port, msg)) + // fails here, so binding succeeds but sending a message does not expectMsg(msg) // unbind diff --git a/akka-docs/rst/scala/code/docs/pattern/BackoffSupervisorDocSpec.scala b/akka-docs/rst/scala/code/docs/pattern/BackoffSupervisorDocSpec.scala index 52c29be33f..6a8c823a52 100644 --- a/akka-docs/rst/scala/code/docs/pattern/BackoffSupervisorDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/pattern/BackoffSupervisorDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern diff --git a/akka-docs/rst/scala/code/docs/pattern/ScalaTemplate.scala b/akka-docs/rst/scala/code/docs/pattern/ScalaTemplate.scala index 3e8db1cb73..ce69e47c25 100644 --- a/akka-docs/rst/scala/code/docs/pattern/ScalaTemplate.scala +++ b/akka-docs/rst/scala/code/docs/pattern/ScalaTemplate.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern diff --git a/akka-docs/rst/scala/code/docs/pattern/SchedulerPatternSpec.scala b/akka-docs/rst/scala/code/docs/pattern/SchedulerPatternSpec.scala index 69ed43bfab..ac026454a4 100644 --- a/akka-docs/rst/scala/code/docs/pattern/SchedulerPatternSpec.scala +++ b/akka-docs/rst/scala/code/docs/pattern/SchedulerPatternSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.pattern diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala index 186d3a7949..0903d611b3 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence @@ -392,7 +392,6 @@ object PersistenceDocSpec { // order of received messages: // a // # b arrives at mailbox, stashing; internal-stash = [b] - // # PoisonPill arrives at mailbox, stashing; internal-stash = [b, Shutdown] // PoisonPill is an AutoReceivedMessage, is handled automatically // !! stop !! // Actor is stopped without handling `b` nor the `a` handler! 
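The hunks above repeatedly apply the same two migrations: the `Websocket` spelling is renamed to `WebSocket` throughout the HTTP API (`UpgradeToWebSocket`, `handleWebSocketMessages`, `isWebSocketUpgrade`), and the deprecated `system.shutdown()` is replaced by `system.terminate()`. The following condensed sketch (not part of the patch) pulls those pieces together outside of the test scaffolding; the object name and port 8080 are illustrative only, and the flow mirrors the greeter example in `WebSocketExampleSpec` above.

```
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods.GET
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, Uri }
import akka.http.scaladsl.model.ws.{ Message, TextMessage, UpgradeToWebSocket }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Source }

object WebSocketSketch extends App {
  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // greet each incoming text frame, as in the docs greeter example
  val greeter: Flow[Message, Message, Any] = Flow[Message].collect {
    case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream)
  }

  val handler: HttpRequest => HttpResponse = {
    case req @ HttpRequest(GET, Uri.Path("/greeter"), _, _, _) =>
      req.header[UpgradeToWebSocket] match { // was: UpgradeToWebsocket
        case Some(upgrade) => upgrade.handleMessages(greeter)
        case None          => HttpResponse(400, entity = "Not a valid websocket request!")
      }
    case _ => HttpResponse(404, entity = "Unknown resource!")
  }

  val binding = Http().bindAndHandleSync(handler, interface = "localhost", port = 8080)

  // unbind, then terminate the system; system.shutdown() is replaced in these docs
  binding.flatMap(_.unbind()).onComplete(_ => system.terminate())
}
```
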
diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceEventAdapterDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceEventAdapterDocSpec.scala index 65f7a06986..5fecb88fe2 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceEventAdapterDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceEventAdapterDocSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.persistence diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceMultiDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceMultiDocSpec.scala index 6487dcdc17..e66ee0291b 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceMultiDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceMultiDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ import akka.persistence.PersistentActor diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala index 2e9d85d212..8496043b53 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala index 76d02b50d1..f1e0d1cd8f 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceSchemaEvolutionDocSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala index e2360e6924..545c540da8 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.persistence diff --git a/akka-docs/rst/scala/code/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala index 1a3c365b4e..4d6bb246af 100644 --- a/akka-docs/rst/scala/code/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/query/LeveldbPersistenceQueryDocSpec.scala @@ -1,8 +1,9 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.persistence.query +import akka.NotUsed import akka.persistence.journal.{ EventAdapter, EventSeq } import akka.testkit.AkkaSpec import akka.persistence.query.PersistenceQuery @@ -56,10 +57,10 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) { val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal]( LeveldbReadJournal.Identifier) - val src: Source[EventEnvelope, Unit] = + val src: Source[EventEnvelope, NotUsed] = queries.eventsByPersistenceId("some-persistence-id", 0L, Long.MaxValue) - val events: Source[Any, Unit] = src.map(_.event) + val events: Source[Any, NotUsed] = src.map(_.event) //#EventsByPersistenceId } @@ -69,7 +70,7 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) { val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal]( LeveldbReadJournal.Identifier) - val src: Source[String, Unit] = queries.allPersistenceIds() + val src: Source[String, NotUsed] = queries.allPersistenceIds() //#AllPersistenceIds } @@ -79,7 +80,7 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) { val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal]( LeveldbReadJournal.Identifier) - val src: Source[EventEnvelope, Unit] = + val src: Source[EventEnvelope, NotUsed] = queries.eventsByTag(tag = "green", offset = 0L) //#EventsByTag } diff --git a/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala b/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala index 69d7a70047..7e543c229f 100644 --- a/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala +++ b/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.persistence.query diff --git a/akka-docs/rst/scala/code/docs/persistence/query/PersistenceQueryDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/query/PersistenceQueryDocSpec.scala index dc8cfdf57a..9dbbe0e91f 100644 --- a/akka-docs/rst/scala/code/docs/persistence/query/PersistenceQueryDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/query/PersistenceQueryDocSpec.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.persistence.query +import akka.NotUsed import akka.actor._ import akka.persistence.{ Recovery, PersistentActor } import akka.persistence.query._ @@ -53,25 +54,25 @@ object PersistenceQueryDocSpec { config.getDuration("refresh-interval", MILLISECONDS).millis override def eventsByTag( - tag: String, offset: Long = 0L): Source[EventEnvelope, Unit] = { + tag: String, offset: Long = 0L): Source[EventEnvelope, NotUsed] = { val props = MyEventsByTagPublisher.props(tag, offset, refreshInterval) Source.actorPublisher[EventEnvelope](props) - .mapMaterializedValue(_ ⇒ ()) + .mapMaterializedValue(_ ⇒ NotUsed) } override def eventsByPersistenceId( persistenceId: String, fromSequenceNr: Long = 0L, - toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, Unit] = { + toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, NotUsed] = { // implement in a similar way as eventsByTag ??? } - override def allPersistenceIds(): Source[String, Unit] = { + override def allPersistenceIds(): Source[String, NotUsed] = { // implement in a similar way as eventsByTag ??? 
} - override def currentPersistenceIds(): Source[String, Unit] = { + override def currentPersistenceIds(): Source[String, NotUsed] = { // implement in a similar way as eventsByTag ??? } @@ -95,19 +96,19 @@ object PersistenceQueryDocSpec { with akka.persistence.query.javadsl.CurrentPersistenceIdsQuery { override def eventsByTag( - tag: String, offset: Long = 0L): javadsl.Source[EventEnvelope, Unit] = + tag: String, offset: Long = 0L): javadsl.Source[EventEnvelope, NotUsed] = scaladslReadJournal.eventsByTag(tag, offset).asJava override def eventsByPersistenceId( persistenceId: String, fromSequenceNr: Long = 0L, - toSequenceNr: Long = Long.MaxValue): javadsl.Source[EventEnvelope, Unit] = + toSequenceNr: Long = Long.MaxValue): javadsl.Source[EventEnvelope, NotUsed] = scaladslReadJournal.eventsByPersistenceId( persistenceId, fromSequenceNr, toSequenceNr).asJava - override def allPersistenceIds(): javadsl.Source[String, Unit] = + override def allPersistenceIds(): javadsl.Source[String, NotUsed] = scaladslReadJournal.allPersistenceIds().asJava - override def currentPersistenceIds(): javadsl.Source[String, Unit] = + override def currentPersistenceIds(): javadsl.Source[String, NotUsed] = scaladslReadJournal.currentPersistenceIds().asJava // possibility to add more plugin specific queries @@ -201,7 +202,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) { "akka.persistence.query.my-read-journal") // issue query to journal - val source: Source[EventEnvelope, Unit] = + val source: Source[EventEnvelope, NotUsed] = readJournal.eventsByPersistenceId("user-1337", 0, Long.MaxValue) // materialize stream, consuming events @@ -220,7 +221,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) { //#events-by-tag // assuming journal is able to work with numeric offsets we can: - val blueThings: Source[EventEnvelope, Unit] = + val blueThings: Source[EventEnvelope, NotUsed] = readJournal.eventsByTag("blue") // find top 10 blue things: diff --git a/akka-docs/rst/scala/code/docs/remoting/RemoteDeploymentDocSpec.scala b/akka-docs/rst/scala/code/docs/remoting/RemoteDeploymentDocSpec.scala index 00070c29da..b3d6e7afe6 100644 --- a/akka-docs/rst/scala/code/docs/remoting/RemoteDeploymentDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/remoting/RemoteDeploymentDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.remoting diff --git a/akka-docs/rst/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala b/akka-docs/rst/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala index c2d77411f2..f8ba35aeb8 100644 --- a/akka-docs/rst/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.routing diff --git a/akka-docs/rst/scala/code/docs/routing/CustomRouterDocSpec.scala b/akka-docs/rst/scala/code/docs/routing/CustomRouterDocSpec.scala index 271fe26169..d4c6ba3736 100644 --- a/akka-docs/rst/scala/code/docs/routing/CustomRouterDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/routing/CustomRouterDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package docs.routing diff --git a/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala b/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala index d0627a50af..f22cfc75e7 100644 --- a/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.routing diff --git a/akka-docs/rst/scala/code/docs/serialization/SerializationDocSpec.scala b/akka-docs/rst/scala/code/docs/serialization/SerializationDocSpec.scala index 638e88d7d0..212704a0f0 100644 --- a/akka-docs/rst/scala/code/docs/serialization/SerializationDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/serialization/SerializationDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.serialization { diff --git a/akka-docs-dev/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala index 8b1fc10935..4018ca7688 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/ActorPublisherDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala index 099b605cf0..98f6d96d2f 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/ActorSubscriberDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/BidiFlowDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/BidiFlowDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala index f58052ebbc..e830fb91d6 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/BidiFlowDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/BidiFlowDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/CompositionDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/CompositionDocSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/CompositionDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/CompositionDocSpec.scala index 54fe9bde7f..ee0d3636cb 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/CompositionDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/CompositionDocSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.stream +import akka.NotUsed import akka.stream._ import akka.stream.scaladsl.Tcp.OutgoingConnection import akka.stream.scaladsl._ @@ -181,7 +182,7 @@ class CompositionDocSpec extends AkkaSpec { val source: Source[Int, Promise[Option[Int]]] = Source.maybe[Int] // Materializes to Unit (black) - val flow1: Flow[Int, Int, Unit] = Flow[Int].take(100) + val flow1: Flow[Int, Int, NotUsed] = Flow[Int].take(100) // Materializes to Promise[Int] (red) val nestedSource: Source[Int, Promise[Option[Int]]] = @@ -190,7 +191,7 @@ class CompositionDocSpec extends AkkaSpec { //#mat-combine-2 // Materializes to Unit (orange) - val flow2: Flow[Int, ByteString, Unit] = Flow[Int].map { i => ByteString(i.toString) } + val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i => ByteString(i.toString) } // Materializes to Future[OutgoingConnection] (yellow) val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] = diff --git a/akka-docs-dev/rst/scala/code/docs/stream/FlowDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/FlowDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala index 8e0a3ea344..c179fef9cd 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/FlowDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream +import akka.NotUsed import akka.actor.Cancellable import akka.stream.{ ClosedShape, FlowShape } import akka.stream.scaladsl._ @@ -134,11 +135,11 @@ class FlowDocSpec extends AkkaSpec { source.to(Sink.foreach(println(_))) // Starting from a Sink - val sink: Sink[Int, Unit] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_))) + val sink: Sink[Int, NotUsed] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_))) Source(1 to 6).to(sink) // Broadcast to a sink inline - val otherSink: Sink[Int, Unit] = + val otherSink: Sink[Int, NotUsed] = Flow[Int].alsoTo(Sink.foreach(println(_))).to(Sink.ignore) Source(1 to 6).to(otherSink) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/FlowErrorDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/FlowErrorDocSpec.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/FlowErrorDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/FlowErrorDocSpec.scala index fce4597308..888b2182d2 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/FlowErrorDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/FlowErrorDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/FlowGraphDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/FlowGraphDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala index e56ab5a3bf..ebbf623632 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/FlowGraphDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package docs.stream +import akka.NotUsed import akka.stream._ import akka.stream.scaladsl._ import akka.stream.testkit.AkkaSpec @@ -21,7 +22,7 @@ class FlowGraphDocSpec extends AkkaSpec { "build simple graph" in { //format: OFF //#simple-flow-graph - val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[Unit] => + val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[NotUsed] => import GraphDSL.Implicits._ val in = Source(1 to 10) val out = Sink.ignore @@ -131,7 +132,7 @@ class FlowGraphDocSpec extends AkkaSpec { object PriorityWorkerPool { def apply[In, Out]( worker: Flow[In, Out, Any], - workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], Unit] = { + workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = { GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ diff --git a/akka-docs-dev/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala similarity index 83% rename from akka-docs-dev/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala index 35da0a67b4..5daaf76aac 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/FlowParallelismDocSpec.scala @@ -1,5 +1,6 @@ package docs.stream +import akka.NotUsed import akka.stream.FlowShape import akka.stream.scaladsl.{ GraphDSL, Merge, Balance, Source, Flow } import akka.stream.testkit.AkkaSpec @@ -15,11 +16,11 @@ class FlowParallelismDocSpec extends AkkaSpec { //format: OFF //#pipelining // Takes a scoop of batter and creates a pancake with one side cooked - val fryingPan1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] = + val fryingPan1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] = Flow[ScoopOfBatter].map { batter => HalfCookedPancake() } // Finishes a half-cooked pancake - val fryingPan2: Flow[HalfCookedPancake, Pancake, Unit] = + val fryingPan2: Flow[HalfCookedPancake, Pancake, NotUsed] = Flow[HalfCookedPancake].map { halfCooked => Pancake() } //#pipelining //format: ON @@ -28,17 +29,17 @@ class FlowParallelismDocSpec extends AkkaSpec { //#pipelining // With the two frying pans we can fully cook pancakes - val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] = + val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow[ScoopOfBatter].via(fryingPan1).via(fryingPan2) //#pipelining } "Demonstrate parallel processing" in { //#parallelism - val fryingPan: Flow[ScoopOfBatter, Pancake, Unit] = + val fryingPan: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow[ScoopOfBatter].map { batter => Pancake() } - val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] = Flow.fromGraph(GraphDSL.create() { implicit builder => + val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder => val dispatchBatter = builder.add(Balance[ScoopOfBatter](2)) val mergePancakes = builder.add(Merge[Pancake](2)) @@ -58,7 +59,7 @@ class FlowParallelismDocSpec extends AkkaSpec { "Demonstrate parallelized pipelines" in { //#parallel-pipeline - val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] = + val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder => val dispatchBatter = builder.add(Balance[ScoopOfBatter](2)) @@ -76,7 +77,7 @@ class FlowParallelismDocSpec extends AkkaSpec { "Demonstrate pipelined parallel processing" in { //#pipelined-parallel - val 
pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] = + val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder => val dispatchBatter = builder.add(Balance[ScoopOfBatter](2)) val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2)) @@ -89,7 +90,7 @@ class FlowParallelismDocSpec extends AkkaSpec { FlowShape(dispatchBatter.in, mergeHalfPancakes.out) }) - val pancakeChefs2: Flow[HalfCookedPancake, Pancake, Unit] = + val pancakeChefs2: Flow[HalfCookedPancake, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder => val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2)) val mergePancakes = builder.add(Merge[Pancake](2)) @@ -102,7 +103,7 @@ class FlowParallelismDocSpec extends AkkaSpec { FlowShape(dispatchHalfPancakes.in, mergePancakes.out) }) - val kitchen: Flow[ScoopOfBatter, Pancake, Unit] = pancakeChefs1.via(pancakeChefs2) + val kitchen: Flow[ScoopOfBatter, Pancake, NotUsed] = pancakeChefs1.via(pancakeChefs2) //#pipelined-parallel } diff --git a/akka-docs-dev/rst/scala/code/docs/stream/FlowStagesSpec.scala b/akka-docs/rst/scala/code/docs/stream/FlowStagesSpec.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/FlowStagesSpec.scala rename to akka-docs/rst/scala/code/docs/stream/FlowStagesSpec.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/GraphCyclesSpec.scala b/akka-docs/rst/scala/code/docs/stream/GraphCyclesSpec.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/GraphCyclesSpec.scala rename to akka-docs/rst/scala/code/docs/stream/GraphCyclesSpec.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/GraphStageDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/GraphStageDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/GraphStageDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/GraphStageDocSpec.scala index 7e950832c1..c25d68100d 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/GraphStageDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/GraphStageDocSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.stream +import akka.NotUsed import akka.stream.scaladsl.{ Keep, Sink, Flow, Source } import akka.stream.stage._ import akka.stream._ @@ -68,10 +69,10 @@ class GraphStageDocSpec extends AkkaSpec { //#simple-source-usage // A GraphStage is a proper Graph, just like what GraphDSL.create would return - val sourceGraph: Graph[SourceShape[Int], Unit] = new NumbersSource + val sourceGraph: Graph[SourceShape[Int], NotUsed] = new NumbersSource // Create a Source from the Graph to access the DSL - val mySource: Source[Int, Unit] = Source.fromGraph(new NumbersSource) + val mySource: Source[Int, NotUsed] = Source.fromGraph(new NumbersSource) // Returns 55 val result1: Future[Int] = mySource.take(10).runFold(0)(_ + _) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/IntegrationDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/IntegrationDocSpec.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/stream/IntegrationDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/IntegrationDocSpec.scala index 8364d0fdef..776bf13ed2 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/IntegrationDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/IntegrationDocSpec.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. 
+ * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream +import akka.NotUsed + import scala.concurrent.duration._ import akka.stream.testkit.AkkaSpec import akka.stream.scaladsl._ @@ -132,21 +134,21 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { val emailServer = new EmailServer(probe.ref) //#tweet-authors - val authors: Source[Author, Unit] = + val authors: Source[Author, NotUsed] = tweets .filter(_.hashtags.contains(akka)) .map(_.author) //#tweet-authors //#email-addresses-mapAsync - val emailAddresses: Source[String, Unit] = + val emailAddresses: Source[String, NotUsed] = authors .mapAsync(4)(author => addressSystem.lookupEmail(author.handle)) .collect { case Some(emailAddress) => emailAddress } //#email-addresses-mapAsync //#send-emails - val sendEmails: RunnableGraph[Unit] = + val sendEmails: RunnableGraph[NotUsed] = emailAddresses .mapAsync(4)(address => { emailServer.send( @@ -168,14 +170,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { "lookup email with mapAsync and supervision" in { val addressSystem = new AddressSystem2 - val authors: Source[Author, Unit] = + val authors: Source[Author, NotUsed] = tweets.filter(_.hashtags.contains(akka)).map(_.author) //#email-addresses-mapAsync-supervision import ActorAttributes.supervisionStrategy import Supervision.resumingDecider - val emailAddresses: Source[String, Unit] = + val emailAddresses: Source[String, NotUsed] = authors.via( Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle)) .withAttributes(supervisionStrategy(resumingDecider))) @@ -188,15 +190,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { val emailServer = new EmailServer(probe.ref) //#external-service-mapAsyncUnordered - val authors: Source[Author, Unit] = + val authors: Source[Author, NotUsed] = tweets.filter(_.hashtags.contains(akka)).map(_.author) - val emailAddresses: Source[String, Unit] = + val emailAddresses: Source[String, NotUsed] = authors .mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle)) .collect { case Some(emailAddress) => emailAddress } - val sendEmails: RunnableGraph[Unit] = + val sendEmails: RunnableGraph[NotUsed] = emailAddresses .mapAsyncUnordered(4)(address => { emailServer.send( @@ -231,7 +233,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { //#blocking-mapAsync val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher") - val sendTextMessages: RunnableGraph[Unit] = + val sendTextMessages: RunnableGraph[NotUsed] = phoneNumbers .mapAsync(4)(phoneNo => { Future { @@ -271,7 +273,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet")) } .withAttributes(ActorAttributes.dispatcher("blocking-dispatcher")) - val sendTextMessages: RunnableGraph[Unit] = + val sendTextMessages: RunnableGraph[NotUsed] = phoneNumbers.via(send).to(Sink.ignore) sendTextMessages.run() @@ -291,10 +293,10 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) { val database = system.actorOf(Props(classOf[DatabaseService], probe.ref), "db") //#save-tweets - val akkaTweets: Source[Tweet, Unit] = tweets.filter(_.hashtags.contains(akka)) + val akkaTweets: Source[Tweet, NotUsed] = tweets.filter(_.hashtags.contains(akka)) implicit val timeout = Timeout(3.seconds) - val saveTweets: RunnableGraph[Unit] = + val saveTweets: RunnableGraph[NotUsed] = akkaTweets .mapAsync(4)(tweet => database ? 
Save(tweet)) .to(Sink.ignore) diff --git a/akka-docs/rst/scala/code/docs/stream/MigrationsScala.scala b/akka-docs/rst/scala/code/docs/stream/MigrationsScala.scala new file mode 100644 index 0000000000..9490976605 --- /dev/null +++ b/akka-docs/rst/scala/code/docs/stream/MigrationsScala.scala @@ -0,0 +1,30 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package docs.stream + +import akka.stream.scaladsl._ +import akka.stream.testkit.AkkaSpec + +class MigrationsScala extends AkkaSpec { + + "Examples in migration guide" must { + "compile" in { + lazy val dontExecuteMe = { + //#expand-continually + Flow[Int].expand(Iterator.continually(_)) + //#expand-continually + //#expand-state + Flow[Int].expand(i => { + var state = 0 + Iterator.continually({ + state += 1 + (i, state) + }) + }) + //#expand-state + } + } + } + +} diff --git a/akka-docs-dev/rst/scala/code/docs/stream/RateTransformationDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/RateTransformationDocSpec.scala similarity index 86% rename from akka-docs-dev/rst/scala/code/docs/stream/RateTransformationDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/RateTransformationDocSpec.scala index 6b428a262a..b7632cb4eb 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/RateTransformationDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/RateTransformationDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.stream @@ -63,7 +63,7 @@ class RateTransformationDocSpec extends AkkaSpec { "expand should repeat last" in { //#expand-last val lastFlow = Flow[Double] - .expand(identity)(s => (s, s)) + .expand(Iterator.continually(_)) //#expand-last val (probe, fut) = TestSource.probe[Double] @@ -81,15 +81,11 @@ class RateTransformationDocSpec extends AkkaSpec { "expand should track drift" in { //#expand-drift val driftFlow = Flow[Double] - .expand((_, 0)) { - case (lastElement, drift) => ((lastElement, drift), (lastElement, drift + 1)) - } + .expand(i => Iterator.from(0).map(i -> _)) //#expand-drift val latch = TestLatch(2) val realDriftFlow = Flow[Double] - .expand(d => { latch.countDown(); (d, 0) }) { - case (lastElement, drift) => ((lastElement, drift), (lastElement, drift + 1)) - } + .expand(d => { latch.countDown(); Iterator.from(0).map(d -> _) }) val (pub, sub) = TestSource.probe[Double] .via(realDriftFlow) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala index 763d0e112b..7cb7bdfdf1 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/ReactiveStreamsDocSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package docs.stream +import akka.NotUsed import akka.stream.ActorMaterializer -import akka.stream.scaladsl.{ RunnableGraph, Flow, Sink, Source } +import akka.stream.scaladsl.{ Flow, Sink, Source } import akka.stream.testkit._ import org.reactivestreams.Processor @@ -41,7 +42,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec { val impl = new Fixture { override def tweets: Publisher[Tweet] = - TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.asPublisher(false)) + TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.asPublisher(fanout = false)) override def storage = TestSubscriber.manualProbe[Author] @@ -139,7 +140,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec { // An example Processor factory def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run() - val flow: Flow[Int, Int, Unit] = Flow.fromProcessor(() => createProcessor) + val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() => createProcessor) //#use-processor } diff --git a/akka-docs-dev/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala b/akka-docs/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala rename to akka-docs/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala index 0098dc1de9..8fc348531a 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/StreamBuffersRateSpec.scala @@ -1,5 +1,6 @@ package docs.stream +import akka.NotUsed import akka.stream._ import akka.stream.scaladsl._ import akka.stream.testkit.AkkaSpec @@ -57,10 +58,10 @@ class StreamBuffersRateSpec extends AkkaSpec { "explcit buffers" in { trait Job - def inboundJobsConnector(): Source[Job, Unit] = Source.empty + def inboundJobsConnector(): Source[Job, NotUsed] = Source.empty //#explicit-buffers-backpressure // Getting a stream of jobs from an imaginary external system as a Source - val jobs: Source[Job, Unit] = inboundJobsConnector() + val jobs: Source[Job, NotUsed] = inboundJobsConnector() jobs.buffer(1000, OverflowStrategy.backpressure) //#explicit-buffers-backpressure diff --git a/akka-docs-dev/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala index 4bc2dc4d0f..98a49dc501 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/StreamPartialFlowGraphDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala index 824d6196c1..9cbeb5e635 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/StreamTestKitDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package docs.stream diff --git a/akka-docs-dev/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala similarity index 91% rename from akka-docs-dev/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala index 3f54d72abb..af96e467a0 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/TwitterStreamQuickstartDocSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream //#imports +import akka.{ Done, NotUsed } import akka.actor.ActorSystem import akka.stream.{ ClosedShape, ActorMaterializer, OverflowStrategy } import akka.stream.scaladsl._ @@ -73,7 +74,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { //#first-sample //#authors-filter-map - val authors: Source[Author, Unit] = + val authors: Source[Author, NotUsed] = tweets .filter(_.hashtags.contains(akka)) .map(_.author) @@ -82,7 +83,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { trait Example3 { //#authors-collect - val authors: Source[Author, Unit] = + val authors: Source[Author, NotUsed] = tweets.collect { case t if t.hashtags.contains(akka) => t.author } //#authors-collect } @@ -101,7 +102,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { "mapConcat hashtags" in { //#hashtags-mapConcat - val hashtags: Source[Hashtag, Unit] = tweets.mapConcat(_.hashtags.toList) + val hashtags: Source[Hashtag, NotUsed] = tweets.mapConcat(_.hashtags.toList) //#hashtags-mapConcat } @@ -113,8 +114,8 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { } "simple broadcast" in { - val writeAuthors: Sink[Author, Future[Unit]] = Sink.ignore - val writeHashtags: Sink[Hashtag, Future[Unit]] = Sink.ignore + val writeAuthors: Sink[Author, Future[Done]] = Sink.ignore + val writeHashtags: Sink[Hashtag, Future[Done]] = Sink.ignore // format: OFF //#flow-graph-broadcast @@ -151,7 +152,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { import scala.concurrent.duration._ //#backpressure-by-readline - val completion: Future[Unit] = + val completion: Future[Done] = Source(1 to 10) .map(i => { println(s"map => $i"); i }) .runForeach { i => readLine(s"Element = $i; continue reading? 
[press enter]\n") } @@ -163,7 +164,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec { "count elements on finite stream" in { //#tweets-fold-count - val count: Flow[Tweet, Int, Unit] = Flow[Tweet].map(_ => 1) + val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ => 1) val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala similarity index 97% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala index db7a994681..efee481c4c 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeByteStrings.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.scaladsl.{ Flow, Sink, Source } import akka.util.ByteString @@ -89,7 +90,7 @@ class RecipeByteStrings extends RecipeSpec { val data = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9))) //#compacting-bytestrings - val compacted: Source[ByteString, Unit] = data.map(_.compact) + val compacted: Source[ByteString, NotUsed] = data.map(_.compact) //#compacting-bytestrings Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala similarity index 96% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala index 3203890b08..dcad4e53fe 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeCollectingMetrics.scala @@ -75,7 +75,7 @@ class RecipeCollectingMetrics extends RecipeSpec { // To finish the recipe, we simply use :class:`ZipWith` to trigger reading the latest value from the ``currentLoad`` // stream whenever a new ``Tick`` arrives on the stream of ticks, ``reportTicks``. // - // .. includecode:: code/docs/stream/cookbook/RecipeCollectingMetrics.scala#periodic-metrics-collection + // .. includecode:: ../code/docs/stream/cookbook/RecipeCollectingMetrics.scala#periodic-metrics-collection // // .. warning:: // In order for this recipe to work the buffer size for the :class:`ZipWith` must be set to 1. 
The reason for this is diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala similarity index 93% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala index 729dce0e13..2ad50e1c27 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeDigest.scala @@ -2,6 +2,7 @@ package docs.stream.cookbook import java.security.MessageDigest +import akka.NotUsed import akka.stream.scaladsl.{ Sink, Source } import akka.util.ByteString @@ -41,7 +42,7 @@ class RecipeDigest extends RecipeSpec { } } - val digest: Source[ByteString, Unit] = data.transform(() => digestCalculator("SHA-256")) + val digest: Source[ByteString, NotUsed] = data.transform(() => digestCalculator("SHA-256")) //#calculating-digest Await.result(digest.runWith(Sink.head), 3.seconds) should be( diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeDroppyBroadcast.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeDroppyBroadcast.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeDroppyBroadcast.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeDroppyBroadcast.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala similarity index 78% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala index f46c7780ea..5ffc2e467e 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeFlattenSeq.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.scaladsl.{ Sink, Source } import scala.collection.immutable @@ -15,8 +16,8 @@ class RecipeFlattenSeq extends RecipeSpec { val someDataSource = Source(List(List("1"), List("2"), List("3", "4", "5"), List("6", "7"))) //#flattening-seqs - val myData: Source[List[Message], Unit] = someDataSource - val flattened: Source[Message, Unit] = myData.mapConcat(identity) + val myData: Source[List[Message], NotUsed] = someDataSource + val flattened: Source[Message, NotUsed] = myData.mapConcat(identity) //#flattening-seqs Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7")) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala index dc28c2a75d..c251b86693 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeGlobalRateLimit.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.actor.{ Props, ActorRef, Actor } import akka.actor.Actor.Receive import akka.stream.ClosedShape @@ -78,7 +79,7 @@ class RecipeGlobalRateLimit extends RecipeSpec { "work" in { //#global-limiter-flow - def limitGlobal[T](limiter: ActorRef, maxAllowedWait: 
FiniteDuration): Flow[T, T, Unit] = { + def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, NotUsed] = { import akka.pattern.ask import akka.util.Timeout Flow[T].mapAsync(4)((element: T) => { diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeHold.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeHold.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeHold.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeHold.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala similarity index 86% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala index 9a205b977a..5375c14e46 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeKeepAlive.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.ClosedShape import akka.stream.scaladsl._ import akka.stream.testkit._ @@ -14,7 +15,7 @@ class RecipeKeepAlive extends RecipeSpec { //#inject-keepalive import scala.concurrent.duration._ - val injectKeepAlive: Flow[ByteString, ByteString, Unit] = + val injectKeepAlive: Flow[ByteString, ByteString, NotUsed] = Flow[ByteString].keepAlive(1.second, () => keepaliveMessage) //#inject-keepalive diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeLoggingElements.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeLoggingElements.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeLoggingElements.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeLoggingElements.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeManualTrigger.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeManualTrigger.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeManualTrigger.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeManualTrigger.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala similarity index 91% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala index 1ea6f7bada..98bdd2ffeb 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMissedTicks.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.scaladsl._ import akka.stream.testkit._ import scala.concurrent.duration._ @@ -19,12 +20,12 @@ class RecipeMissedTicks extends RecipeSpec { val sink = Sink.fromSubscriber(sub) //#missed-ticks - val missedTicks: Flow[Tick, Int, Unit] = + val missedTicks: Flow[Tick, Int, NotUsed] = Flow[Tick].conflate(seed = (_) => 0)( (missedTicks, tick) => missedTicks + 1) //#missed-ticks val latch = TestLatch(3) - val realMissedTicks: Flow[Tick, Int, Unit] = + val realMissedTicks: Flow[Tick, Int, NotUsed] = Flow[Tick].conflate(seed = (_) => 0)( (missedTicks, tick) => { latch.countDown(); missedTicks + 1 }) diff --git 
a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala similarity index 92% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala index e48438aca5..bd9163684f 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeMultiGroupBy.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.scaladsl.{ Sink, Source } import scala.collection.immutable @@ -23,7 +24,7 @@ class RecipeMultiGroupBy extends RecipeSpec { //#multi-groupby val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics - val messageAndTopic: Source[(Message, Topic), Unit] = elems.mapConcat { msg: Message => + val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message => val topicsForMessage = topicMapper(msg) // Create a (Msg, Topic) pair for each of the topics // the message belongs to diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala similarity index 67% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala index 043b07b1f8..494303fb32 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeReduceByKey.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.{ Graph, FlowShape, Inlet, Outlet, Attributes, OverflowStrategy } import akka.stream.scaladsl._ import scala.concurrent.{ Await, Future } @@ -17,13 +18,13 @@ class RecipeReduceByKey extends RecipeSpec { def words = Source(List("hello", "world", "and", "hello", "universe", "akka") ++ List.fill(1000)("rocks!")) //#word-count - val counts: Source[(String, Int), Unit] = words + val counts: Source[(String, Int), NotUsed] = words // split the words into separate streams first .groupBy(MaximumDistinctWords, identity) + //transform each element to pair with number of words in it + .map(_ -> 1) // add counting logic to the streams - .fold(("", 0)) { - case ((_, count), word) => (word, count + 1) - } + .reduce((l, r) => (l._1, l._2 + r._2)) // get a stream of word counts .mergeSubstreams //#word-count @@ -45,26 +46,19 @@ class RecipeReduceByKey extends RecipeSpec { def reduceByKey[In, K, Out]( maximumGroupSize: Int, groupKey: (In) => K, - foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out), Unit] = { + map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = { Flow[In] - .groupBy(maximumGroupSize, groupKey) - .fold(Option.empty[(K, Out)]) { - case (None, elem) => - val key = groupKey(elem) - Some((key, fold(foldZero(key), elem))) - case (Some((key, out)), elem) => - Some((key, fold(out, elem))) - } - .map(_.get) + .groupBy[K](maximumGroupSize, groupKey) + .map(e => groupKey(e) -> map(e)) + .reduce((l, r) => l._1 -> 
reduce(l._2, r._2)) .mergeSubstreams } - val wordCounts = words.via(reduceByKey( - MaximumDistinctWords, - groupKey = (word: String) => word, - foldZero = (key: String) => 0)(fold = (count: Int, elem: String) => count + 1)) - + val wordCounts = words.via( + reduceByKey(MaximumDistinctWords, + groupKey = (word: String) => word, + map = (word: String) => 1)((left: Int, right: Int) => left + right)) //#reduce-by-key-general Await.result(wordCounts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set( diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala similarity index 94% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala index 135d61c0a9..609803eab5 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeSimpleDrop.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.scaladsl.{ Flow, Sink, Source } import akka.stream.testkit._ import scala.concurrent.duration._ @@ -13,7 +14,7 @@ class RecipeSimpleDrop extends RecipeSpec { "work" in { //#simple-drop - val droppyStream: Flow[Message, Message, Unit] = + val droppyStream: Flow[Message, Message, NotUsed] = Flow[Message].conflate(seed = identity)((lastMessage, newMessage) => newMessage) //#simple-drop val latch = TestLatch(2) diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeSpec.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeSpec.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeSpec.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeSpec.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeToStrict.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeToStrict.scala similarity index 100% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeToStrict.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeToStrict.scala diff --git a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala similarity index 89% rename from akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala rename to akka-docs/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala index 9a17a79007..ee3d08fafc 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala +++ b/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeWorkerPool.scala @@ -1,5 +1,6 @@ package docs.stream.cookbook +import akka.NotUsed import akka.stream.FlowShape import akka.stream.scaladsl._ import akka.testkit.TestProbe @@ -18,7 +19,7 @@ class RecipeWorkerPool extends RecipeSpec { val worker = Flow[String].map(_ + " done") //#worker-pool - def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, Unit] = { + def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = { import GraphDSL.Implicits._ Flow.fromGraph(GraphDSL.create() { implicit b => @@ -35,7 +36,7 @@ class RecipeWorkerPool extends RecipeSpec { }) } - val processedJobs: Source[Result, Unit] = myJobs.via(balancer(worker, 3)) + val processedJobs: Source[Result, NotUsed] = myJobs.via(balancer(worker, 3)) //#worker-pool 
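
Across these documentation specs the change is the same one: the materialized value of a plain ``Source`` or ``Flow`` is now typed as ``akka.NotUsed`` instead of ``Unit``, and sinks such as ``Sink.ignore`` materialize a ``Future[Done]`` instead of a ``Future[Unit]``. A minimal sketch of the new signatures, assuming akka-stream 2.4.x on the classpath (the object and value names below are illustrative only, not part of this patch):

```scala
import akka.{ Done, NotUsed }
import akka.stream.scaladsl.{ Flow, Sink, Source }
import scala.concurrent.Future

object MaterializedValueSketch {
  // A plain Source or Flow no longer materializes Unit; it materializes the
  // marker type NotUsed, signalling that there is nothing useful to keep.
  val numbers: Source[Int, NotUsed] = Source(1 to 10)
  val doubled: Flow[Int, Int, NotUsed] = Flow[Int].map(_ * 2)

  // Sinks that previously materialized Future[Unit] now materialize Future[Done].
  val drain: Sink[Int, Future[Done]] = Sink.ignore
}
```
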
Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set( diff --git a/akka-docs-dev/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala similarity index 88% rename from akka-docs-dev/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala index 5cb634444f..a672c6059e 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/io/StreamFileDocSpec.scala @@ -1,11 +1,12 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package docs.stream.io import java.io.File import akka.stream._ +import akka.stream.io.IOResult import akka.stream.scaladsl.{ FileIO, Sink, Source } import akka.stream.testkit.Utils._ import akka.stream.testkit._ @@ -45,7 +46,7 @@ class StreamFileDocSpec extends AkkaSpec(UnboundedMailboxConfig) { //#file-source - val foreach: Future[Long] = FileIO.fromFile(file) + val foreach: Future[IOResult] = FileIO.fromFile(file) .to(Sink.ignore) .run() //#file-source diff --git a/akka-docs-dev/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala b/akka-docs/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala similarity index 98% rename from akka-docs-dev/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala rename to akka-docs/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala index 4502237c7e..aad39f6f16 100644 --- a/akka-docs-dev/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/stream/io/StreamTcpDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package docs.stream.io diff --git a/akka-docs/rst/scala/code/docs/testkit/PlainWordSpec.scala b/akka-docs/rst/scala/code/docs/testkit/PlainWordSpec.scala index 81143a909d..5ff4019985 100644 --- a/akka-docs/rst/scala/code/docs/testkit/PlainWordSpec.scala +++ b/akka-docs/rst/scala/code/docs/testkit/PlainWordSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.testkit diff --git a/akka-docs/rst/scala/code/docs/testkit/TestKitUsageSpec.scala b/akka-docs/rst/scala/code/docs/testkit/TestKitUsageSpec.scala index ccfdeac4f5..81e0bce572 100644 --- a/akka-docs/rst/scala/code/docs/testkit/TestKitUsageSpec.scala +++ b/akka-docs/rst/scala/code/docs/testkit/TestKitUsageSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.testkit diff --git a/akka-docs/rst/scala/code/docs/testkit/TestkitDocSpec.scala b/akka-docs/rst/scala/code/docs/testkit/TestkitDocSpec.scala index 8f4f06dc72..f3a6cac5ee 100644 --- a/akka-docs/rst/scala/code/docs/testkit/TestkitDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/testkit/TestkitDocSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.testkit diff --git a/akka-docs-dev/rst/scala/code/docs/utils/TestUtils.scala b/akka-docs/rst/scala/code/docs/utils/TestUtils.scala similarity index 91% rename from akka-docs-dev/rst/scala/code/docs/utils/TestUtils.scala rename to akka-docs/rst/scala/code/docs/utils/TestUtils.scala index 9e88725e81..c1ed1e1006 100644 --- a/akka-docs-dev/rst/scala/code/docs/utils/TestUtils.scala +++ b/akka-docs/rst/scala/code/docs/utils/TestUtils.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.utils diff --git a/akka-docs-dev/rst/scala/http/client-side/connection-level.rst b/akka-docs/rst/scala/http/client-side/connection-level.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/client-side/connection-level.rst rename to akka-docs/rst/scala/http/client-side/connection-level.rst diff --git a/akka-docs-dev/rst/scala/http/client-side/host-level.rst b/akka-docs/rst/scala/http/client-side/host-level.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/client-side/host-level.rst rename to akka-docs/rst/scala/http/client-side/host-level.rst diff --git a/akka-docs-dev/rst/scala/http/client-side/https-support.rst b/akka-docs/rst/scala/http/client-side/https-support.rst similarity index 93% rename from akka-docs-dev/rst/scala/http/client-side/https-support.rst rename to akka-docs/rst/scala/http/client-side/https-support.rst index be5620c65b..4c480db335 100644 --- a/akka-docs-dev/rst/scala/http/client-side/https-support.rst +++ b/akka-docs/rst/scala/http/client-side/https-support.rst @@ -10,10 +10,11 @@ Akka HTTP supports TLS encryption on the client-side as well as on the :ref:`ser Akka HTTP 1.0 does not completely validate certificates when using HTTPS. Please do not treat HTTPS connections made with this version as secure. Requests are vulnerable to a Man-In-The-Middle attack via certificate substitution. -The central vehicle for configuring encryption is the ``HttpsContext``, which is defined as such: +The central vehicle for configuring encryption is the ``HttpsConnectionContext``, which can be created using +the static method ``ConnectionContext.https`` which is defined like this: -.. includecode2:: /../../akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala - :snippet: https-context-impl +.. includecode:: /../../akka-http-core/src/main/scala/akka/http/scaladsl/ConnectionContext.scala + :include: https-context-creation In addition to the ``outgoingConnection``, ``newHostConnectionPool`` and ``cachedHostConnectionPool`` methods the `akka.http.scaladsl.Http`_ extension also defines ``outgoingConnectionTls``, ``newHostConnectionPoolTls`` and diff --git a/akka-docs-dev/rst/scala/http/client-side/index.rst b/akka-docs/rst/scala/http/client-side/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/client-side/index.rst rename to akka-docs/rst/scala/http/client-side/index.rst diff --git a/akka-docs-dev/rst/scala/http/client-side/request-level.rst b/akka-docs/rst/scala/http/client-side/request-level.rst similarity index 96% rename from akka-docs-dev/rst/scala/http/client-side/request-level.rst rename to akka-docs/rst/scala/http/client-side/request-level.rst index c322058ff7..5061b124bc 100644 --- a/akka-docs-dev/rst/scala/http/client-side/request-level.rst +++ b/akka-docs/rst/scala/http/client-side/request-level.rst @@ -54,9 +54,6 @@ to the Actor as a message. .. includecode:: ../../code/docs/http/scaladsl/HttpClientExampleSpec.scala :include: single-request-in-actor-example -An ``ActorMaterializer`` instance needed for Http to perfom its duties can be obtained using the ``ImplicitMaterializer`` -helper trait. 
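
Since the updated https-support page pulls the ``ConnectionContext.https`` snippet in by reference rather than showing it inline, a minimal sketch of obtaining a client-side ``HttpsConnectionContext`` from an already-configured ``SSLContext`` may help; it assumes the renamed ``ConnectionContext`` API described above, and the object and method names are illustrative only:

```scala
import javax.net.ssl.SSLContext
import akka.http.scaladsl.{ ConnectionContext, HttpsConnectionContext }

object HttpsContextSketch {
  // Wrap an existing SSLContext in the new HttpsConnectionContext;
  // ConnectionContext.https replaces the former HttpsContext factory.
  def forClient(sslContext: SSLContext): HttpsConnectionContext =
    ConnectionContext.https(sslContext)
}
```
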
- Example ------- diff --git a/akka-docs-dev/rst/scala/http/client-side/websocket-support.rst b/akka-docs/rst/scala/http/client-side/websocket-support.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/client-side/websocket-support.rst rename to akka-docs/rst/scala/http/client-side/websocket-support.rst diff --git a/akka-docs-dev/rst/scala/http/common/de-coding.rst b/akka-docs/rst/scala/http/common/de-coding.rst similarity index 85% rename from akka-docs-dev/rst/scala/http/common/de-coding.rst rename to akka-docs/rst/scala/http/common/de-coding.rst index 89e2c9ae47..022c19c907 100644 --- a/akka-docs-dev/rst/scala/http/common/de-coding.rst +++ b/akka-docs/rst/scala/http/common/de-coding.rst @@ -12,5 +12,5 @@ The support is not enabled automatically, but must be explicitly requested. For enabling message encoding/decoding with :ref:`Routing DSL ` see the :ref:`CodingDirectives`. .. _HTTP spec: http://tools.ietf.org/html/rfc7231#section-3.1.2.1 -.. _akka.http.scaladsl.coding: https://github.com/akka/akka/tree/release-2.3-dev/akka-http/src/main/scala/akka/http/scaladsl/coding +.. _akka.http.scaladsl.coding: @github@/akka-http/src/main/scala/akka/http/scaladsl/coding diff --git a/akka-docs-dev/rst/scala/http/common/http-model.rst b/akka-docs/rst/scala/http/common/http-model.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/http-model.rst rename to akka-docs/rst/scala/http/common/http-model.rst diff --git a/akka-docs-dev/rst/scala/http/common/index.rst b/akka-docs/rst/scala/http/common/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/index.rst rename to akka-docs/rst/scala/http/common/index.rst diff --git a/akka-docs-dev/rst/scala/http/common/json-support.rst b/akka-docs/rst/scala/http/common/json-support.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/json-support.rst rename to akka-docs/rst/scala/http/common/json-support.rst diff --git a/akka-docs-dev/rst/scala/http/common/marshalling.rst b/akka-docs/rst/scala/http/common/marshalling.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/marshalling.rst rename to akka-docs/rst/scala/http/common/marshalling.rst diff --git a/akka-docs-dev/rst/scala/http/common/unmarshalling.rst b/akka-docs/rst/scala/http/common/unmarshalling.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/unmarshalling.rst rename to akka-docs/rst/scala/http/common/unmarshalling.rst diff --git a/akka-docs-dev/rst/scala/http/common/xml-support.rst b/akka-docs/rst/scala/http/common/xml-support.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/common/xml-support.rst rename to akka-docs/rst/scala/http/common/xml-support.rst diff --git a/akka-docs-dev/rst/scala/http/configuration.rst b/akka-docs/rst/scala/http/configuration.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/configuration.rst rename to akka-docs/rst/scala/http/configuration.rst diff --git a/akka-docs-dev/rst/scala/http/index.rst b/akka-docs/rst/scala/http/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/index.rst rename to akka-docs/rst/scala/http/index.rst diff --git a/akka-docs-dev/rst/scala/http/introduction.rst b/akka-docs/rst/scala/http/introduction.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/introduction.rst rename to akka-docs/rst/scala/http/introduction.rst diff --git a/akka-docs-dev/rst/scala/http/low-level-server-side-api.rst b/akka-docs/rst/scala/http/low-level-server-side-api.rst 
similarity index 97% rename from akka-docs-dev/rst/scala/http/low-level-server-side-api.rst rename to akka-docs/rst/scala/http/low-level-server-side-api.rst index 8c132a6350..b4c2156b44 100644 --- a/akka-docs-dev/rst/scala/http/low-level-server-side-api.rst +++ b/akka-docs/rst/scala/http/low-level-server-side-api.rst @@ -12,7 +12,7 @@ It sports the following features: - Full support for `HTTP pipelining`_ - Full support for asynchronous HTTP streaming including "chunked" transfer encoding accessible through an idiomatic API - Optional SSL/TLS encryption -- Websocket support +- WebSocket support .. _HTTP persistent connections: http://en.wikipedia.org/wiki/HTTP_persistent_connection .. _HTTP pipelining: http://en.wikipedia.org/wiki/HTTP_pipelining @@ -140,10 +140,11 @@ Server-Side HTTPS Support Akka HTTP supports TLS encryption on the server-side as well as on the :ref:`client-side `. -The central vehicle for configuring encryption is the ``HttpsContext``, which is defined as such: +The central vehicle for configuring encryption is the ``HttpsConnectionContext``, which can be created using +the static method ``ConnectionContext.https`` which is defined like this: -.. includecode2:: /../../akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala - :snippet: https-context-impl +.. includecode:: /../../akka-http-core/src/main/scala/akka/http/scaladsl/ConnectionContext.scala + :include: https-context-creation On the server-side the ``bind``, and ``bindAndHandleXXX`` methods of the `akka.http.scaladsl.Http`_ extension define an optional ``httpsContext`` parameter, which can receive the HTTPS configuration in the form of an ``HttpsContext`` diff --git a/akka-docs-dev/rst/scala/http/migration-from-spray.rst b/akka-docs/rst/scala/http/migration-from-spray.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/migration-from-spray.rst rename to akka-docs/rst/scala/http/migration-from-spray.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/case-class-extraction.rst b/akka-docs/rst/scala/http/routing-dsl/case-class-extraction.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/case-class-extraction.rst rename to akka-docs/rst/scala/http/routing-dsl/case-class-extraction.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/alphabetically.rst b/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst similarity index 96% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/alphabetically.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst index 86a93aa8c4..07654b87b6 100644 --- a/akka-docs-dev/rst/scala/http/routing-dsl/directives/alphabetically.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/alphabetically.rst @@ -64,7 +64,13 @@ Directive Description closest :ref:`-handleExceptions-` directive and its ``ExceptionHandler`` :ref:`-fileUpload-` Provides a stream of an uploaded file from a multipart request :ref:`-formField-` Extracts an HTTP form field from the request +:ref:`-formFieldMap-` Extracts a number of HTTP form field from the request as + a ``Map[String, String]`` +:ref:`-formFieldMultiMap-` Extracts a number of HTTP form field from the request as + a ``Map[String, List[String]`` :ref:`-formFields-` Extracts a number of HTTP form field from the request +:ref:`-formFieldSeq-` Extracts a number of HTTP form field from the request as + a ``Seq[(String, String)]`` :ref:`-get-` Rejects all non-GET requests :ref:`-getFromBrowseableDirectories-` Serves the content of the given 
directories as a file-system browser, i.e. files are sent and directories served as browseable listings @@ -80,11 +86,11 @@ Directive Description given ``ExceptionHandler`` :ref:`-handleRejections-` Transforms rejections produced by the inner route using the given ``RejectionHandler`` -:ref:`-handleWebsocketMessages-` Handles websocket requests with the given handler and rejects other requests - with an ``ExpectedWebsocketRequestRejection`` -:ref:`-handleWebsocketMessagesForProtocol-` Handles websocket requests with the given handler if the subprotocol matches - and rejects other requests with an ``ExpectedWebsocketRequestRejection`` or - an ``UnsupportedWebsocketSubprotocolRejection``. +:ref:`-handleWebSocketMessages-` Handles websocket requests with the given handler and rejects other requests + with an ``ExpectedWebSocketRequestRejection`` +:ref:`-handleWebSocketMessagesForProtocol-` Handles websocket requests with the given handler if the subprotocol matches + and rejects other requests with an ``ExpectedWebSocketRequestRejection`` or + an ``UnsupportedWebSocketSubprotocolRejection``. :ref:`-handleWith-` Completes the request using a given function :ref:`-head-` Rejects all non-HEAD requests :ref:`-headerValue-` Extracts an HTTP header value using a given ``HttpHeader ⇒ Option[T]`` diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejection.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejection.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejection.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejection.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejections.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejections.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejections.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/cancelRejections.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extract.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extract.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extract.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extract.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractExecutionContext.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractLog.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractLog.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractLog.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractLog.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractMaterializer.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractMaterializer.rst similarity index 100% rename 
from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractMaterializer.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractMaterializer.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractRequest.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractRequest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestContext.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestContext.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestContext.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractRequestContext.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractSettings.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractSettings.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractSettings.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractSettings.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractUnmatchedPath.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractUri.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractUri.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/extractUri.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/extractUri.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapInnerRoute.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRejections.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRejections.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRejections.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRejections.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRequest.rst 
b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRequest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRequest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRequest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRequestContext.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRequestContext.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRequestContext.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRequestContext.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponse.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponse.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponse.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponse.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseEntity.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapResponseHeaders.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResult.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResult.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResult.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResult.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultFuture.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultPF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst rename to 
akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapRouteResultWithPF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapSettings.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapSettings.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapSettings.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapSettings.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/mapUnmatchedPath.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/pass.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/pass.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/pass.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/pass.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/provide.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/provide.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/provide.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/provide.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejections.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejections.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejections.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejections.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/recoverRejectionsWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/textract.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/textract.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/textract.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/textract.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/tprovide.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/tprovide.rst similarity index 100% rename from 
akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/tprovide.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/tprovide.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withExecutionContext.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withExecutionContext.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withExecutionContext.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withExecutionContext.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withLog.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withLog.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withLog.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withLog.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withMaterializer.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withMaterializer.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withMaterializer.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withMaterializer.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withSettings.rst b/akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withSettings.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/basic-directives/withSettings.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/basic-directives/withSettings.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/by-trait.rst b/akka-docs/rst/scala/http/routing-dsl/directives/by-trait.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/by-trait.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/by-trait.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cache-condition-directives/conditional.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cache-condition-directives/conditional.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cache-condition-directives/conditional.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cache-condition-directives/conditional.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cache-condition-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cache-condition-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cache-condition-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cache-condition-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequest.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst similarity index 100% rename 
from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/decodeRequestWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponse.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponse.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponse.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponse.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/encodeResponseWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/requestEncodedWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/requestEncodedWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/requestEncodedWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/requestEncodedWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst b/akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/coding-directives/responseEncodingAccepted.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/cookie.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/cookie.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/cookie.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/cookie.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/deleteCookie.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/deleteCookie.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/deleteCookie.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/deleteCookie.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/optionalCookie.rst 
b/akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/optionalCookie.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/optionalCookie.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/optionalCookie.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/setCookie.rst b/akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/setCookie.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/cookie-directives/setCookie.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/cookie-directives/setCookie.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/custom-directives.rst b/akka-docs/rst/scala/http/routing-dsl/directives/custom-directives.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/custom-directives.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/custom-directives.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logRequest.rst b/akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logRequest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logRequest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logRequest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logRequestResult.rst b/akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logRequestResult.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logRequestResult.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logRequestResult.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logResult.rst b/akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logResult.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/debugging-directives/logResult.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/debugging-directives/logResult.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/handleExceptions.rst b/akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/handleExceptions.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/handleExceptions.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/handleExceptions.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/handleRejections.rst b/akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/handleRejections.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/handleRejections.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/handleRejections.rst diff --git 
a/akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/execution-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/execution-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectories.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromBrowseableDirectory.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromDirectory.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromFile.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResource.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/getFromResourceDirectory.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/index.rst rename to 
akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-and-resource-directives/listDirectoryContents.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/fileUpload.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/fileUpload.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/fileUpload.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/fileUpload.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/uploadedFile.rst b/akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/uploadedFile.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/file-upload-directives/uploadedFile.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/file-upload-directives/uploadedFile.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/formField.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formField.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/formField.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formField.rst diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMap.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMap.rst new file mode 100644 index 0000000000..fc20e6492e --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMap.rst @@ -0,0 +1,29 @@ +.. _-formFieldMap-: + +formFieldMap +============ + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala + :snippet: formFieldMap + +Description +----------- +Extracts all HTTP form fields at once as a ``Map[String, String]`` mapping form field names to form field values. + +If form data contain a field value several times, the map will contain the last one. + +See :ref:`-formFields-` for an in-depth description. + +Warning +------- +Use of this directive can result in performance degradation or even in ``OutOfMemoryError`` s. +See :ref:`-formFieldSeq-` for details. + +Example +------- + +.. 
includecode2:: ../../../../code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala + :snippet: formFieldMap diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst new file mode 100644 index 0000000000..3f023484e4 --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldMultiMap.rst @@ -0,0 +1,33 @@ +.. _-formFieldMultiMap-: + +formFieldMultiMap +================= + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala + :snippet: formFieldMultiMap + +Description +----------- + +Extracts all HTTP form fields at once as a multi-map of type ``Map[String, List[String]]`` mapping +a form field name to a list of all its values. + +This directive can be used if form fields can occur several times. + +The order of values is *not* specified. + +See :ref:`-formFields-` for an in-depth description. + +Warning +------- +Use of this directive can result in performance degradation or even in ``OutOfMemoryError`` s. +See :ref:`-formFieldSeq-` for details. + +Example +------- + +.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala + :snippet: formFieldMultiMap diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldSeq.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldSeq.rst new file mode 100644 index 0000000000..3ff16bb611 --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFieldSeq.rst @@ -0,0 +1,30 @@ +.. _-formFieldSeq-: + +formFieldSeq +============ + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala + :snippet: formFieldSeq + +Description +----------- +Extracts all HTTP form fields at once in the original order as (name, value) tuples of type ``(String, String)``. + +This directive can be used if the exact order of form fields is important or if parameters can occur several times. + +See :ref:`-formFields-` for an in-depth description. + +Warning +------- +The directive reads all incoming HTTP form fields without any configured upper bound. +This means that requests with form fields holding a significant amount of data (e.g. during a file upload) +can cause performance issues or even an ``OutOfMemoryError``. + +Example +------- + +.. 
includecode2:: ../../../../code/docs/http/scaladsl/server/directives/FormFieldDirectivesExamplesSpec.scala + :snippet: formFieldSeq diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/formFields.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFields.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/formFields.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/formFields.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst similarity index 62% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst index 80c461f02f..416da4ee44 100644 --- a/akka-docs-dev/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/form-field-directives/index.rst @@ -7,4 +7,7 @@ FormFieldDirectives :maxdepth: 1 formField - formFields \ No newline at end of file + formFields + formFieldSeq + formFieldMap + formFieldMultiMap \ No newline at end of file diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/completeOrRecoverWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/future-directives/completeOrRecoverWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/completeOrRecoverWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/future-directives/completeOrRecoverWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/future-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/future-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/onComplete.rst b/akka-docs/rst/scala/http/routing-dsl/directives/future-directives/onComplete.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/onComplete.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/future-directives/onComplete.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/onSuccess.rst b/akka-docs/rst/scala/http/routing-dsl/directives/future-directives/onSuccess.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/future-directives/onSuccess.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/future-directives/onSuccess.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValue.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValue.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValue.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValue.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValueByName.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValueByName.rst similarity index 100% rename from 
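The three new form-field directives documented above differ only in the shape in which they hand the decoded form data to the inner route. A minimal sketch of all three side by side (this is not taken from `FormFieldDirectivesExamplesSpec.scala`; the route paths and response strings are invented for illustration):

```
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route

// Hypothetical routes, for illustration only:
val formFieldRoutes: Route =
  post {
    path("asMap") {
      formFieldMap { fields => // Map[String, String]; for repeated names the last value wins
        complete(fields.map { case (name, value) => s"$name -> $value" }.mkString(", "))
      }
    } ~
    path("asMultiMap") {
      formFieldMultiMap { fields => // Map[String, List[String]]; all values per name, order unspecified
        complete(fields.map { case (name, values) => s"$name -> ${values.size} value(s)" }.mkString(", "))
      }
    } ~
    path("asSeq") {
      formFieldSeq { fields => // Seq[(String, String)] in the original order, duplicates preserved
        complete(fields.map { case (name, value) => s"$name=$value" }.mkString("&"))
      }
    }
  }
```

Posting `a=1&a=2&b=3` as `application/x-www-form-urlencoded` would, per the descriptions above, keep only the last `a` in the map variant while the multi-map and sequence variants retain both values.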
akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValueByName.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValueByName.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValueByType.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValueByType.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValueByType.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValueByType.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValuePF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValuePF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/headerValuePF.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/headerValuePF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValue.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValue.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValue.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValue.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByName.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByName.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByName.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByName.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByType.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByType.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByType.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValueByType.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValuePF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValuePF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValuePF.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/header-directives/optionalHeaderValuePF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/extractHost.rst b/akka-docs/rst/scala/http/routing-dsl/directives/host-directives/extractHost.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/extractHost.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/host-directives/extractHost.rst diff --git 
a/akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/host.rst b/akka-docs/rst/scala/http/routing-dsl/directives/host-directives/host.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/host.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/host-directives/host.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/host-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/host-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/host-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/completeWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/completeWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/completeWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/completeWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/entity.rst b/akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/entity.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/entity.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/entity.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/handleWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/handleWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/handleWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/handleWith.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/marshalling-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/marshalling-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/delete.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/delete.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/delete.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/delete.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/extractMethod.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/extractMethod.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/extractMethod.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/extractMethod.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/get.rst 
b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/get.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/get.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/get.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/head.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/head.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/head.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/head.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/method.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/method.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/method.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/method.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/options.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/options.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/options.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/options.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/overrideMethodWithParameter.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/patch.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/patch.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/patch.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/patch.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/post.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/post.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/post.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/post.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/put.rst b/akka-docs/rst/scala/http/routing-dsl/directives/method-directives/put.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/method-directives/put.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/method-directives/put.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/extractClientIP.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/extractClientIP.rst similarity index 100% rename from 
akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/extractClientIP.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/extractClientIP.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/rejectEmptyResponse.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/rejectEmptyResponse.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/rejectEmptyResponse.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/rejectEmptyResponse.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityEmpty.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityEmpty.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityEmpty.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityEmpty.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityPresent.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityPresent.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityPresent.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/requestEntityPresent.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/selectPreferredLanguage.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/selectPreferredLanguage.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/selectPreferredLanguage.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/selectPreferredLanguage.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/validate.rst b/akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/validate.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/misc-directives/validate.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/misc-directives/validate.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameter.rst b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameter.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameter.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameter.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMap.rst 
b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMap.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMap.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMap.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMultiMap.rst b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMultiMap.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMultiMap.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterMultiMap.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterSeq.rst b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterSeq.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameterSeq.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameterSeq.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameters.rst b/akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameters.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/parameter-directives/parameters.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/parameter-directives/parameters.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/path.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/path.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/path.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathEnd.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathEnd.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathEnd.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathEnd.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathEndOrSingleSlash.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathEndOrSingleSlash.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathEndOrSingleSlash.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathEndOrSingleSlash.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefix.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathPrefixTest.rst 
b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefixTest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathPrefixTest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathPrefixTest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSingleSlash.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSingleSlash.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSingleSlash.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSingleSlash.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSuffix.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSuffix.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSuffix.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSuffix.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSuffixTest.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSuffixTest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/pathSuffixTest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/pathSuffixTest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefix.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefix.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefix.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefix.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefixTest.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefixTest.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefixTest.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/rawPathPrefixTest.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/redirectToNoTrailingSlashIfPresent.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/redirectToNoTrailingSlashIfPresent.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/redirectToNoTrailingSlashIfPresent.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/redirectToNoTrailingSlashIfPresent.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/redirectToTrailingSlashIfMissing.rst b/akka-docs/rst/scala/http/routing-dsl/directives/path-directives/redirectToTrailingSlashIfMissing.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/path-directives/redirectToTrailingSlashIfMissing.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/path-directives/redirectToTrailingSlashIfMissing.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/range-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/range-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/range-directives/index.rst rename to 
akka-docs/rst/scala/http/routing-dsl/directives/range-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/range-directives/withRangeSupport.rst b/akka-docs/rst/scala/http/routing-dsl/directives/range-directives/withRangeSupport.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/range-directives/withRangeSupport.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/range-directives/withRangeSupport.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeader.rst b/akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeader.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeader.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeader.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeaders.rst b/akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeaders.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeaders.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithDefaultHeaders.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeader.rst b/akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeader.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeader.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeader.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeaders.rst b/akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeaders.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeaders.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/respond-with-directives/respondWithHeaders.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/complete.rst b/akka-docs/rst/scala/http/routing-dsl/directives/route-directives/complete.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/complete.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/route-directives/complete.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/failWith.rst b/akka-docs/rst/scala/http/routing-dsl/directives/route-directives/failWith.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/failWith.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/route-directives/failWith.rst diff --git 
a/akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/route-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/route-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/redirect.rst b/akka-docs/rst/scala/http/routing-dsl/directives/route-directives/redirect.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/redirect.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/route-directives/redirect.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/reject.rst b/akka-docs/rst/scala/http/routing-dsl/directives/route-directives/reject.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/route-directives/reject.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/route-directives/reject.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/extractScheme.rst b/akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/extractScheme.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/extractScheme.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/extractScheme.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/scheme.rst b/akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/scheme.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/scheme-directives/scheme.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/scheme-directives/scheme.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasic.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasic.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasic.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasic.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicAsync.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst rename to 
akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateBasicPFAsync.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2Async.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2Async.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2Async.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2Async.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PF.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PF.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PF.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PF.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PFAsync.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PFAsync.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PFAsync.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOAuth2PFAsync.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authenticateOrRejectWithChallenge.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authorize.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authorize.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/authorize.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/authorize.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/extractCredentials.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/extractCredentials.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/extractCredentials.rst rename to 
akka-docs/rst/scala/http/routing-dsl/directives/security-directives/extractCredentials.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/security-directives/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/security-directives/index.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/security-directives/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessages.rst b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessages.rst similarity index 50% rename from akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessages.rst rename to akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessages.rst index 9a94b83951..517a21cff3 100644 --- a/akka-docs-dev/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebsocketMessages.rst +++ b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessages.rst @@ -1,27 +1,27 @@ -.. _-handleWebsocketMessages-: +.. _-handleWebSocketMessages-: -handleWebsocketMessages +handleWebSocketMessages ======================= Signature --------- -.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebsocketDirectives.scala - :snippet: handleWebsocketMessages +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebSocketDirectives.scala + :snippet: handleWebSocketMessages Description ----------- -The directive first checks if the request was a valid Websocket handshake request and if yes, it completes the request -with the passed handler. Otherwise, the request is rejected with an ``ExpectedWebsocketRequestRejection``. +The directive first checks if the request was a valid WebSocket handshake request and if yes, it completes the request +with the passed handler. Otherwise, the request is rejected with an ``ExpectedWebSocketRequestRejection``. -Websocket subprotocols offered in the ``Sec-Websocket-Protocol`` header of the request are ignored. If you want to -support several protocols use the :ref:`-handleWebsocketMessagesForProtocol-` directive, instead. +WebSocket subprotocols offered in the ``Sec-WebSocket-Protocol`` header of the request are ignored. If you want to +support several protocols use the :ref:`-handleWebSocketMessagesForProtocol-` directive, instead. -For more information about the Websocket support, see :ref:`server-side-websocket-support-scala`. +For more information about the WebSocket support, see :ref:`server-side-websocket-support-scala`. Example ------- -.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala +.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala :snippet: greeter-service diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessagesForProtocol.rst b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessagesForProtocol.rst new file mode 100644 index 0000000000..4fd55bd88e --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/handleWebSocketMessagesForProtocol.rst @@ -0,0 +1,31 @@ +.. 
_-handleWebSocketMessagesForProtocol-: + +handleWebSocketMessagesForProtocol +================================== + +Signature +--------- + +.. includecode2:: /../../akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebSocketDirectives.scala + :snippet: handleWebSocketMessagesForProtocol + +Description +----------- +Handles WebSocket requests with the given handler if the given subprotocol is offered in the ``Sec-WebSocket-Protocol`` +header of the request and rejects other requests with an ``ExpectedWebSocketRequestRejection`` or an +``UnsupportedWebSocketSubprotocolRejection``. + +The directive first checks if the request was a valid WebSocket handshake request and if the request offers the passed +subprotocol name. If yes, the directive completes the request with the passed handler. Otherwise, the request is +either rejected with an ``ExpectedWebSocketRequestRejection`` or an ``UnsupportedWebSocketSubprotocolRejection``. + +To support several subprotocols, for example at the same path, several instances of ``handleWebSocketMessagesForProtocol`` can +be chained using ``~`` as you can see in the below example. + +For more information about the WebSocket support, see :ref:`server-side-websocket-support-scala`. + +Example +------- + +.. includecode2:: ../../../../code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala + :snippet: handle-multiple-protocols diff --git a/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst new file mode 100644 index 0000000000..06113c1bfd --- /dev/null +++ b/akka-docs/rst/scala/http/routing-dsl/directives/websocket-directives/index.rst @@ -0,0 +1,10 @@ +.. _WebSocketDirectives: + +WebSocketDirectives +=================== + +.. 
toctree:: + :maxdepth: 1 + + handleWebSocketMessages + handleWebSocketMessagesForProtocol \ No newline at end of file diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/exception-handling.rst b/akka-docs/rst/scala/http/routing-dsl/exception-handling.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/exception-handling.rst rename to akka-docs/rst/scala/http/routing-dsl/exception-handling.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/index.rst b/akka-docs/rst/scala/http/routing-dsl/index.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/index.rst rename to akka-docs/rst/scala/http/routing-dsl/index.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/overview.rst b/akka-docs/rst/scala/http/routing-dsl/overview.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/overview.rst rename to akka-docs/rst/scala/http/routing-dsl/overview.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/path-matchers.rst b/akka-docs/rst/scala/http/routing-dsl/path-matchers.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/path-matchers.rst rename to akka-docs/rst/scala/http/routing-dsl/path-matchers.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/rejections.rst b/akka-docs/rst/scala/http/routing-dsl/rejections.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/rejections.rst rename to akka-docs/rst/scala/http/routing-dsl/rejections.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/routes.rst b/akka-docs/rst/scala/http/routing-dsl/routes.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/routes.rst rename to akka-docs/rst/scala/http/routing-dsl/routes.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/testkit.rst b/akka-docs/rst/scala/http/routing-dsl/testkit.rst similarity index 100% rename from akka-docs-dev/rst/scala/http/routing-dsl/testkit.rst rename to akka-docs/rst/scala/http/routing-dsl/testkit.rst diff --git a/akka-docs-dev/rst/scala/http/routing-dsl/websocket-support.rst b/akka-docs/rst/scala/http/routing-dsl/websocket-support.rst similarity index 79% rename from akka-docs-dev/rst/scala/http/routing-dsl/websocket-support.rst rename to akka-docs/rst/scala/http/routing-dsl/websocket-support.rst index 547123c29c..2748a0fdb4 100644 --- a/akka-docs-dev/rst/scala/http/routing-dsl/websocket-support.rst +++ b/akka-docs/rst/scala/http/routing-dsl/websocket-support.rst @@ -44,20 +44,20 @@ to create a streaming message from an Akka Stream source. Server API ---------- -The entrypoint for the Websocket API is the synthetic ``UpgradeToWebsocket`` header which is added to a request -if Akka HTTP encounters a Websocket upgrade request. +The entrypoint for the WebSocket API is the synthetic ``UpgradeToWebSocket`` header which is added to a request +if Akka HTTP encounters a WebSocket upgrade request. -The Websocket specification mandates that details of the Websocket connection are negotiated by placing special-purpose +The WebSocket specification mandates that details of the WebSocket connection are negotiated by placing special-purpose HTTP-headers into request and response of the HTTP upgrade. In Akka HTTP these HTTP-level details of the WebSocket handshake are hidden from the application and don't need to be managed manually. -Instead, the synthetic ``UpgradeToWebsocket`` represents a valid Websocket upgrade request. 
An application can detect -a Websocket upgrade request by looking for the ``UpgradeToWebsocket`` header. It can choose to accept the upgrade and -start a Websocket connection by responding to that request with an ``HttpResponse`` generated by one of the -``UpgradeToWebsocket.handleMessagesWith`` methods. In its most general form this method expects two arguments: -first, a handler ``Flow[Message, Message, Any]`` that will be used to handle Websocket messages on this connection. +Instead, the synthetic ``UpgradeToWebSocket`` represents a valid WebSocket upgrade request. An application can detect +a WebSocket upgrade request by looking for the ``UpgradeToWebSocket`` header. It can choose to accept the upgrade and +start a WebSocket connection by responding to that request with an ``HttpResponse`` generated by one of the +``UpgradeToWebSocket.handleMessagesWith`` methods. In its most general form this method expects two arguments: +first, a handler ``Flow[Message, Message, Any]`` that will be used to handle WebSocket messages on this connection. Second, the application can optionally choose one of the proposed application-level sub-protocols by inspecting the -values of ``UpgradeToWebsocket.requestedProtocols`` and pass the chosen protocol value to ``handleMessages``. +values of ``UpgradeToWebSocket.requestedProtocols`` and pass the chosen protocol value to ``handleMessages``. Handling Messages +++++++++++++++++ @@ -68,7 +68,7 @@ scenarios this fits very well and such a ``Flow`` can be constructed from a simp There are other use-cases, e.g. in a server-push model, where a server message is sent spontaneously, or in a true bi-directional scenario where input and output aren't logically connected. Providing the handler as a ``Flow`` in -these cases may not fit. Another method, ``UpgradeToWebsocket.handleMessagesWithSinkSource``, is provided +these cases may not fit. Another method, ``UpgradeToWebSocket.handleMessagesWithSinkSource``, is provided which allows to pass an output-generating ``Source[Message, Any]`` and an input-receiving ``Sink[Message, Any]`` independently. Note that a handler is required to consume the data stream of each message to make place for new messages. Otherwise, @@ -79,36 +79,36 @@ Example Let's look at an example_. -Websocket requests come in like any other requests. In the example, requests to ``/greeter`` are expected to be -Websocket requests: +WebSocket requests come in like any other requests. In the example, requests to ``/greeter`` are expected to be +WebSocket requests: -.. includecode:: ../../code/docs/http/scaladsl/server/WebsocketExampleSpec.scala +.. includecode:: ../../code/docs/http/scaladsl/server/WebSocketExampleSpec.scala :include: websocket-request-handling -It uses pattern matching on the path and then inspects the request to query for the ``UpgradeToWebsocket`` header. If -such a header is found, it is used to generate a response by passing a handler for Websocket messages to the +It uses pattern matching on the path and then inspects the request to query for the ``UpgradeToWebSocket`` header. If +such a header is found, it is used to generate a response by passing a handler for WebSocket messages to the ``handleMessages`` method. If no such header is found a "400 Bad Request" response is generated. The passed handler expects text messages where each message is expected to contain (a person's) name and then responds with another text message that contains a greeting: -.. 
includecode:: ../../code/docs/http/scaladsl/server/WebsocketExampleSpec.scala +.. includecode:: ../../code/docs/http/scaladsl/server/WebSocketExampleSpec.scala :include: websocket-handler Routing support --------------- -The routing DSL provides the :ref:`-handleWebsocketMessages-` directive to install a WebSocket handler if the request +The routing DSL provides the :ref:`-handleWebSocketMessages-` directive to install a WebSocket handler if the request was a WebSocket request. Otherwise, the directive rejects the request. Here's the above simple request handler rewritten as a route: -.. includecode2:: ../../code/docs/http/scaladsl/server/directives/WebsocketDirectivesExamplesSpec.scala +.. includecode2:: ../../code/docs/http/scaladsl/server/directives/WebSocketDirectivesExamplesSpec.scala :snippet: greeter-service -The example also includes code demonstrating the testkit support for Websocket services. It allows to create Websocket -requests to run against a route using `WS` which can be used to provide a mock Websocket probe that allows manual -testing of the Websocket handler's behavior if the request was accepted. +The example also includes code demonstrating the testkit support for WebSocket services. It allows to create WebSocket +requests to run against a route using `WS` which can be used to provide a mock WebSocket probe that allows manual +testing of the WebSocket handler's behavior if the request was accepted. -.. _example: @github@/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/WebsocketExampleSpec.scala +.. _example: @github@/akka-docs-dev/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala diff --git a/akka-docs/rst/scala/remoting.rst b/akka-docs/rst/scala/remoting.rst index 9f9739337c..cdc1b42e92 100644 --- a/akka-docs/rst/scala/remoting.rst +++ b/akka-docs/rst/scala/remoting.rst @@ -480,8 +480,8 @@ There are lots of configuration properties that are related to remoting in Akka. .. _remote-configuration-nat: -Remote configuration for NAT and Docker ---------------------------------------- +Akka behind NAT or in a Docker container +---------------------------------------- In setups involving Network Address Translation (NAT), Load Balancers or Docker containers the hostname and port pair that akka binds to will be different than the "logical" diff --git a/akka-docs-dev/rst/scala/stream-index.rst b/akka-docs/rst/scala/stream/index.rst similarity index 69% rename from akka-docs-dev/rst/scala/stream-index.rst rename to akka-docs/rst/scala/stream/index.rst index 8abed2337f..e872c25093 100644 --- a/akka-docs-dev/rst/scala/stream-index.rst +++ b/akka-docs/rst/scala/stream/index.rst @@ -8,7 +8,7 @@ Streams stream-introduction stream-quickstart - ../stream-design + ../../general/stream/stream-design stream-flows-and-basics stream-graphs stream-composition @@ -19,7 +19,8 @@ Streams stream-io stream-parallelism stream-testkit - ../stages-overview + ../../general/stream/stages-overview stream-cookbook - ../stream-configuration + ../../general/stream/stream-configuration migration-guide-1.0-2.x-scala + migration-guide-2.0-2.4-scala diff --git a/akka-docs/rst/scala/stream/migration-guide-1.0-2.x-scala.rst b/akka-docs/rst/scala/stream/migration-guide-1.0-2.x-scala.rst new file mode 100644 index 0000000000..c48a2c4f81 --- /dev/null +++ b/akka-docs/rst/scala/stream/migration-guide-1.0-2.x-scala.rst @@ -0,0 +1,9 @@ +.. 
_migration-2.0-scala: + +########################## +Migration Guide 1.0 to 2.x +########################## + +For this migration guide see `the documentation for Akka Streams 2.0`_. + +.. _`the documentation for Akka Streams 2.0`: http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0.2/scala/migration-guide-1.0-2.x-scala.html diff --git a/akka-docs/rst/scala/stream/migration-guide-2.0-2.4-scala.rst b/akka-docs/rst/scala/stream/migration-guide-2.0-2.4-scala.rst new file mode 100644 index 0000000000..d63b692a73 --- /dev/null +++ b/akka-docs/rst/scala/stream/migration-guide-2.0-2.4-scala.rst @@ -0,0 +1,123 @@ +.. _migration-streams-2.0-2.4-scala: + +############################## +Migration Guide 2.0.x to 2.4.x +############################## + +General notes +============= + +akka.Done and akka.NotUsed replacing Unit and BoxedUnit +------------------------------------------------------- + +To provide clearer signatures and have a unified API for both +Java and Scala two new types have been introduced: + +``akka.NotUsed`` is meant to be used instead of ``Unit`` in Scala +and ``BoxedUnit`` in Java to signify that the type parameter is required +but not actually used. This is commonly the case with ``Source``, ``Flow`` and ``Sink`` +that do not materialize into any value. + +``akka.Done`` is added for the use case where it is boxed inside another object to signify +completion but there is no actual value attached to the completion. It is used to replace +occurrences of ``Future<BoxedUnit>`` with ``Future<Done>`` in Java and ``Future[Unit]`` with +``Future[Done]`` in Scala. + +All previous usage of ``Unit`` and ``BoxedUnit`` for these two cases in the Akka Streams APIs +has been updated. + +This means that Scala code like this:: + + val source: Source[Int, Unit] = Source(1 to 5) + val sink: Sink[Int, Future[Unit]] = Sink.ignore + + +needs to be changed into:: + + val source: Source[Int, NotUsed] = Source(1 to 5) + val sink: Sink[Int, Future[Done]] = Sink.ignore + +These changes apply to all the places where streams are used, which means that signatures +in the persistent query APIs are also affected. + +Removed ImplicitMaterializer +============================ + +The helper trait :class:`ImplicitMaterializer` has been removed as it was hard to find and the feature was not worth +the extra trait. Defining an implicit materializer inside an enclosing actor can be done this way:: + + final implicit val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(context.system)) + +Changed Operators +================= + +``expand()`` is now based on an Iterator +---------------------------------------- + +Previously the ``expand`` combinator required two functions as input: the first +one lifted incoming values into an extrapolation state and the second one +extracted values from that, possibly evolving that state. This has been +simplified into a single function that turns the incoming element into an +Iterator. + +The most prominent use-case previously was to just repeat the previously received value:: + + Flow[Int].expand(identity)(s => (s, s)) // This no longer works! + +In Akka 2.4.x this is simplified to: + +.. includecode:: ../code/docs/stream/MigrationsScala.scala#expand-continually + +If state needs to be kept during the expansion process then this state will +need to be managed by the Iterator. The example of counting the number of +expansions might previously have looked like:: + + // This no longer works!
+ Flow[Int].expand((_, 0)){ case (in, count) => (in, count) -> (in, count + 1) } + +In Akka 2.4.x this is formulated like so: + +.. includecode:: ../code/docs/stream/MigrationsScala.scala#expand-state + +Changes in Akka HTTP +==================== + +Routing settings parameter name +------------------------------- + +``RoutingSettings`` was previously the only settings object available on ``RequestContext``, +and was accessible via ``settings``. We have now made it possible to configure the parser +settings as well, so ``RoutingSettings`` is now ``routingSettings`` and ``ParserSettings`` is +now accessible via ``parserSettings``. + +Changed Sources / Sinks +======================= + +IO Sources / Sinks materialize IOResult +--------------------------------------- + +Materialized values of the following sources and sinks: + + * ``FileIO.fromFile`` + * ``FileIO.toFile`` + * ``StreamConverters.fromInputStream`` + * ``StreamConverters.fromOutputStream`` + +have been changed from ``Long`` to ``akka.stream.io.IOResult``. +This makes it possible to signal more complicated completion scenarios. For example, on failure it is now possible +to return the exception and the number of bytes written until that exception occurred. + +PushStage, PushPullStage and DetachedStage have been deprecated in favor of GraphStage +====================================================================================== + +The :class:`PushStage`, :class:`PushPullStage` and :class:`DetachedStage` classes have been deprecated and +should be replaced by :class:`GraphStage` (:ref:`graphstage-scala`) which is now a single powerful API +for custom stream processing. + +Update procedure +---------------- + +Please consult the :class:`GraphStage` documentation (:ref:`graphstage-scala`) and the `previous migration guide`_ +on migrating from :class:`AsyncStage` to :class:`GraphStage`. + +.. _`previous migration guide`: http://doc.akka.io/docs/akka-stream-and-http-experimental/2.0.2/scala/migration-guide-1.0-2.x-scala.html#AsyncStage_has_been_replaced_by_GraphStage diff --git a/akka-docs-dev/rst/scala/stream-composition.rst b/akka-docs/rst/scala/stream/stream-composition.rst similarity index 89% rename from akka-docs-dev/rst/scala/stream-composition.rst rename to akka-docs/rst/scala/stream/stream-composition.rst index 592141b384..7fde29fc70 100644 --- a/akka-docs-dev/rst/scala/stream-composition.rst +++ b/akka-docs/rst/scala/stream/stream-composition.rst @@ -17,7 +17,7 @@ we illustrate the most common used stages viewed as "boxes". | -.. image:: ../images/compose_shapes.png +.. image:: ../../images/compose_shapes.png :align: center | @@ -42,13 +42,13 @@ hiding them behind a *shape* that looks like a :class:`Source`, :class:`Flow`, e | -.. image:: ../images/compose_composites.png +.. image:: ../../images/compose_composites.png :align: center | One interesting example above is a :class:`Flow` which is composed of a disconnected :class:`Sink` and :class:`Source`. -This can be achieved by using the ``wrap()`` constructor method on :class:`Flow` which takes the two parts as +This can be achieved by using the ``fromSinkAndSource()`` constructor method on :class:`Flow` which takes the two parts as parameters. The example :class:`BidiFlow` demonstrates that internally a module can be of arbitrary complexity, and the exposed @@ -63,7 +63,7 @@ that is built from a composite :class:`Source` and a composite :class:`Sink` (wh | -.. image:: ../images/compose_nested_flow.png +..
image:: ../../images/compose_nested_flow.png :align: center | @@ -78,7 +78,7 @@ with the rest of the graph), but this demonstrates the uniform underlying model. If we try to build a code snippet that corresponds to the above diagram, our first try might look like this: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#non-nested-flow +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#non-nested-flow It is clear however that there is no nesting present in our first attempt, since the library cannot figure out where we intended to put composite module boundaries, it is our responsibility to do that. If we are using the @@ -87,7 +87,7 @@ methods ``withAttributes()`` or ``named()`` (where the latter is just a shorthan The following code demonstrates how to achieve the desired nesting: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#nested-flow +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#nested-flow Once we have hidden the internals of our components, they act like any other built-in component of similar shape. If we hide some of the internals of our composites, the result looks just like if any other predefine component has been @@ -95,7 +95,7 @@ used: | -.. image:: ../images/compose_nested_flow_opaque.png +.. image:: ../../images/compose_nested_flow_opaque.png :align: center | @@ -103,7 +103,7 @@ used: If we look at usage of built-in components, and our custom components, there is no difference in usage as the code snippet below demonstrates. -.. includecode:: code/docs/stream/CompositionDocSpec.scala#reuse +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#reuse Composing complex systems ------------------------- @@ -119,7 +119,7 @@ As a first example, let's look at a more complex layout: | -.. image:: ../images/compose_graph.png +.. image:: ../../images/compose_graph.png :align: center | @@ -129,13 +129,13 @@ can be materialized) that encapsulates a non-trivial stream processing network. directed and non-directed cycles. The ``runnable()`` method of the :class:`GraphDSL` object allows the creation of a general, closed, and runnable graph. For example the network on the diagram can be realized like this: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#complex-graph +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#complex-graph In the code above we used the implicit port numbering feature (to make the graph more readable and similar to the diagram) and we imported :class:`Source` s, :class:`Sink` s and :class:`Flow` s explicitly. It is possible to refer to the ports explicitly, and it is not necessary to import our linear stages via ``add()``, so another version might look like this: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#complex-graph-alt +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#complex-graph-alt | @@ -146,14 +146,14 @@ from the previous example, what remains is a partial graph: | -.. image:: ../images/compose_graph_partial.png +.. image:: ../../images/compose_graph_partial.png :align: center | We can recreate a similar graph in code, using the DSL in a similar way than before: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#partial-graph +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#partial-graph The only new addition is the return value of the builder block, which is a :class:`Shape`. All graphs (including :class:`Source`, :class:`BidiFlow`, etc) have a shape, which encodes the *typed* ports of the module. 
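Here is a quick sketch of a builder block that returns a ``FlowShape`` and of how the resulting partial graph can be lifted back into the linear DSL (this is not one of the snippets referenced above; the stage and the names are made up for illustration)::

    import akka.stream.FlowShape
    import akka.stream.scaladsl.{ Flow, GraphDSL }

    // A partial graph: the builder block returns a FlowShape, i.e. exactly one
    // input port and one output port (here simply those of a single map stage).
    val partial = GraphDSL.create() { implicit builder =>
      val doubler = builder.add(Flow[Int].map(_ * 2))
      FlowShape(doubler.in, doubler.out)
    }

    // Because the returned shape is a FlowShape, the partial graph can be used
    // from the linear DSL again.
    val doublerFlow: Flow[Int, Int, akka.NotUsed] = Flow.fromGraph(partial)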
In our example @@ -166,31 +166,31 @@ it is a good practice to give names to modules to help debugging. | -.. image:: ../images/compose_graph_shape.png +.. image:: ../../images/compose_graph_shape.png :align: center | Since our partial graph has the right shape, it can be already used in the simpler, linear DSL: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#partial-use +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#partial-use It is not possible to use it as a :class:`Flow` yet, though (i.e. we cannot call ``.filter()`` on it), but :class:`Flow` -has a ``wrap()`` method that just adds the DSL to a :class:`FlowShape`. There are similar methods on :class:`Source`, +has a ``fromGraph()`` method that just adds the DSL to a :class:`FlowShape`. There are similar methods on :class:`Source`, :class:`Sink` and :class:`BidiShape`, so it is easy to get back to the simpler DSL if a graph has the right shape. For convenience, it is also possible to skip the partial graph creation, and use one of the convenience creator methods. To demonstrate this, we will create the following graph: | -.. image:: ../images/compose_graph_flow.png +.. image:: ../../images/compose_graph_flow.png :align: center | The code version of the above closed graph might look like this: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#partial-flow-dsl +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#partial-flow-dsl .. note:: All graph builder sections check if the resulting graph has all ports connected except the exposed ones and will @@ -199,7 +199,7 @@ The code version of the above closed graph might look like this: We are still in debt of demonstrating that :class:`RunnableGraph` is a component just like any other, which can be embedded in graphs. In the following snippet we embed one closed graph in another: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#embed-closed +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#embed-closed The type of the imported module indicates that the imported module has a :class:`ClosedShape`, and so we are not able to wire it to anything else inside the enclosing closed graph. Nevertheless, this "island" is embedded properly, @@ -242,7 +242,7 @@ The propagation of the individual materialized values from the enclosed modules | -.. image:: ../images/compose_mat.png +.. image:: ../../images/compose_mat.png :align: center | @@ -251,27 +251,27 @@ To implement the above, first, we create a composite :class:`Source`, where the materialized type of :class:`Promise[Unit]`. By using the combiner function ``Keep.left``, the resulting materialized type is of the nested module (indicated by the color *red* on the diagram): -.. includecode:: code/docs/stream/CompositionDocSpec.scala#mat-combine-1 +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#mat-combine-1 Next, we create a composite :class:`Flow` from two smaller components. Here, the second enclosed :class:`Flow` has a materialized type of :class:`Future[OutgoingConnection]`, and we propagate this to the parent by using ``Keep.right`` as the combiner function (indicated by the color *yellow* on the diagram): -.. includecode:: code/docs/stream/CompositionDocSpec.scala#mat-combine-2 +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#mat-combine-2 As a third step, we create a composite :class:`Sink`, using our ``nestedFlow`` as a building block. 
In this snippet, both the enclosed :class:`Flow` and the folding :class:`Sink` has a materialized value that is interesting for us, so we use ``Keep.both`` to get a :class:`Pair` of them as the materialized type of ``nestedSink`` (indicated by the color *blue* on the diagram) -.. includecode:: code/docs/stream/CompositionDocSpec.scala#mat-combine-3 +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#mat-combine-3 As the last example, we wire together ``nestedSource`` and ``nestedSink`` and we use a custom combiner function to create a yet another materialized type of the resulting :class:`RunnableGraph`. This combiner function just ignores the :class:`Future[Sink]` part, and wraps the other two values in a custom case class :class:`MyClass` (indicated by color *purple* on the diagram): -.. includecode:: code/docs/stream/CompositionDocSpec.scala#mat-combine-4 +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#mat-combine-4 @@ -293,7 +293,7 @@ unless they override them with a custom value. The code below, a modification of an earlier example sets the ``inputBuffer`` attribute on certain modules, but not on others: -.. includecode:: code/docs/stream/CompositionDocSpec.scala#attributes-inheritance +.. includecode:: ../code/docs/stream/CompositionDocSpec.scala#attributes-inheritance The effect is, that each module inherits the ``inputBuffer`` attribute from its enclosing parent, unless it has the same attribute explicitly set. ``nestedSource`` gets the default attributes from the materializer itself. ``nestedSink`` @@ -302,7 +302,7 @@ except the ``map`` stage which has again an explicitly provided attribute overri | -.. image:: ../images/compose_attributes.png +.. image:: ../../images/compose_attributes.png :align: center | diff --git a/akka-docs-dev/rst/scala/stream-cookbook.rst b/akka-docs/rst/scala/stream/stream-cookbook.rst similarity index 89% rename from akka-docs-dev/rst/scala/stream-cookbook.rst rename to akka-docs/rst/scala/stream/stream-cookbook.rst index 4174f78953..823f98f5ab 100644 --- a/akka-docs-dev/rst/scala/stream-cookbook.rst +++ b/akka-docs/rst/scala/stream/stream-cookbook.rst @@ -32,12 +32,12 @@ Logging elements of a stream The simplest solution is to simply use a ``map`` operation and use ``println`` to print the elements received to the console. While this recipe is rather simplistic, it is often suitable for a quick debug session. -.. includecode:: code/docs/stream/cookbook/RecipeLoggingElements.scala#println-debug +.. includecode:: ../code/docs/stream/cookbook/RecipeLoggingElements.scala#println-debug Another approach to logging is to use ``log()`` operation which allows configuring logging for elements flowing through the stream as well as completion and erroring. -.. includecode:: code/docs/stream/cookbook/RecipeLoggingElements.scala#log-custom +.. includecode:: ../code/docs/stream/cookbook/RecipeLoggingElements.scala#log-custom Flattening a stream of sequences -------------------------------- @@ -49,12 +49,12 @@ The ``mapConcat`` operation can be used to implement a one-to-many transformatio in the form of ``In => immutable.Seq[Out]``. In this case we want to map a ``Seq`` of elements to the elements in the collection itself, so we can just call ``mapConcat(identity)``. -.. includecode:: code/docs/stream/cookbook/RecipeFlattenSeq.scala#flattening-seqs +.. 
includecode:: ../code/docs/stream/cookbook/RecipeFlattenSeq.scala#flattening-seqs Draining a stream to a strict collection ---------------------------------------- -**Situation:** A finite sequence of elements is given as a stream, but a scala collection is needed instead. +**Situation:** A finite sequence of elements is given as a stream, but a Scala collection is needed instead. In this recipe we will use the ``grouped`` stream operation that groups incoming elements into a stream of limited size collections (it can be seen as the almost opposite version of the "Flattening a stream of sequences" recipe @@ -63,7 +63,7 @@ with maximum size of ``MaxAllowedSeqSize`` and then we take the first element of :class:`Future` containing a sequence with all the elements of the original up to ``MaxAllowedSeqSize`` size (further elements are dropped). -.. includecode:: code/docs/stream/cookbook/RecipeToStrict.scala#draining-to-seq +.. includecode:: ../code/docs/stream/cookbook/RecipeToStrict.scala#draining-to-seq Calculating the digest of a ByteString stream --------------------------------------------- @@ -83,7 +83,7 @@ we can emit further elements ``onPull`` is called again, and we see ``ctx.isFini source has been depleted already). Since we only want to emit a final element it is enough to call ``ctx.pushAndFinish`` passing the digest ByteString to be emitted. -.. includecode:: code/docs/stream/cookbook/RecipeDigest.scala#calculating-digest +.. includecode:: ../code/docs/stream/cookbook/RecipeDigest.scala#calculating-digest .. _cookbook-parse-lines-scala: @@ -96,7 +96,7 @@ needs to be parsed. The :class:`Framing` helper object contains a convenience method to parse messages from a stream of ``ByteStrings``: -.. includecode:: code/docs/stream/cookbook/RecipeParseLines.scala#parse-lines +.. includecode:: ../code/docs/stream/cookbook/RecipeParseLines.scala#parse-lines Implementing reduce-by-key -------------------------- @@ -111,7 +111,7 @@ we have a stream of streams, where every substream will serve identical words. To count the words, we need to process the stream of streams (the actual groups containing identical words). ``groupBy`` returns a :class:`SubFlow`, which means that we transform the resulting substreams directly. In this case we use -the ``fold`` combinator to aggregate the word itself and the number of its +the ``reduce`` combinator to aggregate the word itself and the number of its occurrences within a tuple :class:`(String, Integer)`. Each substream will then emit one final value—precisely such a pair—when the overall input completes. As a last step we merge back these values from the substreams into one single @@ -126,17 +126,17 @@ any given time. If the ``groupBy`` operator encounters more keys than this number then the stream cannot continue without violating its resource bound, in this case ``groupBy`` will terminate with a failure. -.. includecode:: code/docs/stream/cookbook/RecipeReduceByKey.scala#word-count +.. includecode:: ../code/docs/stream/cookbook/RecipeReduceByKey.scala#word-count By extracting the parts specific to *wordcount* into * a ``groupKey`` function that defines the groups -* a ``foldZero`` that defines the zero element used by the fold on the substream given the group key -* a ``fold`` function that does the actual reduction +* a ``map`` map each element to value that is used by the reduce on the substream +* a ``reduce`` function that does the actual reduction we get a generalized version below: -.. 
includecode:: code/docs/stream/cookbook/RecipeReduceByKey.scala#reduce-by-key-general +.. includecode:: ../code/docs/stream/cookbook/RecipeReduceByKey.scala#reduce-by-key-general .. note:: Please note that the reduce-by-key version we discussed above is sequential @@ -157,7 +157,7 @@ To achieve the desired result, we attack the problem in two steps: * Then we take this new stream of message topic pairs (containing a separate pair for each topic a given message belongs to) and feed it into groupBy, using the topic as the group key. -.. includecode:: code/docs/stream/cookbook/RecipeMultiGroupBy.scala#multi-groupby +.. includecode:: ../code/docs/stream/cookbook/RecipeMultiGroupBy.scala#multi-groupby Working with Graphs =================== @@ -174,14 +174,14 @@ trigger signal arrives. This recipe solves the problem by simply zipping the stream of ``Message`` elments with the stream of ``Trigger`` signals. Since ``Zip`` produces pairs, we simply map the output stream selecting the first element of the pair. -.. includecode:: code/docs/stream/cookbook/RecipeManualTrigger.scala#manually-triggered-stream +.. includecode:: ../code/docs/stream/cookbook/RecipeManualTrigger.scala#manually-triggered-stream Alternatively, instead of using a ``Zip``, and then using ``map`` to get the first element of the pairs, we can avoid creating the pairs in the first place by using ``ZipWith`` which takes a two argument function to produce the output element. If this function would return a pair of the two argument it would be exactly the behavior of ``Zip`` so ``ZipWith`` is a generalization of zipping. -.. includecode:: code/docs/stream/cookbook/RecipeManualTrigger.scala#manually-triggered-stream-zipwith +.. includecode:: ../code/docs/stream/cookbook/RecipeManualTrigger.scala#manually-triggered-stream-zipwith .. _cookbook-balance-scala: @@ -199,7 +199,7 @@ The graph consists of a ``Balance`` node which is a special fan-out operation th downstream consumers. In a ``for`` loop we wire all of our desired workers as outputs of this balancer element, then we wire the outputs of these workers to a ``Merge`` element that will collect the results from the workers. -.. includecode:: code/docs/stream/cookbook/RecipeWorkerPool.scala#worker-pool +.. includecode:: ../code/docs/stream/cookbook/RecipeWorkerPool.scala#worker-pool Working with rate ================= @@ -223,7 +223,7 @@ case this is ``identity`` so our folding state starts form the message itself. T special: given the aggregate value (the last message) and the new element (the freshest element) our aggregate state becomes simply the freshest element. This choice of functions results in a simple dropping operation. -.. includecode:: code/docs/stream/cookbook/RecipeSimpleDrop.scala#simple-drop +.. includecode:: ../code/docs/stream/cookbook/RecipeSimpleDrop.scala#simple-drop Dropping broadcast ------------------ @@ -238,7 +238,7 @@ defining a dropping strategy instead of the default ``Backpressure``. This allow between the different consumers (the buffer smooths out small rate variances), but also allows faster consumers to progress by dropping from the buffer of the slow consumers if necessary. -.. includecode:: code/docs/stream/cookbook/RecipeDroppyBroadcast.scala#droppy-bcast +.. includecode:: ../code/docs/stream/cookbook/RecipeDroppyBroadcast.scala#droppy-bcast Collecting missed ticks ----------------------- @@ -258,7 +258,7 @@ We will use ``conflate`` to solve the problem. 
Conflate takes two functions: As a result, we have a flow of ``Int`` where the number represents the missed ticks. A number 0 means that we were able to consume the tick fast enough (i.e. zero means: 1 non-missed tick + 0 missed ticks) -.. includecode:: code/docs/stream/cookbook/RecipeMissedTicks.scala#missed-ticks +.. includecode:: ../code/docs/stream/cookbook/RecipeMissedTicks.scala#missed-ticks Create a stream processor that repeats the last element seen ------------------------------------------------------------ @@ -276,7 +276,7 @@ to feed the downstream if no upstream element is ready yet. In the ``onPush()`` of ``onPull()``). The downstream ``onPull`` handler is very similar, we immediately relieve the downstream by emitting ``currentValue``. -.. includecode:: code/docs/stream/cookbook/RecipeHold.scala#hold-version-1 +.. includecode:: ../code/docs/stream/cookbook/RecipeHold.scala#hold-version-1 While it is relatively simple, the drawback of the first version is that it needs an arbitrary initial element which is not always possible to provide. Hence, we create a second version where the downstream might need to wait in one single @@ -287,9 +287,9 @@ We introduce a boolean variable ``waitingFirstValue`` to denote whether the firs a null can be used with the same purpose). In the downstream ``onPull()`` handler the difference from the previous version is that we call ``holdDownstream()`` if the first element is not yet available and thus blocking our downstream. The upstream ``onPush()`` handler sets ``waitingFirstValue`` to false, and after checking if ``holdDownstream()`` has been called it -either releaves the upstream producer, or both the upstream producer and downstream consumer by calling ``pushAndPull()`` +either relieves the upstream producer, or both the upstream producer and downstream consumer by calling ``pushAndPull()`` -.. includecode:: code/docs/stream/cookbook/RecipeHold.scala#hold-version-2 +.. includecode:: ../code/docs/stream/cookbook/RecipeHold.scala#hold-version-2 Globally limiting the rate of a set of streams ---------------------------------------------- @@ -310,13 +310,13 @@ of the sender is added to a queue. Once the timer for replenishing the pending p message, we increment the pending permits counter and send a reply to each of the waiting senders. If there are more waiting senders than permits available we will stay in the ``closed`` state. -.. includecode:: code/docs/stream/cookbook/RecipeGlobalRateLimit.scala#global-limiter-actor +.. includecode:: ../code/docs/stream/cookbook/RecipeGlobalRateLimit.scala#global-limiter-actor To create a Flow that uses this global limiter actor we use the ``mapAsync`` function with the combination of the ``ask`` pattern. We also define a timeout, so if a reply is not received during the configured maximum wait period the returned future from ``ask`` will fail, which will fail the corresponding stream as well. -.. includecode:: code/docs/stream/cookbook/RecipeGlobalRateLimit.scala#global-limiter-flow +.. includecode:: ../code/docs/stream/cookbook/RecipeGlobalRateLimit.scala#global-limiter-flow .. note:: The global actor used for limiting introduces a global bottleneck. You might want to assign a dedicated dispatcher @@ -342,7 +342,7 @@ which implements the following logic: Both ``onPush()`` and ``onPull()`` calls ``emitChunkOrPull()`` the only difference is that the push handler also stores the incoming chunk by appending to the end of the buffer. -.. 
includecode:: code/docs/stream/cookbook/RecipeByteStrings.scala#bytestring-chunker +.. includecode:: ../code/docs/stream/cookbook/RecipeByteStrings.scala#bytestring-chunker Limit the number of bytes passing through a stream of ByteStrings ----------------------------------------------------------------- @@ -354,7 +354,7 @@ This recipe uses a :class:`PushStage` to implement the desired feature. In the o ``onPush()`` we just update a counter and see if it gets larger than ``maximumBytes``. If a violation happens we signal failure, otherwise we forward the chunk we have received. -.. includecode:: code/docs/stream/cookbook/RecipeByteStrings.scala#bytes-limiter +.. includecode:: ../code/docs/stream/cookbook/RecipeByteStrings.scala#bytes-limiter Compact ByteStrings in a stream of ByteStrings ---------------------------------------------- @@ -366,7 +366,7 @@ chain we want to have clean copies that are no longer referencing the original B The recipe is a simple use of map, calling the ``compact()`` method of the :class:`ByteString` elements. This does copying of the underlying arrays, so this should be the last element of a long chain if used. -.. includecode:: code/docs/stream/cookbook/RecipeByteStrings.scala#compacting-bytestrings +.. includecode:: ../code/docs/stream/cookbook/RecipeByteStrings.scala#compacting-bytestrings Injecting keep-alive messages into a stream of ByteStrings ---------------------------------------------------------- @@ -376,4 +376,4 @@ but only if this does not interfere with normal traffic. There is a built-in operation that allows to do this directly: -.. includecode:: code/docs/stream/cookbook/RecipeKeepAlive.scala#inject-keepalive +.. includecode:: ../code/docs/stream/cookbook/RecipeKeepAlive.scala#inject-keepalive diff --git a/akka-docs-dev/rst/scala/stream-customize.rst b/akka-docs/rst/scala/stream/stream-customize.rst similarity index 93% rename from akka-docs-dev/rst/scala/stream-customize.rst rename to akka-docs/rst/scala/stream/stream-customize.rst index 679c9f1ea2..9b349cb1dc 100644 --- a/akka-docs-dev/rst/scala/stream-customize.rst +++ b/akka-docs/rst/scala/stream/stream-customize.rst @@ -29,7 +29,7 @@ As a first motivating example, we will build a new :class:`Source` that will sim cancelled. To start, we need to define the "interface" of our stage, which is called *shape* in Akka Streams terminology (this is explained in more detail in the section :ref:`composition-scala`). This is how this looks like: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#boilerplate-example +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#boilerplate-example As you see, in itself the :class:`GraphStage` only defines the ports of this stage and a shape that contains the ports. It also has, a currently unimplemented method called ``createLogic``. If you recall, stages are reusable in multiple @@ -46,7 +46,7 @@ override ``onPull()`` which indicates that we are free to emit a single element. to stop the stage, we don't need to override it. In the ``onPull`` callback we will simply emit the next number. This is how it looks like in the end: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#custom-source-example +.. 
includecode:: ../code/docs/stream/GraphStageDocSpec.scala#custom-source-example Instances of the above :class:`GraphStage` are subclasses of ``Graph[SourceShape[Int],Unit]`` which means that they are already usable in many situations, but do not provide the DSL methods we usually have for other @@ -54,7 +54,7 @@ that they are already usable in many situations, but do not provide the DSL meth ``Source.fromGraph`` (see :ref:`composition-scala` for more details about graphs and DSLs). Now we can use the source as any other built-in one: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#simple-source-usage +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#simple-source-usage Port states, InHandler and OutHandler ------------------------------------- @@ -88,7 +88,7 @@ in that state. | -.. image:: ../images/outport_transitions.png +.. image:: ../../images/outport_transitions.png :align: center | @@ -103,7 +103,7 @@ The following operations are available for *input* ports: The events corresponding to an *input* port can be received in an :class:`InHandler` instance registered to the input port using ``setHandler(in, handler)``. This handler has three callbacks: -* ``onPush()`` is called when the output port has now a new element. Now it is possible to aquire this element using +* ``onPush()`` is called when the output port has now a new element. Now it is possible to acquire this element using ``grab(in)`` and/or call ``pull(in)`` on the port to request the next element. It is not mandatory to grab the element, but if it is pulled while the element has not been grabbed it will drop the buffered element. * ``onUpstreamFinish()`` is called once the upstream has completed and no longer can be pulled for new elements. @@ -124,7 +124,7 @@ in that state. | -.. image:: ../images/inport_transitions.png +.. image:: ../../images/inport_transitions.png :align: center | @@ -170,7 +170,7 @@ flowing downstream. | -.. image:: ../images/graph_stage_conceptual.png +.. image:: ../../images/graph_stage_conceptual.png :align: center :width: 500 @@ -181,7 +181,7 @@ To illustrate these concepts we create a small :class:`GraphStage` that implemen | -.. image:: ../images/graph_stage_map.png +.. image:: ../../images/graph_stage_map.png :align: center :width: 300 @@ -190,7 +190,7 @@ To illustrate these concepts we create a small :class:`GraphStage` that implemen Map calls ``push(out)`` from the ``onPush()`` handler and it also calls ``pull()`` from the ``onPull`` handler resulting in the conceptual wiring above, and fully expressed in code below: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#one-to-one +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#one-to-one Map is a typical example of a one-to-one transformation of a stream where demand is passed along upstream elements passed on downstream. @@ -200,7 +200,7 @@ filter. The conceptual wiring of ``Filter`` looks like this: | -.. image:: ../images/graph_stage_filter.png +.. image:: ../../images/graph_stage_filter.png :align: center :width: 300 @@ -212,14 +212,14 @@ we return the “ball” to our upstream so that we get the new element. This is example by adding a conditional in the ``onPush`` handler and decide between a ``pull(in)`` or ``push(out)`` call (and of course not having a mapping ``f`` function). -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#many-to-one +.. 
includecode:: ../code/docs/stream/GraphStageDocSpec.scala#many-to-one To complete the picture we define a one-to-many transformation as the next step. We chose a straightforward example stage that emits every upstream element twice downstream. The conceptual wiring of this stage looks like this: | -.. image:: ../images/graph_stage_duplicate.png +.. image:: ../../images/graph_stage_duplicate.png :align: center :width: 300 @@ -229,7 +229,7 @@ This is a stage that has state: an option with the last element it has seen indi has duplicated this last element already or not. We must also make sure to emit the extra element if the upstream completes. -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#one-to-many +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#one-to-many In this case a pull from downstream might be consumed by the stage itself rather than passed along upstream as the stage might contain an element it wants to @@ -242,7 +242,7 @@ This example can be simplified by replacing the usage of a mutable state with ca ``emitMultiple`` which will replace the handlers, emit each of multiple elements and then reinstate the original handlers: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#simpler-one-to-many +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#simpler-one-to-many Finally, to demonstrate all of the stages above, we put them together into a processing chain, @@ -251,7 +251,7 @@ which conceptually would correspond to the following structure: | -.. image:: ../images/graph_stage_chain.png +.. image:: ../../images/graph_stage_chain.png :align: center :width: 700 @@ -259,7 +259,7 @@ which conceptually would correspond to the following structure: In code this is only a few lines, using the ``via`` use our custom stages in a stream: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#graph-stage-chain +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#graph-stage-chain If we attempt to draw the sequence of events, it shows that there is one "event token" in circulation in a potential chain of stages, just like our conceptual "railroad tracks" representation predicts. @@ -267,7 +267,7 @@ in circulation in a potential chain of stages, just like our conceptual "railroa | -.. image:: ../images/graph_stage_tracks_1.png +.. image:: ../../images/graph_stage_tracks_1.png :align: center :width: 700 @@ -305,7 +305,7 @@ In this sample the stage toggles between open and closed, where open means no el stage starts out as closed but as soon as an element is pushed downstream the gate becomes open for a duration of time during which it will consume and drop upstream messages: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#timed +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#timed Using asynchronous side-channels @@ -325,7 +325,7 @@ Sharing the AsyncCallback from the constructor risks race conditions, therefore This example shows an asynchronous side channel graph stage that starts dropping elements when a future completes: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#async-side-channel +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#async-side-channel Integration with actors ----------------------- @@ -361,7 +361,7 @@ stage logic the materialized value must be provided In this sample the materialized value is a future containing the first element to go through the stream: -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#materialized +.. 
includecode:: ../code/docs/stream/GraphStageDocSpec.scala#materialized Using attributes to affect the behavior of a stage @@ -400,7 +400,7 @@ is seen from downstream. | -.. image:: ../images/graph_stage_detached_tracks_1.png +.. image:: ../../images/graph_stage_detached_tracks_1.png :align: center :width: 500 @@ -412,7 +412,7 @@ into the buffer stage. | -.. image:: ../images/graph_stage_detached_tracks_2.png +.. image:: ../../images/graph_stage_detached_tracks_2.png :align: center :width: 500 @@ -424,7 +424,7 @@ initialization. The buffer has demand for up to two elements without any downstr The following code example demonstrates a buffer class corresponding to the message sequence chart above. -.. includecode:: code/docs/stream/GraphStageDocSpec.scala#detached +.. includecode:: ../code/docs/stream/GraphStageDocSpec.scala#detached Thread safety of custom processing stages diff --git a/akka-docs-dev/rst/scala/stream-error.rst b/akka-docs/rst/scala/stream/stream-error.rst similarity index 82% rename from akka-docs-dev/rst/scala/stream-error.rst rename to akka-docs/rst/scala/stream/stream-error.rst index 760e1cb868..6333d447dc 100644 --- a/akka-docs-dev/rst/scala/stream-error.rst +++ b/akka-docs/rst/scala/stream/stream-error.rst @@ -28,11 +28,11 @@ There are three ways to handle exceptions from application code: By default the stopping strategy is used for all exceptions, i.e. the stream will be completed with failure when an exception is thrown. -.. includecode:: code/docs/stream/FlowErrorDocSpec.scala#stop +.. includecode:: ../code/docs/stream/FlowErrorDocSpec.scala#stop The default supervision strategy for a stream can be defined on the settings of the materializer. -.. includecode:: code/docs/stream/FlowErrorDocSpec.scala#resume +.. includecode:: ../code/docs/stream/FlowErrorDocSpec.scala#resume Here you can see that all ``ArithmeticException`` will resume the processing, i.e. the elements that cause the division by zero are effectively dropped. @@ -44,12 +44,12 @@ elements that cause the division by zero are effectively dropped. The supervision strategy can also be defined for all operators of a flow. -.. includecode:: code/docs/stream/FlowErrorDocSpec.scala#resume-section +.. includecode:: ../code/docs/stream/FlowErrorDocSpec.scala#resume-section ``Restart`` works in a similar way as ``Resume`` with the addition that accumulated state, if any, of the failing processing stage will be reset. -.. includecode:: code/docs/stream/FlowErrorDocSpec.scala#restart-section +.. includecode:: ../code/docs/stream/FlowErrorDocSpec.scala#restart-section Errors from mapAsync ==================== @@ -61,11 +61,11 @@ discard those that cannot be found. We start with the tweet stream of authors: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#tweet-authors +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#tweet-authors Assume that we can lookup their email address using: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#email-address-lookup2 +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#email-address-lookup2 The ``Future`` is completed with ``Failure`` if the email is not found. @@ -73,7 +73,7 @@ Transforming the stream of authors to a stream of email addresses by using the ` service can be done with ``mapAsync`` and we use ``Supervision.resumingDecider`` to drop unknown email addresses: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#email-addresses-mapAsync-supervision +.. 
includecode:: ../code/docs/stream/IntegrationDocSpec.scala#email-addresses-mapAsync-supervision If we would not use ``Resume`` the default stopping strategy would complete the stream with failure on the first ``Future`` that was completed with ``Failure``. diff --git a/akka-docs-dev/rst/scala/stream-flows-and-basics.rst b/akka-docs/rst/scala/stream/stream-flows-and-basics.rst similarity index 96% rename from akka-docs-dev/rst/scala/stream-flows-and-basics.rst rename to akka-docs/rst/scala/stream/stream-flows-and-basics.rst index 285618b5c3..c9c6864b8c 100644 --- a/akka-docs-dev/rst/scala/stream-flows-and-basics.rst +++ b/akka-docs/rst/scala/stream/stream-flows-and-basics.rst @@ -76,7 +76,7 @@ starting up Actors). Thanks to Flows being simply a description of the processin thread-safe, and freely shareable*, which means that it is for example safe to share and send them between actors, to have one actor prepare the work, and then have it be materialized at some completely different place in the code. -.. includecode:: code/docs/stream/FlowDocSpec.scala#materialization-in-steps +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#materialization-in-steps After running (materializing) the ``RunnableGraph[T]`` we get back the materialized value of type T. Every stream processing stage can produce a materialized value, and it is the responsibility of the user to combine them to a new type. @@ -90,12 +90,12 @@ there is a convenience method called ``runWith()`` available for ``Sink``, ``Sou a supplied ``Source`` (in order to run a ``Sink``), a ``Sink`` (in order to run a ``Source``) or both a ``Source`` and a ``Sink`` (in order to run a ``Flow``, since it has neither attached yet). -.. includecode:: code/docs/stream/FlowDocSpec.scala#materialization-runWith +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#materialization-runWith It is worth pointing out that since processing stages are *immutable*, connecting them returns a new processing stage, instead of modifying the existing instance, so while constructing long flows, remember to assign the new value to a variable or run it: -.. includecode:: code/docs/stream/FlowDocSpec.scala#source-immutable +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#source-immutable .. note:: By default Akka Streams elements support **exactly one** downstream processing stage. @@ -112,7 +112,7 @@ In the example below we create two running materialized instance of the stream t variable, and both materializations give us a different ``Future`` from the map even though we used the same ``sink`` to refer to the future: -.. includecode:: code/docs/stream/FlowDocSpec.scala#stream-reuse +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#stream-reuse Defining sources, sinks and flows --------------------------------- @@ -120,11 +120,11 @@ Defining sources, sinks and flows The objects :class:`Source` and :class:`Sink` define various ways to create sources and sinks of elements. The following examples show some of the most useful constructs (refer to the API documentation for more details): -.. includecode:: code/docs/stream/FlowDocSpec.scala#source-sink +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#source-sink There are various ways to wire up different parts of a stream, the following examples show some of the available options: -.. includecode:: code/docs/stream/FlowDocSpec.scala#flow-connecting +.. 
includecode:: ../code/docs/stream/FlowDocSpec.scala#flow-connecting Illegal stream elements ----------------------- @@ -242,13 +242,13 @@ consequences: The first point can be countered by pre-fusing and then reusing a stream blueprint as sketched below: -.. includecode:: code/docs/stream/FlowDocSpec.scala#explicit-fusing +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#explicit-fusing In order to balance the effects of the second and third bullet points you will have to insert asynchronous boundaries manually into your flows and graphs by way of adding ``Attributes.asyncBoundary`` to pieces that shall communicate with the rest of the graph in an asynchronous fashion. -.. includecode:: code/docs/stream/FlowDocSpec.scala#flow-async +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#flow-async In this example we create two regions within the flow which will be executed in one Actor each—assuming that adding and multiplying integers is an extremely costly operation this will lead to a performance gain since two CPUs can @@ -258,7 +258,7 @@ by adding information to the flow graph that has been constructed up to this poi | -.. image:: ../images/asyncBoundary.png +.. image:: ../../images/asyncBoundary.png :align: center :width: 700 @@ -291,7 +291,7 @@ to somehow express how these values should be composed to a final value when we many combinator methods have variants that take an additional argument, a function, that will be used to combine the resulting values. Some examples of using these combiners are illustrated in the example below. -.. includecode:: code/docs/stream/FlowDocSpec.scala#flow-mat-combine +.. includecode:: ../code/docs/stream/FlowDocSpec.scala#flow-mat-combine .. note:: diff --git a/akka-docs-dev/rst/scala/stream-graphs.rst b/akka-docs/rst/scala/stream/stream-graphs.rst similarity index 91% rename from akka-docs-dev/rst/scala/stream-graphs.rst rename to akka-docs/rst/scala/stream/stream-graphs.rst index 4f6f77e273..314298904b 100644 --- a/akka-docs-dev/rst/scala/stream-graphs.rst +++ b/akka-docs/rst/scala/stream/stream-graphs.rst @@ -45,14 +45,14 @@ One of the goals of the GraphDSL DSL is to look similar to how one would draw a simple to translate a design from whiteboard to code and be able to relate those two. Let's illustrate this by translating the below hand drawn graph into Akka Streams: -.. image:: ../images/simple-graph-example.png +.. image:: ../../images/simple-graph-example.png Such graph is simple to translate to the Graph DSL since each linear element corresponds to a :class:`Flow`, and each circle corresponds to either a :class:`Junction` or a :class:`Source` or :class:`Sink` if it is beginning or ending a :class:`Flow`. Junctions must always be created with defined type parameters, as otherwise the ``Nothing`` type will be inferred. -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#simple-flow-graph +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#simple-flow-graph .. note:: Junction *reference equality* defines *graph node equality* (i.e. the same merge *instance* used in a GraphDSL @@ -80,7 +80,7 @@ In the example below we prepare a graph that consists of two parallel streams, in which we re-use the same instance of :class:`Flow`, yet it will properly be materialized as two connections between the corresponding Sources and Sinks: -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-reusing-a-flow +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-reusing-a-flow .. 
_partial-flow-graph-scala: @@ -103,7 +103,7 @@ Let's imagine we want to provide users with a specialized element that given 3 i the greatest int value of each zipped triple. We'll want to expose 3 input ports (unconnected sources) and one output port (unconnected sink). -.. includecode:: code/docs/stream/StreamPartialFlowGraphDocSpec.scala#simple-partial-flow-graph +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocSpec.scala#simple-partial-flow-graph As you can see, first we construct the partial graph that contains all the zipping and comparing of stream elements. This partial graph will have three inputs and one output, wherefore we use the :class:`UniformFanInShape`. @@ -143,12 +143,12 @@ from the function passed in . The single outlet must be provided to the ``Source Refer to the example below, in which we create a Source that zips together two numbers, to see this graph construction in action: -.. includecode:: code/docs/stream/StreamPartialFlowGraphDocSpec.scala#source-from-partial-flow-graph +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocSpec.scala#source-from-partial-flow-graph Similarly the same can be done for a ``Sink[T]``, using ``SinkShape.of`` in which case the provided value must be an ``Inlet[T]``. For defining a ``Flow[T]`` we need to expose both an inlet and an outlet: -.. includecode:: code/docs/stream/StreamPartialFlowGraphDocSpec.scala#flow-from-partial-flow-graph +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocSpec.scala#flow-from-partial-flow-graph Combining Sources and Sinks with simplified API ----------------------------------------------- @@ -157,11 +157,11 @@ There is a simplified API you can use to combine sources and sinks with junction ``Merge[In]`` and ``Concat[A]`` without the need for using the Graph DSL. The combine method takes care of constructing the necessary graph underneath. In following example we combine two sources into one (fan-in): -.. includecode:: code/docs/stream/StreamPartialFlowGraphDocSpec.scala#source-combine +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocSpec.scala#source-combine The same can be done for a ``Sink[T]`` but in this case it will be fan-out: -.. includecode:: code/docs/stream/StreamPartialFlowGraphDocSpec.scala#sink-combine +.. includecode:: ../code/docs/stream/StreamPartialFlowGraphDocSpec.scala#sink-combine Building reusable Graph components ---------------------------------- @@ -178,7 +178,7 @@ where jobs of higher priority can be sent. Altogether, our junction will have two input ports of type ``I`` (for the normal and priority jobs) and an output port of type ``O``. To represent this interface, we need to define a custom :class:`Shape`. The following lines show how to do that. -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-shape +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-shape .. _predefined-shapes: @@ -198,20 +198,20 @@ boilerplate: Since our shape has two input ports and one output port, we can just use the :class:`FanInShape` DSL to define our custom shape: -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-shape2 +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-shape2 Now that we have a :class:`Shape` we can wire up a Graph that represents our worker pool. 
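As a rough sketch of what the ``FanInShape`` DSL usage referenced above can look like (class and port names here are illustrative; the authoritative snippet is ``FlowGraphDocSpec.scala#flow-graph-components-shape2``):

```scala
import akka.stream.FanInShape
import akka.stream.FanInShape.{ Init, Name }

// Sketch only: a custom shape with two inlets (normal and priority jobs).
// FanInShape provides the single Outlet[Out] named "out" automatically.
class PriorityWorkerPoolShape2[In, Out](_init: Init[Out] = Name[Out]("PriorityWorkerPool"))
  extends FanInShape[Out](_init) {
  protected override def construct(i: Init[Out]) = new PriorityWorkerPoolShape2[In, Out](i)

  val jobsIn = newInlet[In]("jobsIn")
  val priorityJobsIn = newInlet[In]("priorityJobsIn")
}
```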
First, we will merge incoming normal and priority jobs using ``MergePreferred``, then we will send the jobs to a ``Balance`` junction which will fan-out to a configurable number of workers (flows), finally we merge all these results together and send them out through our only output port. This is expressed by the following code: -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-create +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-create All we need to do now is to use our custom junction in a graph. The following code simulates some simple workers and jobs using plain strings and prints out the results. Actually we used *two* instances of our worker pool junction using ``add()`` twice. -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-use +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-components-use .. _bidi-flow-scala: @@ -228,14 +228,14 @@ this purpose exists the special type :class:`BidiFlow` which is a graph that has exactly two open inlets and two open outlets. The corresponding shape is called :class:`BidiShape` and is defined like this: -.. includecode:: ../../../akka-stream/src/main/scala/akka/stream/Shape.scala +.. includecode:: ../../../../akka-stream/src/main/scala/akka/stream/Shape.scala :include: bidi-shape :exclude: implementation-details-elided A bidirectional flow is defined just like a unidirectional :class:`Flow` as demonstrated for the codec mentioned above: -.. includecode:: code/docs/stream/BidiFlowDocSpec.scala +.. includecode:: ../code/docs/stream/BidiFlowDocSpec.scala :include: codec :exclude: implementation-details-elided @@ -244,7 +244,7 @@ case of a functional 1:1 transformation there is a concise convenience method as shown on the last line. The implementation of the two functions is not difficult either: -.. includecode:: code/docs/stream/BidiFlowDocSpec.scala#codec-impl +.. includecode:: ../code/docs/stream/BidiFlowDocSpec.scala#codec-impl In this way you could easily integrate any other serialization library that turns an object into a sequence of bytes. @@ -254,11 +254,11 @@ a framing protocol means that any received chunk of bytes may correspond to zero or more messages. This is best implemented using a :class:`GraphStage` (see also :ref:`graphstage-scala`). -.. includecode:: code/docs/stream/BidiFlowDocSpec.scala#framing +.. includecode:: ../code/docs/stream/BidiFlowDocSpec.scala#framing With these implementations we can build a protocol stack and test it: -.. includecode:: code/docs/stream/BidiFlowDocSpec.scala#compose +.. includecode:: ../code/docs/stream/BidiFlowDocSpec.scala#compose This example demonstrates how :class:`BidiFlow` subgraphs can be hooked together and also turned around with the ``.reversed`` method. The test @@ -276,12 +276,12 @@ can be used in the graph as an ordinary source or outlet, and which will eventua If the materialized value is needed at more than one place, it is possible to call ``materializedValue`` any number of times to acquire the necessary number of outlets. -.. includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-matvalue +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-matvalue Be careful not to introduce a cycle where the materialized value actually contributes to the materialized value. The following example demonstrates a case where the materialized ``Future`` of a fold is fed back to the fold itself. -.. 
includecode:: code/docs/stream/FlowGraphDocSpec.scala#flow-graph-matvalue-cycle +.. includecode:: ../code/docs/stream/FlowGraphDocSpec.scala#flow-graph-matvalue-cycle .. _graph-cycles-scala: @@ -302,7 +302,7 @@ a ``Merge`` junction. The graph DSL allows the connection arrows to be reversed, which is particularly handy when writing cycles—as we will see there are cases where this is very helpful. -.. includecode:: code/docs/stream/GraphCyclesSpec.scala#deadlocked +.. includecode:: ../code/docs/stream/GraphCyclesSpec.scala#deadlocked Running this we observe that after a few numbers have been printed, no more elements are logged to the console - all processing stops after some time. After some investigation we observe that: @@ -320,7 +320,7 @@ If we modify our feedback loop by replacing the ``Merge`` junction with a ``Merg before trying the other lower priority input ports. Since we feed back through the preferred port it is always guaranteed that the elements in the cycles can flow. -.. includecode:: code/docs/stream/GraphCyclesSpec.scala#unfair +.. includecode:: ../code/docs/stream/GraphCyclesSpec.scala#unfair If we run the example we see that the same sequence of numbers are printed over and over again, but the processing does not stop. Hence, we avoided the deadlock, but ``source`` is still @@ -335,7 +335,7 @@ of initial elements from ``source``. To make our cycle both live (not deadlocking) and fair we can introduce a dropping element on the feedback arc. In this case we chose the ``buffer()`` operation giving it a dropping strategy ``OverflowStrategy.dropHead``. -.. includecode:: code/docs/stream/GraphCyclesSpec.scala#dropping +.. includecode:: ../code/docs/stream/GraphCyclesSpec.scala#dropping If we run this example we see that @@ -354,7 +354,7 @@ the beginning instead. To achieve this we modify our first graph by replacing th Since ``ZipWith`` takes one element from ``source`` *and* from the feedback arc to inject one element into the cycle, we maintain the balance of elements. -.. includecode:: code/docs/stream/GraphCyclesSpec.scala#zipping-dead +.. includecode:: ../code/docs/stream/GraphCyclesSpec.scala#zipping-dead Still, when we try to run the example it turns out that no element is printed at all! After some investigation we realize that: @@ -366,7 +366,7 @@ These two conditions are a typical "chicken-and-egg" problem. The solution is to element into the cycle that is independent from ``source``. We do this by using a ``Concat`` junction on the backwards arc that injects a single element using ``Source.single``. -.. includecode:: code/docs/stream/GraphCyclesSpec.scala#zipping-live +.. includecode:: ../code/docs/stream/GraphCyclesSpec.scala#zipping-live When we run the above example we see that processing starts and never stops. The important takeaway from this example is that balanced cycles often need an initial "kick-off" element to be injected into the cycle. diff --git a/akka-docs-dev/rst/scala/stream-integrations.rst b/akka-docs/rst/scala/stream/stream-integrations.rst similarity index 86% rename from akka-docs-dev/rst/scala/stream-integrations.rst rename to akka-docs/rst/scala/stream/stream-integrations.rst index ba63fcd1f4..b710ccda17 100644 --- a/akka-docs-dev/rst/scala/stream-integrations.rst +++ b/akka-docs/rst/scala/stream/stream-integrations.rst @@ -68,7 +68,7 @@ stream publisher that keeps track of the subscription life cycle and requested e Here is an example of such an actor. It dispatches incoming jobs to the attached subscriber: -.. 
includecode:: code/docs/stream/ActorPublisherDocSpec.scala#job-manager +.. includecode:: ../code/docs/stream/ActorPublisherDocSpec.scala#job-manager You send elements to the stream by calling ``onNext``. You are allowed to send as many elements as have been requested by the stream subscriber. This amount can be inquired with @@ -100,7 +100,7 @@ More detailed information can be found in the API documentation. This is how it can be used as input :class:`Source` to a :class:`Flow`: -.. includecode:: code/docs/stream/ActorPublisherDocSpec.scala#actor-publisher-usage +.. includecode:: ../code/docs/stream/ActorPublisherDocSpec.scala#actor-publisher-usage A publisher that is created with ``Sink.asPublisher`` supports a specified number of subscribers. Additional subscription attempts will be rejected with an :class:`IllegalStateException`. @@ -115,7 +115,7 @@ messages from the stream. It can also receive other, non-stream messages, in the Here is an example of such an actor. It dispatches incoming jobs to child worker actors: -.. includecode:: code/docs/stream/ActorSubscriberDocSpec.scala#worker-pool +.. includecode:: ../code/docs/stream/ActorSubscriberDocSpec.scala#worker-pool Subclass must define the ``RequestStrategy`` to control stream back pressure. After each incoming message the ``ActorSubscriber`` will automatically invoke @@ -133,7 +133,7 @@ More detailed information can be found in the API documentation. This is how it can be used as output :class:`Sink` to a :class:`Flow`: -.. includecode:: code/docs/stream/ActorSubscriberDocSpec.scala#actor-subscriber-usage +.. includecode:: ../code/docs/stream/ActorSubscriberDocSpec.scala#actor-subscriber-usage Integrating with External Services ================================== @@ -144,24 +144,24 @@ performed with ``mapAsync`` or ``mapAsyncUnordered``. For example, sending emails to the authors of selected tweets using an external email service: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#email-server-send +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#email-server-send We start with the tweet stream of authors: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#tweet-authors +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#tweet-authors Assume that we can lookup their email address using: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#email-address-lookup +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#email-address-lookup Transforming the stream of authors to a stream of email addresses by using the ``lookupEmail`` service can be done with ``mapAsync``: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#email-addresses-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#email-addresses-mapAsync Finally, sending the emails: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#send-emails +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#send-emails ``mapAsync`` is applying the given function that is calling out to the external service to each of the elements as they pass through this processing step. The function returns a :class:`Future` @@ -183,23 +183,23 @@ result stream onwards for further processing or storage. Note that ``mapAsync`` preserves the order of the stream elements. In this example the order is not important and then we can use the more efficient ``mapAsyncUnordered``: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#external-service-mapAsyncUnordered +.. 
includecode:: ../code/docs/stream/IntegrationDocSpec.scala#external-service-mapAsyncUnordered In the above example the services conveniently returned a :class:`Future` of the result. If that is not the case you need to wrap the call in a :class:`Future`. If the service call involves blocking you must also make sure that you run it on a dedicated execution context, to avoid starvation and disturbance of other tasks in the system. -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#blocking-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#blocking-mapAsync The configuration of the ``"blocking-dispatcher"`` may look something like: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#blocking-dispatcher-config +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#blocking-dispatcher-config An alternative for blocking calls is to perform them in a ``map`` operation, still using a dedicated dispatcher for that operation. -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#blocking-map +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#blocking-map However, that is not exactly the same as ``mapAsync``, since the ``mapAsync`` may run several calls concurrently, but ``map`` performs them one at a time. @@ -207,7 +207,7 @@ several calls concurrently, but ``map`` performs them one at a time. For a service that is exposed as an actor, or if an actor is used as a gateway in front of an external service, you can use ``ask``: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#save-tweets +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#save-tweets Note that if the ``ask`` is not completed within the given timeout the stream is completed with failure. If that is not desired outcome you can use ``recover`` on the ``ask`` :class:`Future`. @@ -236,14 +236,14 @@ successive calls as long as there is downstream demand of several elements. Here is a fictive service that we can use to illustrate these aspects. -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-service +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-service Elements starting with a lower case character are simulated to take longer time to process. Here is how we can use it with ``mapAsync``: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-mapAsync +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-mapAsync The output may look like this: @@ -300,7 +300,7 @@ calls are limited by the buffer size (4) of the :class:`ActorMaterializerSetting Here is how we can use the same service with ``mapAsyncUnordered``: -.. includecode:: code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-mapAsyncUnordered +.. includecode:: ../code/docs/stream/IntegrationDocSpec.scala#sometimes-slow-mapAsyncUnordered The output may look like this: @@ -378,19 +378,19 @@ An incomplete list of other implementations: The two most important interfaces in Reactive Streams are the :class:`Publisher` and :class:`Subscriber`. -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#imports +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#imports Let us assume that a library provides a publisher of tweets: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#tweets-publisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#tweets-publisher and another library knows how to store author handles in a database: -.. 
includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#author-storage-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#author-storage-subscriber Using an Akka Streams :class:`Flow` we can transform the stream and connect those: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala :include: authors,connect-all The :class:`Publisher` is used as an input :class:`Source` to the flow and the @@ -400,24 +400,24 @@ A :class:`Flow` can also be also converted to a :class:`RunnableGraph[Processor[ materializes to a :class:`Processor` when ``run()`` is called. ``run()`` itself can be called multiple times, resulting in a new :class:`Processor` instance each time. -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#flow-publisher-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#flow-publisher-subscriber A publisher can be connected to a subscriber with the ``subscribe`` method. It is also possible to expose a :class:`Source` as a :class:`Publisher` by using the Publisher-:class:`Sink`: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#source-publisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#source-publisher -A publisher that is created with ``Sink.asPublisher(false)`` supports only a single subscription. +A publisher that is created with ``Sink.asPublisher(fanout = false)`` supports only a single subscription. Additional subscription attempts will be rejected with an :class:`IllegalStateException`. A publisher that supports multiple subscribers using fan-out/broadcasting is created as follows: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala :include: author-alert-subscriber,author-storage-subscriber -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#source-fanoutPublisher +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#source-fanoutPublisher The input buffer size of the stage controls how far apart the slowest subscriber can be from the fastest subscriber before slowing down the stream. @@ -425,11 +425,11 @@ before slowing down the stream. To make the picture complete, it is also possible to expose a :class:`Sink` as a :class:`Subscriber` by using the Subscriber-:class:`Source`: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#sink-subscriber +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#sink-subscriber It is also possible to use re-wrap :class:`Processor` instances as a :class:`Flow` by passing a factory function that will create the :class:`Processor` instances: -.. includecode:: code/docs/stream/ReactiveStreamsDocSpec.scala#use-processor +.. includecode:: ../code/docs/stream/ReactiveStreamsDocSpec.scala#use-processor Please note that a factory is necessary to achieve reusability of the resulting :class:`Flow`. 
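A minimal, self-contained sketch of the Reactive Streams interop described in this file, assuming akka-stream 2.x on the classpath; the ``ActorSystem`` name and the ``1 to 10`` data are illustrative and not taken from ``ReactiveStreamsDocSpec.scala``:

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import org.reactivestreams.{ Publisher, Subscriber }

object ReactiveStreamsInterop extends App {
  implicit val system = ActorSystem("rs-interop")
  implicit val materializer = ActorMaterializer()

  // Expose a Source as a Reactive Streams Publisher; fanout = false means it
  // supports exactly one subscriber, as described above.
  val numbers: Publisher[Int] =
    Source(1 to 10).runWith(Sink.asPublisher(fanout = false))

  // Expose a Sink as a Reactive Streams Subscriber.
  val printer: Subscriber[Int] =
    Source.asSubscriber[Int].to(Sink.foreach[Int](println)).run()

  // Connect the Publisher and Subscriber through an ordinary Flow.
  Source.fromPublisher(numbers).map(_ * 2).to(Sink.fromSubscriber(printer)).run()
}
```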
diff --git a/akka-docs-dev/rst/scala/stream-introduction.rst b/akka-docs/rst/scala/stream/stream-introduction.rst similarity index 100% rename from akka-docs-dev/rst/scala/stream-introduction.rst rename to akka-docs/rst/scala/stream/stream-introduction.rst diff --git a/akka-docs-dev/rst/scala/stream-io.rst b/akka-docs/rst/scala/stream/stream-io.rst similarity index 93% rename from akka-docs-dev/rst/scala/stream-io.rst rename to akka-docs/rst/scala/stream/stream-io.rst index 5b8352a8b4..bb59479a04 100644 --- a/akka-docs-dev/rst/scala/stream-io.rst +++ b/akka-docs/rst/scala/stream/stream-io.rst @@ -19,7 +19,7 @@ Accepting connections: Echo Server In order to implement a simple EchoServer we ``bind`` to a given address, which returns a ``Source[IncomingConnection, Future[ServerBinding]]``, which will emit an :class:`IncomingConnection` element for each new connection that the Server should handle: -.. includecode:: code/docs/stream/io/StreamTcpDocSpec.scala#echo-server-simple-bind +.. includecode:: ../code/docs/stream/io/StreamTcpDocSpec.scala#echo-server-simple-bind Next, we simply handle *each* incoming connection using a :class:`Flow` which will be used as the processing stage to handle and emit ByteStrings from and to the TCP Socket. Since one :class:`ByteString` does not have to necessarily @@ -28,7 +28,7 @@ helper Flow to chunk the inputs up into actual lines of text. The last boolean argument indicates that we require an explicit line ending even for the last message before the connection is closed. In this example we simply add exclamation marks to each incoming text message and push it through the flow: -.. includecode:: code/docs/stream/io/StreamTcpDocSpec.scala#echo-server-simple-handle +.. includecode:: ../code/docs/stream/io/StreamTcpDocSpec.scala#echo-server-simple-handle Notice that while most building blocks in Akka Streams are reusable and freely shareable, this is *not* the case for the incoming connection Flow, since it directly corresponds to an existing, already accepted connection its handling can @@ -52,7 +52,7 @@ Let's say we know a server has exposed a simple command line interface over TCP, and would like to interact with it using Akka Streams over TCP. To open an outgoing connection socket we use the ``outgoingConnection`` method: -.. includecode:: code/docs/stream/io/StreamTcpDocSpec.scala#repl-client +.. includecode:: ../code/docs/stream/io/StreamTcpDocSpec.scala#repl-client The ``repl`` flow we use to handle the server interaction first prints the servers response, then awaits on input from the command line (this blocking call is used here just for the sake of simplicity) and converts it to a @@ -84,7 +84,7 @@ Thankfully in most situations finding the right spot to start the conversation is to the protocol we are trying to implement using Streams. In chat-like applications, which our examples resemble, it makes sense to make the Server initiate the conversation by emitting a "hello" message: -.. includecode:: code/docs/stream/io/StreamTcpDocSpec.scala#welcome-banner-chat-server +.. includecode:: ../code/docs/stream/io/StreamTcpDocSpec.scala#welcome-banner-chat-server The way we constructed a :class:`Flow` using the :class:`GraphDSL` is explained in detail in :ref:`constructing-sources-sinks-flows-from-partial-graphs-scala`, however the basic concepts is rather simple– @@ -113,7 +113,7 @@ on files. Streaming data from a file is as easy as creating a `FileIO.fromFile` given a target file, and an optional ``chunkSize`` which determines the buffer size determined as one "element" in such stream: -..
includecode:: code/docs/stream/io/StreamFileDocSpec.scala#file-source +.. includecode:: ../code/docs/stream/io/StreamFileDocSpec.scala#file-source Please note that these processing stages are backed by Actors and by default are configured to run on a pre-configured threadpool-backed dispatcher dedicated for File IO. This is very important as it isolates the blocking file IO operations from the rest @@ -121,4 +121,4 @@ of the ActorSystem allowing each dispatcher to be utilised in the most efficient dispatcher for file IO operations globally, you can do so by changing the ``akka.stream.blocking-io-dispatcher``, or for a specific stage by specifying a custom Dispatcher in code, like this: -.. includecode:: code/docs/stream/io/StreamFileDocSpec.scala#custom-dispatcher-code +.. includecode:: ../code/docs/stream/io/StreamFileDocSpec.scala#custom-dispatcher-code diff --git a/akka-docs-dev/rst/scala/stream-parallelism.rst b/akka-docs/rst/scala/stream/stream-parallelism.rst similarity index 94% rename from akka-docs-dev/rst/scala/stream-parallelism.rst rename to akka-docs/rst/scala/stream/stream-parallelism.rst index 84969dbec3..c48de591f7 100644 --- a/akka-docs-dev/rst/scala/stream-parallelism.rst +++ b/akka-docs/rst/scala/stream/stream-parallelism.rst @@ -23,7 +23,7 @@ are two pancakes being cooked at the same time, one being cooked on its first si completion. This is how this setup would look like implemented as a stream: -.. includecode:: code/docs/stream/FlowParallelismDocSpec.scala#pipelining +.. includecode:: ../code/docs/stream/FlowParallelismDocSpec.scala#pipelining The two ``map`` stages in sequence (encapsulated in the "frying pan" flows) will be executed in a pipelined way, basically doing the same as Roland with his frying pans: @@ -51,7 +51,7 @@ the results on a shared plate. Whenever a pan becomes empty, he takes the next s In essence he parallelizes the same process over multiple pans. This is how this setup will look like if implemented using streams: -.. includecode:: code/docs/stream/FlowParallelismDocSpec.scala#parallelism +.. includecode:: ../code/docs/stream/FlowParallelismDocSpec.scala#parallelism The benefit of parallelizing is that it is easy to scale. In the pancake example it is easy to add a third frying pan with Patrik's method, but Roland cannot add a third frying pan, @@ -74,7 +74,7 @@ First, let's look at how we can parallelize pipelined processing stages. In the will employ two chefs, each working using Roland's pipelining method, but we use the two chefs in parallel, just like Patrik used the two frying pans. This is how it looks like if expressed as streams: -.. includecode:: code/docs/stream/FlowParallelismDocSpec.scala#parallel-pipeline +.. includecode:: ../code/docs/stream/FlowParallelismDocSpec.scala#parallel-pipeline The above pattern works well if there are many independent jobs that do not depend on the results of each other, but the jobs themselves need multiple processing steps where each step builds on the result of @@ -91,7 +91,7 @@ It is also possible to organize parallelized stages into pipelines. This would m This is again straightforward to implement with the streams API: -.. includecode:: code/docs/stream/FlowParallelismDocSpec.scala#pipelined-parallel +.. includecode:: ../code/docs/stream/FlowParallelismDocSpec.scala#pipelined-parallel This usage pattern is less common but might be usable if a certain step in the pipeline might take wildly different times to finish different jobs. 
The reason is that there are more balance-merge steps in this pattern diff --git a/akka-docs-dev/rst/scala/stream-quickstart.rst b/akka-docs/rst/scala/stream/stream-quickstart.rst similarity index 88% rename from akka-docs-dev/rst/scala/stream-quickstart.rst rename to akka-docs/rst/scala/stream/stream-quickstart.rst index 2bea600bdf..23624a6e4a 100644 --- a/akka-docs-dev/rst/scala/stream-quickstart.rst +++ b/akka-docs/rst/scala/stream/stream-quickstart.rst @@ -15,7 +15,7 @@ allow to control what should happen in such scenarios. Here's the data model we'll be working with throughout the quickstart examples: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#model +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#model .. note:: If you would like to get an overview of the used vocabulary first instead of diving head-first @@ -30,7 +30,7 @@ like for example finding all twitter handles of users who tweet about ``#akka``. In order to prepare our environment by creating an :class:`ActorSystem` and :class:`ActorMaterializer`, which will be responsible for materializing and running the streams we are about to create: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#materializer-setup +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#materializer-setup The :class:`ActorMaterializer` can optionally take :class:`ActorMaterializerSettings` which can be used to define materialization properties, such as default buffer sizes (see also :ref:`stream-buffers-scala`), the dispatcher to @@ -38,7 +38,7 @@ be used by the pipeline etc. These can be overridden with ``withAttributes`` on Let's assume we have a stream of tweets readily available. In Akka this is expressed as a :class:`Source[Out, M]`: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweet-source +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweet-source Streams always start flowing from a :class:`Source[Out,M1]` then can continue through :class:`Flow[In,Out,M2]` elements or more advanced graph elements to finally be consumed by a :class:`Sink[In,M3]` (ignore the type parameters ``M1``, ``M2`` @@ -49,7 +49,7 @@ The operations should look familiar to anyone who has used the Scala Collections however they operate on streams and not collections of data (which is a very important distinction, as some operations only make sense in streaming and vice versa): -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-filter-map +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-filter-map Finally in order to :ref:`materialize ` and run the stream computation we need to attach the Flow to a :class:`Sink` that will get the Flow running. The simplest way to do this is to call @@ -57,18 +57,18 @@ the Flow to a :class:`Sink` that will get the Flow running. The simplest way to the :class:`Sink` `companion object `_. For now let's simply print each author: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreachsink-println +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreachsink-println or by using the shorthand version (which are defined only for the most popular Sinks such as ``Sink.fold`` and ``Sink.foreach``): -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreach-println +.. 
includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#authors-foreach-println Materializing and running a stream always requires a :class:`Materializer` to be in implicit scope (or passed in explicitly, like this: ``.run(materializer)``). The complete snippet looks like this: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#first-sample +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#first-sample Flattening sequences in streams ------------------------------- @@ -77,7 +77,7 @@ we might want to map from one element to a number of elements and receive a "fla works on Scala Collections. In order to get a flattened stream of hashtags from our stream of tweets we can use the ``mapConcat`` combinator: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#hashtags-mapConcat +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#hashtags-mapConcat .. note:: The name ``flatMap`` was consciously avoided due to its proximity with for-comprehensions and monadic composition. @@ -104,7 +104,7 @@ at the expense of not reading as familiarly as collection transformations. Graphs are constructed using :class:`GraphDSL` like this: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#flow-graph-broadcast +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#flow-graph-broadcast As you can see, inside the :class:`GraphDSL` we use an implicit graph builder ``b`` to mutably construct the graph using the ``~>`` "edge operator" (also read as "connect" or "via" or "to"). The operator is provided implicitly @@ -118,7 +118,7 @@ The runnable graph can then be ``run()`` to materialize a stream out of it. Both :class:`Graph` and :class:`RunnableGraph` are *immutable, thread-safe, and freely shareable*. A graph can also have one of several other shapes, with one or more unconnected ports. Having unconnected ports -expresses a grapth that is a *partial graph*. Concepts around composing and nesting graphs in large structures are +expresses a graph that is a *partial graph*. Concepts around composing and nesting graphs in large structures are explained in detail in :ref:`composition-scala`. It is also possible to wrap complex computation graphs as Flows, Sinks or Sources, which will be explained in detail in :ref:`constructing-sources-sinks-flows-from-partial-graphs-scala`. @@ -136,7 +136,7 @@ in either ``OutOfMemoryError`` s or other severe degradations of service respons and must be handled explicitly. For example, if we are only interested in the "*most recent tweets, with a buffer of 10 elements*" this can be expressed using the ``buffer`` element: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-slow-consumption-dropHead +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-slow-consumption-dropHead The ``buffer`` element takes an explicit and required ``OverflowStrategy``, which defines how the buffer should react when it receives another element while it is full. Strategies provided include dropping the oldest element (``dropHead``), @@ -155,7 +155,7 @@ but in general it is possible to deal with finite streams and come up with a nic First, let's write such an element counter using ``Sink.fold`` and see how the types look like: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count +.. 
includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count First we prepare a reusable ``Flow`` that will change each incoming tweet into an integer of value ``1``. We'll use this in order to combine those with a ``Sink.fold`` that will sum all ``Int`` elements of the stream and make its result available as @@ -181,13 +181,13 @@ and materialized multiple times, because it is just the "blueprint" of the strea for example one that consumes a live stream of tweets within a minute, the materialized values for those two materializations will be different, as illustrated by this example: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-runnable-flow-materialized-twice +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-runnable-flow-materialized-twice Many elements in Akka Streams provide materialized values which can be used for obtaining either results of computation or steering these elements which will be discussed in detail in :ref:`stream-materialization-scala`. Summing up this section, now we know what happens behind the scenes when we run this one-liner, which is equivalent to the multi line version above: -.. includecode:: code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count-oneline +.. includecode:: ../code/docs/stream/TwitterStreamQuickstartDocSpec.scala#tweets-fold-count-oneline .. note:: ``runWith()`` is a convenience method that automatically ignores the materialized value of any other stages except diff --git a/akka-docs-dev/rst/scala/stream-rate.rst b/akka-docs/rst/scala/stream/stream-rate.rst similarity index 87% rename from akka-docs-dev/rst/scala/stream-rate.rst rename to akka-docs/rst/scala/stream/stream-rate.rst index 3e43493afa..78f342ac08 100644 --- a/akka-docs-dev/rst/scala/stream-rate.rst +++ b/akka-docs/rst/scala/stream/stream-rate.rst @@ -8,7 +8,7 @@ Akka Streams processing stages are asynchronous and pipelined by default which m an element to its downstream consumer is able to immediately process the next message. To demonstrate what we mean by this, let's take a look at the following example: -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#pipelining +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#pipelining Running the above example, one of the possible outputs looks like this: @@ -64,16 +64,16 @@ to a level suitable for the throughput requirements of the application. Default Alternatively they can be set by passing a :class:`ActorMaterializerSettings` to the materializer: -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#materializer-buffer +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#materializer-buffer If the buffer size needs to be set for segments of a :class:`Flow` only, it is possible by defining a separate :class:`Flow` with these attributes: -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#section-buffer +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#section-buffer Here is an example of a code that demonstrate some of the issues caused by internal buffers: -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#buffering-abstraction-leak +.. 
includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#buffering-abstraction-leak Running the above example one would expect the number *3* to be printed in every 3 seconds (the ``cUndefinedSourceonflate`` step here is configured so that it counts the number of elements received before the downstream ``ZipWith`` consumes them). What @@ -97,7 +97,7 @@ pipeline of an application. The example below will ensure that 1000 jobs (but not more) are dequeued from an external (imaginary) system and stored locally in memory - relieving the external system: -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-backpressure +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-backpressure The next example will also queue up 1000 jobs locally, but if there are more jobs waiting in the imaginary external systems, it makes space for the new element by @@ -105,12 +105,12 @@ dropping one element from the *tail* of the buffer. Dropping from the tail is a it must be noted that this will drop the *youngest* waiting job. If some "fairness" is desired in the sense that we want to be nice to jobs that has been waiting for long, then this option can be useful. -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-droptail +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-droptail Instead of dropping the youngest element from the tail of the buffer a new element can be dropped without enqueueing it to the buffer at all. -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-dropnew +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-dropnew Here is another example with a queue of 1000 jobs, but it makes space for the new element by dropping one element from the *head* of the buffer. This is the *oldest* @@ -119,13 +119,13 @@ resent if not processed in a certain period. The oldest element will be retransmitted soon, (in fact a retransmitted duplicate might be already in the queue!) so it makes sense to drop it first. -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-drophead +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-drophead Compared to the dropping strategies above, dropBuffer drops all the 1000 jobs it has enqueued once the buffer gets full. This aggressive strategy is useful when dropping jobs is preferred to delaying jobs. -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-dropbuffer +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-dropbuffer If our imaginary external job provider is a client using our API, we might want to enforce that the client cannot have more than 1000 queued jobs @@ -133,7 +133,7 @@ otherwise we consider it flooding and terminate the connection. This is easily achievable by the error strategy which simply fails the stream once the buffer gets full. -.. includecode:: code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-fail +.. includecode:: ../code/docs/stream/StreamBuffersRateSpec.scala#explicit-buffers-fail Rate transformation =================== @@ -145,13 +145,13 @@ When a fast producer can not be informed to slow down by backpressure or some ot Below is an example snippet that summarizes fast stream of elements to a standart deviation, mean and count of elements that have arrived while the stats have been calculated. -.. 
includecode:: code/docs/stream/RateTransformationDocSpec.scala#conflate-summarize +.. includecode:: ../code/docs/stream/RateTransformationDocSpec.scala#conflate-summarize This example demonstrates that such flow's rate is decoupled. The element rate at the start of the flow can be much higher that the element rate at the end of the flow. Another possible use of ``conflate`` is to not consider all elements for summary when producer starts getting too fast. Example below demonstrates how ``conflate`` can be used to implement random drop of elements when consumer is not able to keep up with the producer. -.. includecode:: code/docs/stream/RateTransformationDocSpec.scala#conflate-sample +.. includecode:: ../code/docs/stream/RateTransformationDocSpec.scala#conflate-sample Understanding expand -------------------- @@ -160,10 +160,10 @@ Expand helps to deal with slow producers which are unable to keep up with the de As a simple use of ``expand`` here is a flow that sends the same element to consumer when producer does not send any new elements. -.. includecode:: code/docs/stream/RateTransformationDocSpec.scala#expand-last +.. includecode:: ../code/docs/stream/RateTransformationDocSpec.scala#expand-last Expand also allows to keep some state between demand requests from the downstream. Leveraging this, here is a flow that tracks and reports a drift between fast consumer and slow producer. -.. includecode:: code/docs/stream/RateTransformationDocSpec.scala#expand-drift +.. includecode:: ../code/docs/stream/RateTransformationDocSpec.scala#expand-drift Note that all of the elements coming from upstream will go through ``expand`` at least once. This means that the output of this flow is going to report a drift of zero if producer is fast enough, or a larger drift otherwise. diff --git a/akka-docs-dev/rst/scala/stream-testkit.rst b/akka-docs/rst/scala/stream/stream-testkit.rst similarity index 83% rename from akka-docs-dev/rst/scala/stream-testkit.rst rename to akka-docs/rst/scala/stream/stream-testkit.rst index 8b5811b77a..377315e26c 100644 --- a/akka-docs-dev/rst/scala/stream-testkit.rst +++ b/akka-docs/rst/scala/stream/stream-testkit.rst @@ -25,20 +25,20 @@ elements from a predefined collection, running a constructed test flow and asserting on the results that sink produced. Here is an example of a test for a sink: -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#strict-collection +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#strict-collection The same strategy can be applied for sources as well. In the next example we have a source that produces an infinite stream of elements. Such source can be tested by asserting that first arbitrary number of elements hold some condition. Here the ``grouped`` combinator and ``Sink.head`` are very useful. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#grouped-infinite +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#grouped-infinite When testing a flow we need to attach a source and a sink. As both stream ends are under our control, we can choose sources that tests various edge cases of the flow and sinks that ease assertions. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#folded-stream +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#folded-stream TestKit ======= @@ -51,7 +51,7 @@ One of the more straightforward tests would be to materialize stream to a :class:`Future` and then use ``pipe`` pattern to pipe the result of that future to the probe. -.. 
includecode:: code/docs/stream/StreamTestKitDocSpec.scala#pipeto-testprobe +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#pipeto-testprobe Instead of materializing to a future, we can use a :class:`Sink.actorRef` that sends all incoming elements to the given :class:`ActorRef`. Now we can use @@ -59,13 +59,13 @@ assertion methods on :class:`TestProbe` and expect elements one by one as they arrive. We can also assert stream completion by expecting for ``onCompleteMessage`` which was given to :class:`Sink.actorRef`. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#sink-actorref +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#sink-actorref Similarly to :class:`Sink.actorRef` that provides control over received elements, we can use :class:`Source.actorRef` and have full control over elements to be sent. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#source-actorref +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#source-actorref Streams TestKit =============== @@ -83,20 +83,20 @@ provide sources and sinks that materialize to probes that allow fluent API. A sink returned by ``TestSink.probe`` allows manual control over demand and assertions over elements coming downstream. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#test-sink-probe +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#test-sink-probe A source returned by ``TestSource.probe`` can be used for asserting demand or controlling when stream is completed or ended with an error. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#test-source-probe +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#test-source-probe You can also inject exceptions and test sink behaviour on error conditions. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#injecting-failure +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#injecting-failure Test source and sink can be used together in combination when testing flows. -.. includecode:: code/docs/stream/StreamTestKitDocSpec.scala#test-source-and-sink +.. includecode:: ../code/docs/stream/StreamTestKitDocSpec.scala#test-source-and-sink Fuzzing Mode diff --git a/akka-docs/src/test/resources/application.conf b/akka-docs/src/test/resources/application.conf new file mode 100644 index 0000000000..b4d39bb8d2 --- /dev/null +++ b/akka-docs/src/test/resources/application.conf @@ -0,0 +1 @@ +akka.loggers = ["akka.testkit.TestEventListener"] diff --git a/akka-http-core/RunWebsocketAutobahnTestSuite.md b/akka-http-core/RunWebSocketAutobahnTestSuite.md similarity index 100% rename from akka-http-core/RunWebsocketAutobahnTestSuite.md rename to akka-http-core/RunWebSocketAutobahnTestSuite.md diff --git a/akka-http-core/build.sbt b/akka-http-core/build.sbt new file mode 100644 index 0000000000..b48c8cdc38 --- /dev/null +++ b/akka-http-core/build.sbt @@ -0,0 +1,8 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +Formatting.formatSettings +OSGi.httpCore +Dependencies.httpCore +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http-core").value diff --git a/akka-http-core/src/main/java/akka/http/impl/util/Util.java b/akka-http-core/src/main/java/akka/http/impl/util/Util.java index 8ba7217116..3bf4d22921 100644 --- a/akka-http-core/src/main/java/akka/http/impl/util/Util.java +++ b/akka-http-core/src/main/java/akka/http/impl/util/Util.java @@ -1,12 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util; import akka.http.impl.model.JavaUri; import akka.http.javadsl.model.Uri; -import akka.japi.Option; +import scala.compat.java8.OptionConverters; import scala.None$; import scala.collection.immutable.Map$; import scala.collection.immutable.Seq; @@ -14,6 +14,7 @@ import akka.stream.scaladsl.Source; import java.util.Arrays; import java.util.Map; +import java.util.Optional; /** * Contains internal helper methods. @@ -22,8 +23,8 @@ public abstract class Util { @SuppressWarnings("unchecked") // no support for covariance of option in Java // needed to provide covariant conversions that the Java interfaces don't provide automatically. // The alternative would be having to cast around everywhere instead of doing it here in a central place. - public static Option convertOption(scala.Option o) { - return (Option)(Object) akka.japi.Option.fromScalaOption(o); + public static Optional convertOption(scala.Option o) { + return (Optional)(Object) OptionConverters.toJava(o); } @SuppressWarnings("unchecked") // no support for covariance of Publisher in Java // needed to provide covariant conversions that the Java interfaces don't provide automatically. @@ -35,13 +36,12 @@ public abstract class Util { public static Source upcastSource(Source p) { return (Source)(Object) p; } - @SuppressWarnings("unchecked") public static scala.collection.immutable.Map convertMapToScala(Map map) { return emptyMap.$plus$plus(scala.collection.JavaConverters.mapAsScalaMapConverter(map).asScala()); } @SuppressWarnings("unchecked") // contains an upcast - public static scala.Option convertOptionToScala(Option o) { - return ((Option) o).asScala(); + public static scala.Option convertOptionalToScala(Optional o) { + return OptionConverters.toScala((Optional) o); } public static final scala.collection.immutable.Map emptyMap = @@ -57,7 +57,6 @@ public abstract class Util { public static Seq convertIterable(Iterable els) { return scala.collection.JavaConverters.iterableAsScalaIterableConverter((Iterable)els).asScala().toVector(); } - @SuppressWarnings("unchecked") public static Seq convertArray(T[] els) { return Util.convertIterable(Arrays.asList(els)); } @@ -66,23 +65,14 @@ public abstract class Util { return ((JavaUri) uri).uri(); } - public static akka.japi.Option lookupInRegistry(ObjectRegistry registry, int key) { + public static Optional lookupInRegistry(ObjectRegistry registry, int key) { return Util.convertOption(registry.getForKey(key)); } - public static akka.japi.Option lookupInRegistry(ObjectRegistry registry, String key) { + public static Optional lookupInRegistry(ObjectRegistry registry, String key) { return Util.lookupInRegistry(registry, key); } - public static akka.japi.Option lookupInRegistry(ObjectRegistry registry, K key) { + public static Optional lookupInRegistry(ObjectRegistry registry, K key) { return Util.convertOption(registry.getForKey(key)); } - /** - * Temporary replacement for akka.japi.Option.getOrElse until it gets released there. 
- * - * FIXME: remove in favor of a proper japi.Option.getOrElse - */ - public static B getOrElse(Option option, B defaultValue) { - if (option.isDefined()) return option.get(); - else return defaultValue; - } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/HttpsContext.java b/akka-http-core/src/main/java/akka/http/javadsl/HttpsContext.java deleted file mode 100644 index e0dfad72a5..0000000000 --- a/akka-http-core/src/main/java/akka/http/javadsl/HttpsContext.java +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.javadsl; - -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLParameters; - -import akka.japi.Option; -import akka.japi.Util; -import akka.stream.io.ClientAuth; - -import java.util.Collection; - -public abstract class HttpsContext { - - public abstract SSLContext getSslContext(); - - public abstract Option> getEnabledCipherSuites(); - - public abstract Option> getEnabledProtocols(); - - public abstract Option getClientAuth(); - - public abstract Option getSslParameters(); - - //#http-context-creation - public static HttpsContext create(SSLContext sslContext, - Option> enabledCipherSuites, - Option> enabledProtocols, - Option clientAuth, - Option sslParameters) - //#http-context-creation - { - final scala.Option> ecs; - if (enabledCipherSuites.isDefined()) ecs = scala.Option.apply(Util.immutableSeq(enabledCipherSuites.get())); - else ecs = scala.Option.empty(); - final scala.Option> ep; - if(enabledProtocols.isDefined()) ep = scala.Option.apply(Util.immutableSeq(enabledProtocols.get())); - else ep = scala.Option.empty(); - return new akka.http.scaladsl.HttpsContext(sslContext, - ecs, - ep, - clientAuth.asScala(), - sslParameters.asScala()); - } -} diff --git a/akka-http-core/src/main/java/akka/http/javadsl/TimeoutAccess.java b/akka-http-core/src/main/java/akka/http/javadsl/TimeoutAccess.java new file mode 100644 index 0000000000..db833edd1d --- /dev/null +++ b/akka-http-core/src/main/java/akka/http/javadsl/TimeoutAccess.java @@ -0,0 +1,44 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.javadsl; + +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.HttpResponse; +import akka.japi.Function; +import scala.concurrent.duration.Duration; + +/** + * Enables programmatic access to the server-side request timeout logic. + */ +public interface TimeoutAccess { + + /** + * Tries to set a new timeout. + * The timeout period is measured as of the point in time that the end of the request has been received, + * which may be in the past or in the future! + * Use `Duration.Inf` to completely disable request timeout checking for this request. + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! + */ + void updateTimeout(Duration timeout); + + /** + * Tries to set a new timeout handler, which produces the timeout response for a + * given request. Note that the handler must produce the response synchronously and shouldn't block! + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! + */ + void updateHandler(Function handler); + + /** + * Tries to set a new timeout and handler at the same time. + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! 
+ */ + void update(Duration timeout, Function handler); +} diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/BodyPartEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/BodyPartEntity.java index 50349b6661..5b183e333c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/BodyPartEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/BodyPartEntity.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentRange.java index c21e286a61..c0f3919a20 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentRange.java @@ -1,23 +1,26 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.scaladsl.model.ContentRange$; -import akka.japi.Option; + +import java.util.Optional; +import java.util.OptionalLong; +import scala.compat.java8.OptionConverters; public abstract class ContentRange { public abstract boolean isByteContentRange(); public abstract boolean isSatisfiable(); public abstract boolean isOther(); - public abstract Option getSatisfiableFirst(); - public abstract Option getSatisfiableLast(); + public abstract OptionalLong getSatisfiableFirst(); + public abstract OptionalLong getSatisfiableLast(); - public abstract Option getOtherValue(); + public abstract Optional getOtherValue(); - public abstract Option getInstanceLength(); + public abstract OptionalLong getInstanceLength(); public static ContentRange create(long first, long last) { return ContentRange$.MODULE$.apply(first, last); @@ -26,8 +29,8 @@ public abstract class ContentRange { return ContentRange$.MODULE$.apply(first, last, instanceLength); } @SuppressWarnings("unchecked") - public static ContentRange create(long first, long last, Option instanceLength) { - return ContentRange$.MODULE$.apply(first, last, ((Option) (Object) instanceLength).asScala()); + public static ContentRange create(long first, long last, OptionalLong instanceLength) { + return ContentRange$.MODULE$.apply(first, last, OptionConverters.toScala(instanceLength)); } public static ContentRange createUnsatisfiable(long length) { return new akka.http.scaladsl.model.ContentRange.Unsatisfiable(length); diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentType.java b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentType.java deleted file mode 100644 index 9d7d2cf34a..0000000000 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentType.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.javadsl.model; - -import akka.japi.Option; - -/** - * Represents an Http content-type. A content-type consists of a media-type and an optional charset. - */ -public interface ContentType { - - /** - * The media-type of this content-type. - */ - MediaType mediaType(); - - /** - * True if this ContentType is non-textual. - */ - boolean binary(); - - /** - * Returns the charset if this ContentType is non-binary. 
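A hypothetical sketch of using the new `TimeoutAccess` interface above once a reference to it has been obtained from the server-side pipeline. The `access` parameter, the two-second value and the 503 response built via `HttpResponse.create().withStatus(...)` are illustrative assumptions, not part of this change:

```java
import java.util.concurrent.TimeUnit;

import akka.http.javadsl.TimeoutAccess;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.HttpResponse;
import akka.japi.Function;
import scala.concurrent.duration.Duration;

public class TimeoutAccessSketch {
    // How `access` is obtained is outside the scope of this sketch.
    static void tightenTimeout(TimeoutAccess access) {
        // Shorten the timeout for this particular request; Duration.Inf() would disable it.
        access.updateTimeout(Duration.create(2, TimeUnit.SECONDS));

        // Install a synchronous, non-blocking handler that renders the timeout response.
        Function<HttpRequest, HttpResponse> onTimeout =
            request -> HttpResponse.create().withStatus(503);
        access.updateHandler(onTimeout);
    }
}
```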
- */ - Option getCharsetOption(); - - interface Binary extends ContentType { - } - - interface NonBinary extends ContentType { - HttpCharset charset(); - } - - interface WithFixedCharset extends NonBinary { - } - - interface WithCharset extends NonBinary { - } -} diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypeRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypeRange.java index cf40b35cb0..9424b1e728 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypeRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypeRange.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypes.java b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypes.java index 2abd492741..1e3955fc95 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypes.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/ContentTypes.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/DateTime.java b/akka-http-core/src/main/java/akka/http/javadsl/model/DateTime.java index a1a8277a1d..f660da0982 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/DateTime.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/DateTime.java @@ -1,12 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; -import akka.japi.Option; import akka.http.impl.util.Util; +import java.util.Optional; + /** * Immutable, fast and efficient Date + Time implementation without any dependencies. * Does not support TimeZones, all DateTime values are always GMT based. @@ -101,7 +102,7 @@ public abstract class DateTime { * Returns a new DateTime instance parsed from IsoDateTimeString as Some(dateTime). Returns None if * parsing has failed. */ - public static Option fromIsoDateTimeString(String isoDateTimeString) { + public static Optional fromIsoDateTimeString(String isoDateTimeString) { return Util.convertOption(akka.http.scaladsl.model.DateTime.fromIsoDateTimeString(isoDateTimeString)); } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/FormData.java b/akka-http-core/src/main/java/akka/http/javadsl/model/FormData.java index 46c0b22843..8f0a589583 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/FormData.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/FormData.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; @@ -40,6 +40,7 @@ public final class FormData { /** * Creates the FormData from the given parameters. */ + @SafeVarargs public static FormData create(Pair... params) { return new FormData(Query.create(params)); } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/Host.java b/akka-http-core/src/main/java/akka/http/javadsl/model/Host.java index baa40fd7d5..ab3f3ad196 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/Host.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/Host.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
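`FormData.create` (and the `Query.create` it delegates to) are generic varargs factories, which is what the `@SafeVarargs` annotation added above is for. A hypothetical call site, assuming only the factories shown in this patch:

```java
import akka.http.javadsl.model.FormData;
import akka.http.javadsl.model.Query;
import akka.japi.Pair;

public class FormDataSketch {
    public static void main(String[] args) {
        // @SafeVarargs on the factories suppresses the unchecked generic-varargs warning here
        Query query = Query.create(Pair.create("page", "1"), Pair.create("q", "akka"));
        FormData form = FormData.create(Pair.create("user", "jane"), Pair.create("lang", "scala"));

        System.out.println(query);
        System.out.println(form);
    }
}
```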
*/ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharset.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharset.java index 053d476ca9..4cef3b4462 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharset.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharset.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRange.java index 2691d739cf..8d212a40f8 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRanges.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRanges.java index 98a78a20f6..38084ccf6b 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRanges.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsetRanges.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsets.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsets.java index d6eac41a16..a658287a3c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsets.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpCharsets.java @@ -1,12 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.impl.util.Util; import akka.http.scaladsl.model.HttpCharsets$; -import akka.japi.Option; + +import java.util.Optional; /** * Contains a set of predefined charsets. @@ -31,7 +32,7 @@ public final class HttpCharsets { /** * Returns Some(charset) if the charset with the given name was found and None otherwise. */ - public static Option lookup(String name) { + public static Optional lookup(String name) { return Util.lookupInRegistry(HttpCharsets$.MODULE$, name); } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntities.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntities.java index 7c7372b7cf..fe3e4d63ab 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntities.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntities.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java index 33a083d68f..a52444c1db 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpEntity.java @@ -1,16 +1,17 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model; import akka.http.impl.util.Util; import akka.http.scaladsl.model.HttpEntity$; -import akka.japi.Option; import akka.stream.Materializer; import akka.stream.javadsl.Source; import akka.util.ByteString; -import scala.concurrent.Future; + +import java.util.OptionalLong; +import java.util.concurrent.CompletionStage; /** * Represents the entity of an Http message. An entity consists of the content-type of the data @@ -77,7 +78,7 @@ public interface HttpEntity { /** * Returns Some(contentLength) if the length is defined and none otherwise. */ - Option getContentLengthOption(); + OptionalLong getContentLengthOption(); /** * Returns a stream of data bytes this entity consists of. @@ -132,7 +133,7 @@ public interface HttpEntity { * Use getDataBytes and stream processing instead if the expected data is big or * is likely to take a long time. */ - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); /** * The entity type which consists of a predefined fixed ByteString of data. diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpHeader.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpHeader.java index 666dec3c87..45d0171bbc 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpHeader.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpHeader.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; @@ -34,4 +34,14 @@ public abstract class HttpHeader { * Returns !is(nameInLowerCase). */ public abstract boolean isNot(String nameInLowerCase); + + /** + * Returns true iff the header is to be rendered in requests. + */ + public abstract boolean renderInRequests(); + + /** + * Returns true iff the header is to be rendered in responses. + */ + public abstract boolean renderInResponses(); } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java index 61f64c25bb..056e00dd18 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMessage.java @@ -1,13 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; -import akka.japi.Option; import akka.util.ByteString; import java.io.File; +import java.util.Optional; /** * The base type for an Http message (request or response). @@ -37,13 +37,13 @@ public interface HttpMessage { * Try to find the first header with the given name (case-insensitive) and return * Some(header), otherwise this method returns None. */ - Option getHeader(String headerName); + Optional getHeader(String headerName); /** * Try to find the first header of the given class and return * Some(header), otherwise this method returns None. */ - Option getHeader(Class headerClass); + Optional getHeader(Class headerClass); /** * The entity of this message. diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethod.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethod.java index c0ab260bda..fb86379270 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethod.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethod.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. 
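With `toStrict` now returning a `CompletionStage` and `getContentLengthOption` an `OptionalLong`, entity handling composes with plain Java 8 combinators instead of Scala futures. A minimal sketch under stated assumptions: the `HttpEntities.create(String)` factory, `HttpEntity.Strict.getData()` and the `ActorMaterializer` setup are taken from the javadsl/akka-stream APIs of that era and are not part of this hunk:

```java
import java.util.concurrent.CompletionStage;

import akka.actor.ActorSystem;
import akka.http.javadsl.model.HttpEntities;
import akka.http.javadsl.model.HttpEntity;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.util.ByteString;

public class ToStrictSketch {
    public static void main(String[] args) {
        ActorSystem system = ActorSystem.create("to-strict-sketch");
        Materializer materializer = ActorMaterializer.create(system);

        HttpEntity.Strict entity = HttpEntities.create("hello akka-http");

        // getContentLengthOption now returns an OptionalLong instead of akka.japi.Option
        long length = entity.getContentLengthOption().orElse(-1L);
        System.out.println("content length: " + length);

        // toStrict now yields a CompletionStage, so thenApply/thenAccept replace Future.map
        CompletionStage<ByteString> data =
            entity.toStrict(3000, materializer).thenApply(HttpEntity.Strict::getData);

        data.thenAccept(bytes -> {
            System.out.println(bytes.utf8String());
            system.terminate();
        });
    }
}
```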
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; @@ -9,6 +9,13 @@ package akka.http.javadsl.model; * and static constructors to create custom ones. */ public abstract class HttpMethod { + + /** + * Returns the name of the method, always equal to [[value]]. + */ + public final String name() { + return value(); + } /** * Returns the name of the method. */ diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethods.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethods.java index 873bf6428c..01e9fed294 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethods.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpMethods.java @@ -1,13 +1,14 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.impl.util.Util; -import akka.japi.Option; import akka.http.scaladsl.model.HttpMethods$; +import java.util.Optional; + /** * Contains static constants for predefined method types. */ @@ -34,7 +35,7 @@ public final class HttpMethods { /** * Looks up a predefined HTTP method with the given name. */ - public static Option lookup(String name) { + public static Optional lookup(String name) { return Util.lookupInRegistry(HttpMethods$.MODULE$, name); } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocol.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocol.java index e8c358d5ad..1cd5fe059c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocol.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocol.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocols.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocols.java index 5fc71f3e07..fa29092327 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocols.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpProtocols.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java index a9fd6f52ca..35fd6436c5 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpRequest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java index af933983fb..d8cab543b6 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/HttpResponse.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRange.java index 2a0f121f20..293c54f5e6 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRanges.java b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRanges.java index dece75e105..678f4b0156 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRanges.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaRanges.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaType.java b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaType.java deleted file mode 100644 index b53721cc38..0000000000 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaType.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.javadsl.model; - -/** - * Represents an Http media-type. A media-type consists of a main-type and a sub-type. - */ -public interface MediaType { - - /** - * The main-type of this media-type. - */ - String mainType(); - - /** - * The sub-type of this media-type. - */ - String subType(); - - /** - * True when this media-type is generally compressible. - */ - boolean isCompressible(); - - /** - * True when this media-type is not character-based. - */ - boolean binary(); - - boolean isApplication(); - boolean isAudio(); - boolean isImage(); - boolean isMessage(); - boolean isMultipart(); - boolean isText(); - boolean isVideo(); - - /** - * Creates a media-range from this media-type. - */ - MediaRange toRange(); - - /** - * Creates a media-range from this media-type with a given qValue. - */ - MediaRange toRange(float qValue); - - interface Binary extends MediaType { - ContentType.Binary toContentType(); - } - - interface NonBinary extends MediaType { - } - - interface WithFixedCharset extends NonBinary { - ContentType.WithFixedCharset toContentType(); - } - - interface WithOpenCharset extends NonBinary { - ContentType.WithCharset toContentType(HttpCharset charset); - } - - interface Multipart extends WithOpenCharset { - } -} diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaTypes.java b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaTypes.java index 8c3324eceb..695ab8a66d 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/MediaTypes.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/MediaTypes.java @@ -1,13 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.impl.util.Util; import akka.http.scaladsl.model.MediaTypes$; -import akka.japi.Option; -import scala.collection.immutable.List; + +import java.util.Optional; /** * Contains the set of predefined media-types. @@ -197,7 +197,7 @@ public abstract class MediaTypes { /** * Looks up a media-type with the given main-type and sub-type. 
*/ - public static Option lookup(String mainType, String subType) { + public static Optional lookup(String mainType, String subType) { return Util., MediaType, akka.http.scaladsl.model.MediaType>lookupInRegistry(MediaTypes$.MODULE$, new scala.Tuple2(mainType, subType)); } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java b/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java index 14bb0b2ac6..4981390028 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/Multipart.java @@ -1,15 +1,16 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import java.util.Map; -import scala.concurrent.Future; +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import akka.http.javadsl.model.headers.ContentDisposition; import akka.http.javadsl.model.headers.ContentDispositionType; import akka.http.javadsl.model.headers.RangeUnit; -import akka.japi.Option; import akka.stream.Materializer; import akka.stream.javadsl.Source; @@ -30,9 +31,9 @@ public interface Multipart { /** * Converts this content into its strict counterpart. * The given `timeout` denotes the max time that an individual part must be read in. - * The Future is failed with an TimeoutException if one part isn't read completely after the given timeout. + * The CompletionStage is failed with an TimeoutException if one part isn't read completely after the given timeout. */ - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); /** * Creates an entity from this multipart object. @@ -52,13 +53,13 @@ public interface Multipart { Iterable getHeaders(); - Option getContentDispositionHeader(); + Optional getContentDispositionHeader(); Map getDispositionParams(); - Option getDispositionType(); + Optional getDispositionType(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.BodyPart { HttpEntity.Strict getEntity(); @@ -71,7 +72,7 @@ public interface Multipart { interface General extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.General, Multipart.Strict { Source getParts(); @@ -80,7 +81,7 @@ public interface Multipart { } interface BodyPart extends Multipart.BodyPart { - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.General.BodyPart, Multipart.BodyPart.Strict { } @@ -94,7 +95,7 @@ public interface Multipart { interface FormData extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.FormData, Multipart.Strict { Source getParts(); @@ -106,9 +107,9 @@ public interface Multipart { String getName(); Map getAdditionalDispositionParams(); Iterable getAdditionalHeaders(); - Option getFilename(); + Optional getFilename(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends 
Multipart.FormData.BodyPart, Multipart.BodyPart.Strict { } @@ -122,7 +123,7 @@ public interface Multipart { interface ByteRanges extends Multipart { Source getParts(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.ByteRanges, Multipart.Strict { Source getParts(); @@ -136,7 +137,7 @@ public interface Multipart { Iterable getAdditionalHeaders(); akka.http.javadsl.model.headers.ContentRange getContentRangeHeader(); - Future toStrict(long timeoutMillis, Materializer materializer); + CompletionStage toStrict(long timeoutMillis, Materializer materializer); interface Strict extends Multipart.ByteRanges.BodyPart, Multipart.BodyPart.Strict { } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/Query.java b/akka-http-core/src/main/java/akka/http/javadsl/model/Query.java index 6a1472de98..dc3a881c75 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/Query.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/Query.java @@ -1,12 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.impl.model.JavaQuery; import akka.http.scaladsl.model.*; -import akka.japi.Option; import akka.japi.Pair; import akka.parboiled2.CharPredicate; import akka.parboiled2.ParserInput$; @@ -14,12 +13,13 @@ import akka.parboiled2.ParserInput$; import java.nio.charset.Charset; import java.util.List; import java.util.Map; +import java.util.Optional; public abstract class Query { /** * Returns the value of the first parameter with the given key if it exists. */ - public abstract Option get(String key); + public abstract Optional get(String key); /** * Returns the value of the first parameter with the given key or the provided default value. @@ -94,6 +94,7 @@ public abstract class Query { /** * Returns a Query from the given parameters. */ + @SafeVarargs public static Query create(Pair... params) { return new JavaQuery(UriJavaAccessor.queryApply(params)); } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/RemoteAddress.java b/akka-http-core/src/main/java/akka/http/javadsl/model/RemoteAddress.java index d4eca91fd3..8e391dd46a 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/RemoteAddress.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/RemoteAddress.java @@ -1,18 +1,18 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; -import akka.japi.Option; - import java.net.InetAddress; import java.net.InetSocketAddress; +import java.util.Optional; +import scala.compat.java8.OptionConverters; public abstract class RemoteAddress { public abstract boolean isUnknown(); - public abstract Option getAddress(); + public abstract Optional getAddress(); /** * Returns a port if defined or 0 otherwise. 
@@ -21,7 +21,7 @@ public abstract class RemoteAddress { public static final RemoteAddress UNKNOWN = akka.http.scaladsl.model.RemoteAddress.Unknown$.MODULE$; public static RemoteAddress create(InetAddress address) { - return akka.http.scaladsl.model.RemoteAddress.apply(address, Option.none().asScala()); + return akka.http.scaladsl.model.RemoteAddress.apply(address, OptionConverters.toScala(Optional.empty())); } public static RemoteAddress create(InetSocketAddress address) { return akka.http.scaladsl.model.RemoteAddress.apply(address); diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/RequestEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/RequestEntity.java index e442b7e0ee..66c4ff82f5 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/RequestEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/RequestEntity.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/ResponseEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/ResponseEntity.java index 877fab13d3..322a250715 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/ResponseEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/ResponseEntity.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCode.java b/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCode.java index 0ab20c0219..dc1cc687a6 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCode.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCode.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCodes.java b/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCodes.java index 74edbe89d0..7ef68470c6 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCodes.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/StatusCodes.java @@ -1,12 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; import akka.http.impl.util.Util; import akka.http.scaladsl.model.StatusCodes$; -import akka.japi.Option; + +import java.util.Optional; /** * Contains the set of predefined status-codes along with static methods to access and create custom @@ -109,7 +110,7 @@ public final class StatusCodes { /** * Looks up a status-code by numeric code and returns Some(code). Returns None otherwise. */ - public static Option lookup(int intValue) { + public static Optional lookup(int intValue) { return Util.lookupInRegistry(StatusCodes$.MODULE$, intValue); } } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncoding.java b/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncoding.java index 3940c67f8b..2288bb2e84 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncoding.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncoding.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
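The predefined registries (`HttpCharsets`, `HttpMethods`, `MediaTypes`, `StatusCodes`) now hand back `java.util.Optional` from their `lookup` methods, so call sites can use the standard combinators instead of the old `isDefined()`/`get()` pattern. A small sketch; the lookup keys are arbitrary examples and the element types are elided here:

```java
import java.util.Optional;

import akka.http.javadsl.model.HttpCharsets;
import akka.http.javadsl.model.HttpMethods;
import akka.http.javadsl.model.MediaTypes;
import akka.http.javadsl.model.StatusCodes;

public class LookupSketch {
    public static void main(String[] args) {
        Optional<?> charset = HttpCharsets.lookup("UTF-8");
        Optional<?> method  = HttpMethods.lookup("GET");
        Optional<?> media   = MediaTypes.lookup("application", "json");
        Optional<?> status  = StatusCodes.lookup(404);

        // map/orElse replace the old akka.japi.Option handling
        System.out.println(status.map(Object::toString).orElse("unknown status code"));
        System.out.println(charset.isPresent() + " " + method.isPresent() + " " + media.isPresent());
    }
}
```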
*/ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncodings.java b/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncodings.java index 203cf2d039..a331eeba05 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncodings.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/TransferEncodings.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/UniversalEntity.java b/akka-http-core/src/main/java/akka/http/javadsl/model/UniversalEntity.java index 7cb1945375..aab7759329 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/UniversalEntity.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/UniversalEntity.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/Uri.java b/akka-http-core/src/main/java/akka/http/javadsl/model/Uri.java index 1ef18e4991..9d4cec58e8 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/Uri.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/Uri.java @@ -1,180 +1,180 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; +import java.nio.charset.Charset; + import akka.http.impl.model.JavaUri; import akka.http.scaladsl.model.UriJavaAccessor; -import akka.japi.Option; -import akka.japi.Pair; import akka.parboiled2.ParserInput$; -import java.nio.charset.Charset; -import java.util.List; -import java.util.Map; +import java.util.Optional; /** * Represents an Uri. Use the `withX` methods to create modified copies of a given instance. */ public abstract class Uri { - /** - * Returns if this is an absolute Uri. - */ - public abstract boolean isAbsolute(); + /** + * Returns if this is an absolute Uri. + */ + public abstract boolean isAbsolute(); - /** - * Returns if this is a relative Uri. - */ - public abstract boolean isRelative(); + /** + * Returns if this is a relative Uri. + */ + public abstract boolean isRelative(); - /** - * Returns if this is an empty Uri. - */ - public abstract boolean isEmpty(); + /** + * Returns if this is an empty Uri. + */ + public abstract boolean isEmpty(); - /** - * Returns the scheme of this Uri. - */ - public abstract String scheme(); + /** + * Returns the scheme of this Uri. + */ + public abstract String scheme(); - /** - * Returns the Host of this Uri. - */ - public abstract Host host(); + /** + * Returns the Host of this Uri. + */ + public abstract Host host(); - /** - * Returns the port of this Uri. - */ - public abstract int port(); + /** + * Returns the port of this Uri. + */ + public abstract int port(); - /** - * Returns the user-info of this Uri. - */ - public abstract String userInfo(); + /** + * Returns the user-info of this Uri. + */ + public abstract String userInfo(); - /** - * Returns a String representation of the path of this Uri. - */ - public abstract String path(); + /** + * Returns a String representation of the path of this Uri. + */ + public abstract String path(); - /** - * Returns the the path segments of this Uri as an Iterable. - */ - public abstract Iterable pathSegments(); + /** + * Returns the the path segments of this Uri as an Iterable. 
+ */ + public abstract Iterable pathSegments(); - /** - * Returns a decoded String representation of the query of this Uri. - */ - public abstract Option queryString(Charset charset); + /** + * Returns a decoded String representation of the query of this Uri. + */ + public abstract Optional queryString(Charset charset); - /** - * Returns an undecoded String representation of the query of this Uri. - */ - public abstract Option rawQueryString(); + /** + * Returns an undecoded String representation of the query of this Uri. + */ + public abstract Optional rawQueryString(); - /** - * Returns the parsed Query instance of this Uri. - */ - public abstract Query query(); + /** + * Returns the parsed Query instance of this Uri. + */ + public abstract Query query(); - /** - * Returns the parsed Query instance of this Uri using the given charset and parsing mode. - */ - public abstract Query query(Charset charset, akka.http.scaladsl.model.Uri.ParsingMode mode); + /** + * Returns the parsed Query instance of this Uri using the given charset and parsing mode. + */ + public abstract Query query(Charset charset, akka.http.scaladsl.model.Uri.ParsingMode mode); - /** - * Returns the fragment part of this Uri. - */ - public abstract Option fragment(); + /** + * Returns the fragment part of this Uri. + */ + public abstract Optional fragment(); - /** - * Returns a copy of this instance with a new scheme. - */ - public abstract Uri scheme(String scheme); + /** + * Returns a copy of this instance with a new scheme. + */ + public abstract Uri scheme(String scheme); - /** - * Returns a copy of this instance with a new Host. - */ - public abstract Uri host(Host host); + /** + * Returns a copy of this instance with a new Host. + */ + public abstract Uri host(Host host); - /** - * Returns a copy of this instance with a new host. - */ - public abstract Uri host(String host); + /** + * Returns a copy of this instance with a new host. + */ + public abstract Uri host(String host); - /** - * Returns a copy of this instance with a new port. - */ - public abstract Uri port(int port); + /** + * Returns a copy of this instance with a new port. + */ + public abstract Uri port(int port); - /** - * Returns a copy of this instance with new user-info. - */ - public abstract Uri userInfo(String userInfo); + /** + * Returns a copy of this instance with new user-info. + */ + public abstract Uri userInfo(String userInfo); - /** - * Returns a copy of this instance with a new path. - */ - public abstract Uri path(String path); + /** + * Returns a copy of this instance with a new path. + */ + public abstract Uri path(String path); - /** - * Returns a copy of this instance with a path segment added at the end. - */ - public abstract Uri addPathSegment(String segment); + /** + * Returns a copy of this instance with a path segment added at the end. + */ + public abstract Uri addPathSegment(String segment); - /** - * Returns a copy of this instance with a new query. - */ - public abstract Uri rawQueryString(String rawQuery); + /** + * Returns a copy of this instance with a new query. + */ + public abstract Uri rawQueryString(String rawQuery); - /** - * Returns a copy of this instance with a new query. - */ - public abstract Uri query(Query query); + /** + * Returns a copy of this instance with a new query. + */ + public abstract Uri query(Query query); - /** - * Returns a copy of this instance that is relative. - */ - public abstract Uri toRelative(); + /** + * Returns a copy of this instance that is relative. 
+ */ + public abstract Uri toRelative(); - /** - * Returns a copy of this instance with a new fragment. - */ - public abstract Uri fragment(String fragment); + /** + * Returns a copy of this instance with a new fragment. + */ + public abstract Uri fragment(String fragment); - /** - * Returns a copy of this instance with a new optional fragment. - */ - public abstract Uri fragment(Option fragment); + /** + * Returns a copy of this instance with a new optional fragment. + */ + public abstract Uri fragment(Optional fragment); - public static final akka.http.scaladsl.model.Uri.ParsingMode STRICT = UriJavaAccessor.pmStrict(); - public static final akka.http.scaladsl.model.Uri.ParsingMode RELAXED = UriJavaAccessor.pmRelaxed(); + public static final akka.http.scaladsl.model.Uri.ParsingMode STRICT = UriJavaAccessor.pmStrict(); + public static final akka.http.scaladsl.model.Uri.ParsingMode RELAXED = UriJavaAccessor.pmRelaxed(); - /** - * Creates a default Uri to be modified using the modification methods. - */ - public static final Uri EMPTY = new JavaUri(akka.http.scaladsl.model.Uri.Empty$.MODULE$); + /** + * Creates a default Uri to be modified using the modification methods. + */ + public static final Uri EMPTY = new JavaUri(akka.http.scaladsl.model.Uri.Empty$.MODULE$); - /** - * Returns a Uri created by parsing the given string representation. - */ - public static Uri create(String uri) { - return new JavaUri(akka.http.scaladsl.model.Uri.apply(uri)); - } + /** + * Returns a Uri created by parsing the given string representation. + */ + public static Uri create(String uri) { + return new JavaUri(akka.http.scaladsl.model.Uri.apply(uri)); + } - /** - * Returns a Uri created by parsing the given string representation with the provided parsing mode. - */ - public static Uri create(String uri, akka.http.scaladsl.model.Uri.ParsingMode parsingMode) { - return new JavaUri(akka.http.scaladsl.model.Uri.apply(ParserInput$.MODULE$.apply(uri), parsingMode)); - } + /** + * Returns a Uri created by parsing the given string representation with the provided parsing mode. + */ + public static Uri create(String uri, akka.http.scaladsl.model.Uri.ParsingMode parsingMode) { + return new JavaUri(akka.http.scaladsl.model.Uri.apply(ParserInput$.MODULE$.apply(uri), parsingMode)); + } - /** - * Returns a Uri created by parsing the given string representation with the provided charset and parsing mode. - */ - public static Uri create(String uri, Charset charset, akka.http.scaladsl.model.Uri.ParsingMode parsingMode) { - return new JavaUri(akka.http.scaladsl.model.Uri.apply(ParserInput$.MODULE$.apply(uri), charset, parsingMode)); - } + /** + * Returns a Uri created by parsing the given string representation with the provided charset and parsing mode. + */ + public static Uri create(String uri, Charset charset, akka.http.scaladsl.model.Uri.ParsingMode parsingMode) { + return new JavaUri(akka.http.scaladsl.model.Uri.apply(ParserInput$.MODULE$.apply(uri), charset, parsingMode)); + } + + public static interface ParsingMode {} } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Accept.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Accept.java index f720148e40..29bebb24ef 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Accept.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Accept.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
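The reformatted `Uri` class above keeps the same fluent API but returns `java.util.Optional` from the query-string and fragment accessors. A brief sketch of reading those values, using only the methods shown in this patch:

```java
import java.nio.charset.StandardCharsets;

import akka.http.javadsl.model.Uri;

public class UriSketch {
    public static void main(String[] args) {
        Uri uri = Uri.create("http://example.com/docs?page=2#intro");

        System.out.println(uri.path());                                   // /docs
        System.out.println(uri.query().get("page").orElse("1"));          // 2
        System.out.println(uri.fragment().orElse("no fragment"));         // intro
        System.out.println(uri.queryString(StandardCharsets.UTF_8).orElse(""));  // page=2
    }
}
```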
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptCharset.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptCharset.java index b776629771..43f85d7b76 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptCharset.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptCharset.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptEncoding.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptEncoding.java index 967dc02754..710eec2d3e 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptEncoding.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptEncoding.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptLanguage.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptLanguage.java index 238502d7a8..95c561a307 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptLanguage.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptLanguage.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptRanges.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptRanges.java index 257a4cdd4c..fd183b0e9b 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptRanges.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AcceptRanges.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowCredentials.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowCredentials.java index 7acaffbb63..bdbf018632 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowCredentials.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowCredentials.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowHeaders.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowHeaders.java index a4884f8522..aaa429c25c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowHeaders.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowHeaders.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowMethods.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowMethods.java index f98315cdd1..a7025db556 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowMethods.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowMethods.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowOrigin.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowOrigin.java index fe7d44be69..284fd5b931 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowOrigin.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlAllowOrigin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlExposeHeaders.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlExposeHeaders.java index 702793ceb1..a685994481 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlExposeHeaders.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlExposeHeaders.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlMaxAge.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlMaxAge.java index c817095d39..6337399677 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlMaxAge.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlMaxAge.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestHeaders.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestHeaders.java index 9ba6718fd7..30514d44d4 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestHeaders.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestHeaders.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestMethod.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestMethod.java index fd44f214b3..25db1cc5c1 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestMethod.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/AccessControlRequestMethod.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Age.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Age.java index 92c15d1efd..2d4cf9e68d 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Age.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Age.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Allow.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Allow.java index e0bdc4c3e1..dee8c4af36 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Allow.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Allow.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Authorization.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Authorization.java index 276deba442..ebe4240d77 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Authorization.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Authorization.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/BasicHttpCredentials.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/BasicHttpCredentials.java index f9f0fc0440..dce8bdadee 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/BasicHttpCredentials.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/BasicHttpCredentials.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ByteRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ByteRange.java index ac092ec651..47b3740bbe 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ByteRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ByteRange.java @@ -1,21 +1,22 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; import akka.http.scaladsl.model.headers.ByteRange$; -import akka.japi.Option; + +import java.util.OptionalLong; public abstract class ByteRange { public abstract boolean isSlice(); public abstract boolean isFromOffset(); public abstract boolean isSuffix(); - public abstract Option getSliceFirst(); - public abstract Option getSliceLast(); - public abstract Option getOffset(); - public abstract Option getSuffixLength(); + public abstract OptionalLong getSliceFirst(); + public abstract OptionalLong getSliceLast(); + public abstract OptionalLong getOffset(); + public abstract OptionalLong getSuffixLength(); public static ByteRange createSlice(long first, long last) { return ByteRange$.MODULE$.apply(first, last); diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheControl.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheControl.java index b92be0911d..117100136c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheControl.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheControl.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirective.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirective.java index 459f79bfaa..b2c8849ac3 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirective.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirective.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirectives.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirectives.java index 141ffb15c3..a6662a32cb 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirectives.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CacheDirectives.java @@ -1,9 +1,14 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; +import scala.compat.java8.OptionConverters; + +import java.util.Optional; +import java.util.OptionalLong; + public final class CacheDirectives { private CacheDirectives() {} @@ -11,10 +16,10 @@ public final class CacheDirectives { return new akka.http.scaladsl.model.headers.CacheDirectives.max$minusage(deltaSeconds); } public static CacheDirective MAX_STALE() { - return new akka.http.scaladsl.model.headers.CacheDirectives.max$minusstale(akka.japi.Option.none().asScala()); + return new akka.http.scaladsl.model.headers.CacheDirectives.max$minusstale(OptionConverters.toScala(Optional.empty())); } public static CacheDirective MAX_STALE(long deltaSeconds) { - return new akka.http.scaladsl.model.headers.CacheDirectives.max$minusstale(akka.japi.Option.some((Object) deltaSeconds).asScala()); + return new akka.http.scaladsl.model.headers.CacheDirectives.max$minusstale(OptionConverters.toScala(OptionalLong.of(deltaSeconds))); } public static CacheDirective MIN_FRESH(long deltaSeconds) { return new akka.http.scaladsl.model.headers.CacheDirectives.min$minusfresh(deltaSeconds); diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDisposition.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDisposition.java index daa23f649e..8947370cec 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDisposition.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDisposition.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionType.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionType.java index 7675d54094..24ca2b2155 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionType.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionType.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionTypes.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionTypes.java index 8b8db97071..832e1f6a44 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionTypes.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentDispositionTypes.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentEncoding.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentEncoding.java index eae71c72aa..79e80082db 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentEncoding.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentEncoding.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
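`CacheDirectives.MAX_STALE` above now feeds the optional delta-seconds through `OptionConverters` as well: the no-argument variant passes an empty `Optional`, the overload wraps the value in an `OptionalLong`. A small sketch of the two call sites (the 3600-second value is an arbitrary example):

```java
import akka.http.javadsl.model.headers.CacheDirective;
import akka.http.javadsl.model.headers.CacheDirectives;

public class CacheDirectiveSketch {
    public static void main(String[] args) {
        // max-stale without a delta-seconds bound (an empty Optional under the hood)
        CacheDirective unbounded = CacheDirectives.MAX_STALE();

        // max-stale=3600 (OptionalLong.of(3600) under the hood)
        CacheDirective bounded = CacheDirectives.MAX_STALE(3600);

        System.out.println(unbounded + " / " + bounded);
    }
}
```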
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentRange.java index b537e000a1..d5be21518b 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentType.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentType.java index 5933b876fa..9acd0f1eae 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentType.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ContentType.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Cookie.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Cookie.java index cc5c4a7f71..be6a02d836 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Cookie.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Cookie.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CustomHeader.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CustomHeader.java index 7313313a2e..9f643adb18 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CustomHeader.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/CustomHeader.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; @@ -7,6 +7,4 @@ package akka.http.javadsl.model.headers; public abstract class CustomHeader extends akka.http.scaladsl.model.HttpHeader { public abstract String name(); public abstract String value(); - - protected abstract boolean suppressRendering(); } diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Date.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Date.java index bbf578d698..a5e15a558c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Date.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Date.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ETag.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ETag.java index a485469ae8..95f0d06a1d 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ETag.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ETag.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTag.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTag.java index c635efb78a..324bdbd849 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTag.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTag.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTagRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTagRange.java index 085f20df11..deafee9ca8 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTagRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/EntityTagRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Expires.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Expires.java index e52a8694ad..e2c0a1e44e 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Expires.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Expires.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Host.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Host.java index 1ce64adad5..d2e33c7a5f 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Host.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Host.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpChallenge.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpChallenge.java index 772b424a44..6d0e797060 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpChallenge.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpChallenge.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookie.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookie.java index 8b83d50dcd..19f66bc66a 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookie.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookie.java @@ -1,25 +1,28 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; import akka.http.javadsl.model.DateTime; import akka.http.impl.util.Util; -import akka.japi.Option; +import scala.compat.java8.OptionConverters; + +import java.util.Optional; +import java.util.OptionalLong; public abstract class HttpCookie { public abstract String name(); public abstract String value(); public abstract HttpCookiePair pair(); - public abstract Option getExpires(); - public abstract Option getMaxAge(); - public abstract Option getDomain(); - public abstract Option getPath(); + public abstract Optional getExpires(); + public abstract OptionalLong getMaxAge(); + public abstract Optional getDomain(); + public abstract Optional getPath(); public abstract boolean secure(); public abstract boolean httpOnly(); - public abstract Option getExtension(); + public abstract Optional getExtension(); public static HttpCookie create(String name, String value) { return new akka.http.scaladsl.model.headers.HttpCookie( @@ -28,11 +31,11 @@ public abstract class HttpCookie { false, false, Util.scalaNone()); } - public static HttpCookie create(String name, String value, Option domain, Option path) { + public static HttpCookie create(String name, String value, Optional domain, Optional path) { return new akka.http.scaladsl.model.headers.HttpCookie( name, value, Util.scalaNone(), Util.scalaNone(), - domain.asScala(), path.asScala(), + OptionConverters.toScala(domain), OptionConverters.toScala(path), false, false, Util.scalaNone()); } @@ -40,22 +43,22 @@ public abstract class HttpCookie { public static HttpCookie create( String name, String value, - Option expires, - Option maxAge, - Option domain, - Option path, + Optional expires, + OptionalLong maxAge, + Optional domain, + Optional path, boolean secure, boolean httpOnly, - Option extension) { + Optional extension) { return new akka.http.scaladsl.model.headers.HttpCookie( name, value, - Util.convertOptionToScala(expires), - ((Option) (Object) maxAge).asScala(), - domain.asScala(), - path.asScala(), + Util.convertOptionalToScala(expires), + OptionConverters.toScala(maxAge), + OptionConverters.toScala(domain), + OptionConverters.toScala(path), secure, httpOnly, - extension.asScala()); + OptionConverters.toScala(extension)); } /** diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookiePair.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookiePair.java index f8f698316e..2c71a0a300 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookiePair.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCookiePair.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCredentials.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCredentials.java index 639c7b0136..9303d617f1 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCredentials.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpCredentials.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
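With the switch to `java.util.Optional`, the `HttpCookie` factories and accessors above now use JDK optionals end to end. A small usage sketch, assuming the raw-typed `Optional` parameters shown in the updated signatures; all cookie attributes are example values:

```
import java.util.Optional;
import akka.http.javadsl.model.headers.HttpCookie;

public class HttpCookieSketch {
  public static void main(String[] args) {
    // Two-argument factory: just name and value.
    HttpCookie plain = HttpCookie.create("session", "abc123");

    // Optional-based factory from the updated javadsl (domain and path are example values).
    HttpCookie scoped = HttpCookie.create(
        "session", "abc123",
        Optional.of("example.com"),  // domain
        Optional.of("/"));           // path

    // Accessors now return JDK optionals as well.
    System.out.println(plain.value() + " "
        + scoped.getDomain().orElse("<none>") + " "
        + scoped.getPath().orElse("/"));
  }
}
```

The full nine-argument `create` overload works the same way, with `OptionalLong` for `maxAge`.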
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncoding.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncoding.java index 89cdc7ba49..f0cdd63a75 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncoding.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncoding.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodingRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodingRange.java index a7b27a2798..a615be65d5 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodingRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodingRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodings.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodings.java index f15282411a..fa4fbdfc70 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodings.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpEncodings.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOrigin.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOrigin.java index e3d87d7340..3123e38d1f 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOrigin.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOrigin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOriginRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOriginRange.java index 821259ee06..3bd88b030f 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOriginRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/HttpOriginRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfMatch.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfMatch.java index e3086df285..7debbbaf26 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfMatch.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfMatch.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfModifiedSince.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfModifiedSince.java index 4d5c9db3cd..66df2eb3be 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfModifiedSince.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfModifiedSince.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfNoneMatch.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfNoneMatch.java index f2265c8cb9..995ffcddb9 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfNoneMatch.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfNoneMatch.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfUnmodifiedSince.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfUnmodifiedSince.java index 3c60adc96c..753b75d91e 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfUnmodifiedSince.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/IfUnmodifiedSince.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Language.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Language.java index d6c91bc37e..dd743cc420 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Language.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Language.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LanguageRange.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LanguageRange.java index 68fea5fbb7..5fad85b685 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LanguageRange.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LanguageRange.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LastModified.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LastModified.java index bf52122c4c..df3b2dab03 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LastModified.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LastModified.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Link.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Link.java index 1be966459e..a30e639b27 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Link.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Link.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParam.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParam.java index f82dae8467..d6ab57e47a 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParam.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParam.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParams.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParams.java index a8206e1b3a..bc7eab4eed 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParams.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkParams.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkValue.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkValue.java index ad0729ed84..aba13b83ea 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkValue.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/LinkValue.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Location.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Location.java index 1d3697655d..253bcc524d 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Location.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Location.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/OAuth2BearerToken.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/OAuth2BearerToken.java index d2f20ad9ca..66dc4be805 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/OAuth2BearerToken.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/OAuth2BearerToken.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Origin.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Origin.java index 086ba31882..f12141e69b 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Origin.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Origin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProductVersion.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProductVersion.java index 846038c8a8..25b7838cc7 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProductVersion.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProductVersion.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthenticate.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthenticate.java index 47ad02ce32..22700dd3f5 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthenticate.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthenticate.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthorization.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthorization.java index 26e645118b..93ac683af0 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthorization.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/ProxyAuthorization.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Range.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Range.java index 873a386d13..542994b717 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Range.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Range.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnit.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnit.java index beded19459..8ba28c7f04 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnit.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnit.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnits.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnits.java index 963374bea6..2f62b82fe5 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnits.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RangeUnits.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawHeader.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawHeader.java index aec58b09f7..d82f7ef59c 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawHeader.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawHeader.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawRequestURI.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawRequestURI.java index 394cca3f8c..702d827495 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawRequestURI.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RawRequestURI.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Referer.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Referer.java index a307a02a25..4bf215b833 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Referer.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Referer.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RemoteAddress.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RemoteAddress.java index 1584e3f7f9..cad3d119bf 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RemoteAddress.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/RemoteAddress.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Server.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Server.java index 9a8fb71b87..35fef4596e 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Server.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/Server.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/SetCookie.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/SetCookie.java index 1a4e96bd86..d74a8fe174 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/SetCookie.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/SetCookie.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TimeoutAccess.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TimeoutAccess.java new file mode 100644 index 0000000000..af11f7258a --- /dev/null +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TimeoutAccess.java @@ -0,0 +1,16 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.javadsl.model.headers; + +/** + * Model for the synthetic `Timeout-Access` header. + */ +public abstract class TimeoutAccess extends akka.http.scaladsl.model.HttpHeader { + public abstract akka.http.javadsl.TimeoutAccess timeoutAccess(); + + public static TimeoutAccess create(akka.http.javadsl.TimeoutAccess timeoutAccess) { + return new akka.http.scaladsl.model.headers.Timeout$minusAccess((akka.http.scaladsl.TimeoutAccess) timeoutAccess); + } +} diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TransferEncoding.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TransferEncoding.java index f8dd276385..ff92536a14 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TransferEncoding.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/TransferEncoding.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/UserAgent.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/UserAgent.java index 5797ebb328..797bc32215 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/UserAgent.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/UserAgent.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/WWWAuthenticate.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/WWWAuthenticate.java index 3919ae41eb..dd56fb80d2 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/WWWAuthenticate.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/WWWAuthenticate.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/XForwardedFor.java b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/XForwardedFor.java index 48b479590a..5af417fda1 100644 --- a/akka-http-core/src/main/java/akka/http/javadsl/model/headers/XForwardedFor.java +++ b/akka-http-core/src/main/java/akka/http/javadsl/model/headers/XForwardedFor.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
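The new synthetic `Timeout-Access` header (see the `request-timeout` setting added to `reference.conf` below) carries the handle used for per-request timeout customization. The sketch below locates it on a request; it assumes the javadsl `HttpMessage.getHeaders()` accessor, and what you can do with the returned `akka.http.javadsl.TimeoutAccess` (e.g. lengthening the timeout or swapping the timeout response) is described in the config comment rather than shown here:

```
import akka.http.javadsl.model.HttpHeader;
import akka.http.javadsl.model.HttpRequest;
import akka.http.javadsl.model.headers.TimeoutAccess;

public class TimeoutAccessSketch {
  // Locate the synthetic Timeout-Access header on a request.
  static akka.http.javadsl.TimeoutAccess timeoutAccessOf(HttpRequest request) {
    for (HttpHeader h : request.getHeaders()) {
      if (h instanceof TimeoutAccess) {
        // The returned handle is what enables programmatic customization
        // of the timeout period and timeout response per request.
        return ((TimeoutAccess) h).timeoutAccess();
      }
    }
    return null; // header absent, e.g. when request-timeout = infinite
  }
}
```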
*/ package akka.http.javadsl.model.headers; diff --git a/akka-http-core/src/main/resources/reference.conf b/akka-http-core/src/main/resources/reference.conf index 55da20ba6e..6eb274c636 100644 --- a/akka-http-core/src/main/resources/reference.conf +++ b/akka-http-core/src/main/resources/reference.conf @@ -18,6 +18,18 @@ akka.http { # Set to `infinite` to completely disable idle connection timeouts. idle-timeout = 60 s + # Defines the default time period within which the application has to + # produce an HttpResponse for any given HttpRequest it received. + # The timeout begins to run when the *end* of the request has been + # received, so even potentially long uploads can have a short timeout. + # Set to `infinite` to completely disable request timeout checking. + # + # If this setting is not `infinite` the HTTP server layer attaches a + # `Timeout-Access` header to the request, which enables programmatic + # customization of the timeout period and timeout response for each + # request individually. + request-timeout = 20 s + # The time period within which the TCP binding process must be completed. # Set to `infinite` to disable. bind-timeout = 1s @@ -273,13 +285,16 @@ akka.http { max-chunk-ext-length = 256 max-chunk-size = 1m - # Maximum content length which should not be exceeded by incoming HttpRequests. - # For file uploads which use the entityBytes Source of an incoming HttpRequest it is safe to - # set this to a very high value (or to `infinite` if feeling very adventurous) as the streaming - # upload will be back-pressured properly by Akka Streams. - # Please note however that this setting is a global property, and is applied to all incoming requests, - # not only file uploads consumed in a streaming fashion, so pick this limit wisely. - max-content-length = 8m + # Default maximum content length which should not be exceeded by incoming request entities. + # Can be changed at runtime (to a higher or lower value) via the `HttpEntity::withSizeLimit` method. + # Note that it is not necessarily a problem to set this to a high value as all stream operations + # are always properly backpressured. + # Nevertheless you might want to apply some limit in order to prevent a single client from consuming + # an excessive amount of server resources. + # + # Set to `infinite` to completely disable entity length checks. (Even then you can still apply one + # programmatically via `withSizeLimit`.) + max-content-length = 8m # Sets the strictness mode for parsing request target URIs. # The following values are defined: diff --git a/akka-http-core/src/main/scala/akka/http/ClientConnectionSettings.scala b/akka-http-core/src/main/scala/akka/http/ClientConnectionSettings.scala deleted file mode 100644 index 9562d23168..0000000000 --- a/akka-http-core/src/main/scala/akka/http/ClientConnectionSettings.scala +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. 
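Both settings are plain `reference.conf` entries, so they can be overridden from `application.conf` or programmatically; `max-content-length` can additionally be adjusted per entity via `HttpEntity::withSizeLimit`, as the comment notes. A minimal override sketch using the Typesafe Config API, assuming the standard `akka.http.server` and `akka.http.parsing` sections (the hunk context above only shows the enclosing `akka.http` block); the values are arbitrary:

```
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class HttpSettingsOverrideSketch {
  public static void main(String[] args) {
    // Example overrides for the settings documented above (values are arbitrary).
    Config overrides = ConfigFactory.parseString(
        "akka.http.server.request-timeout = 5 s\n" +
        "akka.http.parsing.max-content-length = 16m\n");

    // Fall back to reference.conf / application.conf for everything else.
    Config config = overrides.withFallback(ConfigFactory.load());

    System.out.println(config.getString("akka.http.server.request-timeout"));
    System.out.println(config.getString("akka.http.parsing.max-content-length"));
  }
}
```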
- */ - -package akka.http - -import java.util.Random - -import akka.http.impl.engine.ws.Randoms -import akka.io.Inet.SocketOption - -import scala.concurrent.duration.{ Duration, FiniteDuration } -import scala.collection.immutable - -import com.typesafe.config.Config -import akka.actor.ActorSystem - -import akka.http.impl.util._ - -import akka.http.scaladsl.model.headers.`User-Agent` - -final case class ClientConnectionSettings( - userAgentHeader: Option[`User-Agent`], - connectingTimeout: FiniteDuration, - idleTimeout: Duration, - requestHeaderSizeHint: Int, - websocketRandomFactory: () ⇒ Random, - socketOptions: immutable.Traversable[SocketOption], - parserSettings: ParserSettings) { - - require(connectingTimeout >= Duration.Zero, "connectingTimeout must be >= 0") - require(requestHeaderSizeHint > 0, "request-size-hint must be > 0") -} - -object ClientConnectionSettings extends SettingsCompanion[ClientConnectionSettings]("akka.http.client") { - def fromSubConfig(root: Config, inner: Config) = { - val c = inner.withFallback(root.getConfig(prefix)) - apply( - c.getString("user-agent-header").toOption.map(`User-Agent`(_)), - c getFiniteDuration "connecting-timeout", - c getPotentiallyInfiniteDuration "idle-timeout", - c getIntBytes "request-header-size-hint", - Randoms.SecureRandomInstances, // can currently only be overridden from code - SocketOptionSettings.fromSubConfig(root, c.getConfig("socket-options")), - ParserSettings.fromSubConfig(root, c.getConfig("parsing"))) - } - - /** - * Creates an instance of ClientConnectionSettings using the configuration provided by the given - * ActorSystem. - * - * Java API - */ - def create(system: ActorSystem): ClientConnectionSettings = ClientConnectionSettings(system) - - /** - * Creates an instance of ClientConnectionSettings using the given Config. - * - * Java API - */ - def create(config: Config): ClientConnectionSettings = ClientConnectionSettings(config) - - /** - * Create an instance of ClientConnectionSettings using the given String of config overrides to override - * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in - * the class loader of this class). - * - * Java API - */ - def create(configOverrides: String): ClientConnectionSettings = ClientConnectionSettings(configOverrides) -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/ConnectionPoolSettings.scala b/akka-http-core/src/main/scala/akka/http/ConnectionPoolSettings.scala deleted file mode 100644 index b990bd4383..0000000000 --- a/akka-http-core/src/main/scala/akka/http/ConnectionPoolSettings.scala +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. 
- */ - -package akka.http - -import java.lang.{ Iterable ⇒ JIterable } - -import akka.actor.ActorSystem -import akka.event.LoggingAdapter -import akka.http.impl.util._ -import akka.http.scaladsl.HttpsContext -import com.typesafe.config.Config - -import scala.concurrent.duration.Duration - -final case class HostConnectionPoolSetup(host: String, port: Int, setup: ConnectionPoolSetup) - -final case class ConnectionPoolSetup( - settings: ConnectionPoolSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter) - -object ConnectionPoolSetup { - /** Java API */ - def create(settings: ConnectionPoolSettings, - httpsContext: akka.japi.Option[akka.http.javadsl.HttpsContext], - log: LoggingAdapter): ConnectionPoolSetup = - ConnectionPoolSetup(settings, httpsContext.map(_.asInstanceOf[HttpsContext]), log) -} - -final case class ConnectionPoolSettings( - maxConnections: Int, - maxRetries: Int, - maxOpenRequests: Int, - pipeliningLimit: Int, - idleTimeout: Duration, - connectionSettings: ClientConnectionSettings) { - - require(maxConnections > 0, "max-connections must be > 0") - require(maxRetries >= 0, "max-retries must be >= 0") - require(maxOpenRequests > 0 && (maxOpenRequests & (maxOpenRequests - 1)) == 0, "max-open-requests must be a power of 2 > 0") - require(pipeliningLimit > 0, "pipelining-limit must be > 0") - require(idleTimeout >= Duration.Zero, "idle-timeout must be >= 0") -} - -object ConnectionPoolSettings extends SettingsCompanion[ConnectionPoolSettings]("akka.http.host-connection-pool") { - def fromSubConfig(root: Config, c: Config) = { - apply( - c getInt "max-connections", - c getInt "max-retries", - c getInt "max-open-requests", - c getInt "pipelining-limit", - c getPotentiallyInfiniteDuration "idle-timeout", - ClientConnectionSettings.fromSubConfig(root, c.getConfig("client"))) - } - - /** - * Creates an instance of ConnectionPoolSettings using the configuration provided by the given - * ActorSystem. - * - * Java API - */ - def create(system: ActorSystem): ConnectionPoolSettings = ConnectionPoolSettings(system) - - /** - * Creates an instance of ConnectionPoolSettings using the given Config. - * - * Java API - */ - def create(config: Config): ConnectionPoolSettings = ConnectionPoolSettings(config) - - /** - * Create an instance of ConnectionPoolSettings using the given String of config overrides to override - * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in - * the class loader of this class). - * - * Java API - */ - def create(configOverrides: String): ConnectionPoolSettings = ConnectionPoolSettings(configOverrides) -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/ParserSettings.scala b/akka-http-core/src/main/scala/akka/http/ParserSettings.scala deleted file mode 100644 index 7af10fdb4b..0000000000 --- a/akka-http-core/src/main/scala/akka/http/ParserSettings.scala +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. 
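The relocated `ConnectionPoolSettings` keeps the requirement that `max-open-requests` be a power of two, enforced with the `n & (n - 1)` bit trick. The same check in isolation, as a standalone sketch:

```
public class PowerOfTwoCheckSketch {
  // Same bit trick as the max-open-requests requirement:
  // a positive n is a power of two exactly when it has a single bit set,
  // in which case n & (n - 1) clears that bit and yields 0.
  static boolean isPowerOfTwo(int n) {
    return n > 0 && (n & (n - 1)) == 0;
  }

  public static void main(String[] args) {
    System.out.println(isPowerOfTwo(32));  // true  -> accepted
    System.out.println(isPowerOfTwo(48));  // false -> rejected by the require(...)
  }
}
```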
- */ - -package akka.http - -import java.util.Locale -import akka.actor.ActorSystem -import com.typesafe.config.Config -import scala.collection.JavaConverters._ -import akka.http.scaladsl.model.{ StatusCode, HttpMethod, Uri } -import akka.http.impl.util._ -import akka.http.impl.engine.parsing.HttpHeaderParser - -final case class ParserSettings( - maxUriLength: Int, - maxMethodLength: Int, - maxResponseReasonLength: Int, - maxHeaderNameLength: Int, - maxHeaderValueLength: Int, - maxHeaderCount: Int, - maxContentLength: Long, - maxChunkExtLength: Int, - maxChunkSize: Int, - uriParsingMode: Uri.ParsingMode, - cookieParsingMode: ParserSettings.CookieParsingMode, - illegalHeaderWarnings: Boolean, - errorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity, - headerValueCacheLimits: Map[String, Int], - includeTlsSessionInfoHeader: Boolean, - customMethods: String ⇒ Option[HttpMethod], - customStatusCodes: Int ⇒ Option[StatusCode]) extends HttpHeaderParser.Settings { - - require(maxUriLength > 0, "max-uri-length must be > 0") - require(maxMethodLength > 0, "max-method-length must be > 0") - require(maxResponseReasonLength > 0, "max-response-reason-length must be > 0") - require(maxHeaderNameLength > 0, "max-header-name-length must be > 0") - require(maxHeaderValueLength > 0, "max-header-value-length must be > 0") - require(maxHeaderCount > 0, "max-header-count must be > 0") - require(maxContentLength > 0, "max-content-length must be > 0") - require(maxChunkExtLength > 0, "max-chunk-ext-length must be > 0") - require(maxChunkSize > 0, "max-chunk-size must be > 0") - - val defaultHeaderValueCacheLimit: Int = headerValueCacheLimits("default") - - def headerValueCacheLimit(headerName: String): Int = - headerValueCacheLimits.getOrElse(headerName, defaultHeaderValueCacheLimit) - - def withCustomMethods(methods: HttpMethod*): ParserSettings = { - val map = methods.map(m ⇒ m.name -> m).toMap - copy(customMethods = map.get) - } - def withCustomStatusCodes(codes: StatusCode*): ParserSettings = { - val map = codes.map(c ⇒ c.intValue -> c).toMap - copy(customStatusCodes = map.get) - } -} - -object ParserSettings extends SettingsCompanion[ParserSettings]("akka.http.parsing") { - def fromSubConfig(root: Config, inner: Config) = { - val c = inner.withFallback(root.getConfig(prefix)) - val cacheConfig = c getConfig "header-cache" - - apply( - c getIntBytes "max-uri-length", - c getIntBytes "max-method-length", - c getIntBytes "max-response-reason-length", - c getIntBytes "max-header-name-length", - c getIntBytes "max-header-value-length", - c getIntBytes "max-header-count", - c getPossiblyInfiniteBytes "max-content-length", - c getIntBytes "max-chunk-ext-length", - c getIntBytes "max-chunk-size", - Uri.ParsingMode(c getString "uri-parsing-mode"), - CookieParsingMode(c getString "cookie-parsing-mode"), - c getBoolean "illegal-header-warnings", - ErrorLoggingVerbosity(c getString "error-logging-verbosity"), - cacheConfig.entrySet.asScala.map(kvp ⇒ kvp.getKey -> cacheConfig.getInt(kvp.getKey))(collection.breakOut), - c getBoolean "tls-session-info-header", - _ ⇒ None, - _ ⇒ None) - } - - sealed trait ErrorLoggingVerbosity - object ErrorLoggingVerbosity { - case object Off extends ErrorLoggingVerbosity - case object Simple extends ErrorLoggingVerbosity - case object Full extends ErrorLoggingVerbosity - - def apply(string: String): ErrorLoggingVerbosity = - string.toRootLowerCase match { - case "off" ⇒ Off - case "simple" ⇒ Simple - case "full" ⇒ Full - case x ⇒ throw new IllegalArgumentException(s"[$x] is not a 
legal `error-logging-verbosity` setting") - } - } - - sealed trait CookieParsingMode - object CookieParsingMode { - case object RFC6265 extends CookieParsingMode - case object Raw extends CookieParsingMode - - def apply(mode: String): CookieParsingMode = mode.toRootLowerCase match { - case "rfc6265" ⇒ RFC6265 - case "raw" ⇒ Raw - } - } - - /** - * Creates an instance of ParserSettings using the configuration provided by the given - * ActorSystem. - * - * Java API - */ - def create(system: ActorSystem): ParserSettings = ParserSettings(system) - - /** - * Creates an instance of ParserSettings using the given Config. - * - * Java API - */ - def create(config: Config): ParserSettings = ParserSettings(config) - - /** - * Create an instance of ParserSettings using the given String of config overrides to override - * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in - * the class loader of this class). - * - * Java API - */ - def create(configOverrides: String): ParserSettings = ParserSettings(configOverrides) -} - diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/HttpConnectionTimeoutException.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/HttpConnectionTimeoutException.scala index 6cfa2e0d4f..59d4c35962 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/HttpConnectionTimeoutException.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/HttpConnectionTimeoutException.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.http.impl.engine diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/TokenSourceActor.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/TokenSourceActor.scala index b20c309333..91ea24623a 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/TokenSourceActor.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/TokenSourceActor.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala index 2bbd8df266..b68264ab67 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.client -import akka.stream.impl.fusing.GraphInterpreter +import akka.NotUsed +import akka.http.scaladsl.settings.ClientConnectionSettings import language.existentials import scala.annotation.tailrec import scala.collection.mutable.ListBuffer @@ -13,7 +14,6 @@ import akka.util.ByteString import akka.event.LoggingAdapter import akka.stream._ import akka.stream.scaladsl._ -import akka.http.ClientConnectionSettings import akka.http.scaladsl.Http import akka.http.scaladsl.model.headers.Host import akka.http.scaladsl.model.{ IllegalResponseException, HttpMethod, HttpRequest, HttpResponse } @@ -60,7 +60,7 @@ private[http] object OutgoingConnectionBlueprint { val requestRendererFactory = new HttpRequestRendererFactory(userAgentHeader, requestHeaderSizeHint, log) - val requestRendering: Flow[HttpRequest, ByteString, Unit] = Flow[HttpRequest] + val requestRendering: Flow[HttpRequest, ByteString, NotUsed] = Flow[HttpRequest] .map(RequestRenderingContext(_, hostHeader)) .via(Flow[RequestRenderingContext].flatMapConcat(requestRendererFactory.renderToSource).named("renderer")) @@ -75,8 +75,8 @@ private[http] object OutgoingConnectionBlueprint { case (Seq(MessageEnd), remaining) ⇒ SubSource.kill(remaining) false - case _ ⇒ - true + case (seq, _) ⇒ + seq.nonEmpty } .map { case (Seq(ResponseStart(statusCode, protocol, headers, createEntity, _)), entityParts) ⇒ @@ -167,7 +167,6 @@ private[http] object OutgoingConnectionBlueprint { // each connection uses a single (private) response parser instance for all its responses // which builds a cache of all header instances seen on that connection val parser = rootParser.createShallowCopy() - var methodBypassCompleted = false var waitingForMethod = true setHandler(methodBypassInput, new InHandler { @@ -179,7 +178,6 @@ private[http] object OutgoingConnectionBlueprint { } override def onUpstreamFinish(): Unit = if (waitingForMethod) completeStage() - else methodBypassCompleted = true }) setHandler(dataInput, new InHandler { @@ -201,17 +199,16 @@ private[http] object OutgoingConnectionBlueprint { setHandler(out, eagerTerminateOutput) - val getNextMethod = () ⇒ - if (methodBypassCompleted) completeStage() - else { - pull(methodBypassInput) - waitingForMethod = true - } + val getNextMethod = () ⇒ { + waitingForMethod = true + if (isClosed(methodBypassInput)) completeStage() + else pull(methodBypassInput) + } val getNextData = () ⇒ { waitingForMethod = false - if (!isClosed(dataInput)) pull(dataInput) - else completeStage() + if (isClosed(dataInput)) completeStage() + else pull(dataInput) } @tailrec def drainParser(current: ResponseOutput, b: ListBuffer[ResponseOutput] = ListBuffer.empty): Unit = { @@ -219,13 +216,10 @@ private[http] object OutgoingConnectionBlueprint { if (output.nonEmpty) emit(out, output, andThen) else andThen() current match { - case NeedNextRequestMethod ⇒ - e(b.result(), getNextMethod) - case StreamEnd ⇒ - e(b.result(), () ⇒ completeStage()) - case NeedMoreData ⇒ - e(b.result(), getNextData) - case x ⇒ drainParser(parser.onPull(), b += x) + case NeedNextRequestMethod ⇒ e(b.result(), getNextMethod) + case StreamEnd ⇒ e(b.result(), () ⇒ completeStage()) + case NeedMoreData ⇒ e(b.result(), getNextData) + case x ⇒ drainParser(parser.onPull(), b += x) } } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolConductor.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolConductor.scala index 24d9666de6..c87d7368f1 100644 --- 
a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolConductor.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolConductor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client @@ -140,15 +140,16 @@ private object PoolConductor { slotStates(slotIx) = slotStateAfterDisconnect(slotStates(slotIx), failed) } pull(slotIn) - val tryPull = nextSlot == -1 + val wasBlocked = nextSlot == -1 nextSlot = bestSlot() - if (tryPull) tryPullCtx() + val nowUnblocked = nextSlot != -1 + if (wasBlocked && nowUnblocked) pull(ctxIn) // get next request context } }) setHandler(out, eagerTerminateOutput) - val tryPullCtx = () ⇒ if (nextSlot != -1) pull(ctxIn) + val tryPullCtx = () ⇒ if (nextSlot != -1 && !hasBeenPulled(ctxIn)) pull(ctxIn) override def preStart(): Unit = { pull(ctxIn) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolFlow.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolFlow.scala index 06ece1d27c..88d8139f80 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolFlow.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolFlow.scala @@ -1,11 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client import java.net.InetSocketAddress -import akka.http.ConnectionPoolSettings +import akka.NotUsed +import akka.http.scaladsl.settings.ConnectionPoolSettings import scala.concurrent.{ Promise, Future } import scala.util.Try @@ -69,7 +70,7 @@ private object PoolFlow { */ def apply(connectionFlow: Flow[HttpRequest, HttpResponse, Future[Http.OutgoingConnection]], remoteAddress: InetSocketAddress, settings: ConnectionPoolSettings, log: LoggingAdapter)( - implicit system: ActorSystem, fm: Materializer): Flow[RequestContext, ResponseContext, Unit] = + implicit system: ActorSystem, fm: Materializer): Flow[RequestContext, ResponseContext, NotUsed] = Flow.fromGraph(GraphDSL.create[FlowShape[RequestContext, ResponseContext]]() { implicit b ⇒ import settings._ import GraphDSL.Implicits._ diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolGateway.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolGateway.scala index 470a685d1e..6129e3029e 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolGateway.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolGateway.scala @@ -3,7 +3,7 @@ package akka.http.impl.engine.client import java.util.concurrent.atomic.AtomicReference import scala.annotation.tailrec import scala.concurrent.{ Future, Promise } -import akka.http.HostConnectionPoolSetup +import akka.http.impl.settings.HostConnectionPoolSetup import akka.actor.{ Deploy, Props, ActorSystem, ActorRef } import akka.http.scaladsl.Http import akka.http.scaladsl.model.{ HttpResponse, HttpRequest } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala index 51317681f8..4c4ee9dc6c 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala @@ -1,12 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.client import java.net.InetSocketAddress -import akka.stream.OverflowStrategy.Fail.BufferOverflowException +import akka.stream.BufferOverflowException import scala.annotation.tailrec import scala.concurrent.Promise @@ -18,9 +18,9 @@ import akka.stream.actor.ActorPublisherMessage._ import akka.stream.actor.ActorSubscriberMessage._ import akka.stream.impl.{ SeqActorName, FixedSizeBuffer } import akka.stream.scaladsl.{ Keep, Flow, Sink, Source } -import akka.http.HostConnectionPoolSetup +import akka.http.impl.settings.HostConnectionPoolSetup import akka.http.scaladsl.model._ -import akka.http.scaladsl.Http +import akka.http.scaladsl.{ ConnectionContext, HttpsConnectionContext, Http } import PoolFlow._ private object PoolInterfaceActor { @@ -64,9 +64,10 @@ private class PoolInterfaceActor(hcps: HostConnectionPoolSetup, import hcps._ import setup._ - val connectionFlow = - if (httpsContext.isEmpty) Http().outgoingConnection(host, port, None, settings.connectionSettings, setup.log) - else Http().outgoingConnectionTls(host, port, None, settings.connectionSettings, httpsContext, setup.log) + val connectionFlow = connectionContext match { + case httpsContext: HttpsConnectionContext ⇒ Http().outgoingConnectionHttps(host, port, httpsContext, None, settings.connectionSettings, setup.log) + case _ ⇒ Http().outgoingConnection(host, port, None, settings.connectionSettings, setup.log) + } val poolFlow = PoolFlow( Flow[HttpRequest].viaMat(connectionFlow)(Keep.right), @@ -147,7 +148,7 @@ private class PoolInterfaceActor(hcps: HostConnectionPoolSetup, } def dispatchRequest(pr: PoolRequest): Unit = { - val scheme = Uri.httpScheme(hcps.setup.httpsContext.isDefined) + val scheme = Uri.httpScheme(hcps.setup.connectionContext.isSecure) val hostHeader = headers.Host(hcps.host, Uri.normalizePort(hcps.port, scheme)) val effectiveRequest = pr.request diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolSlot.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolSlot.scala index 1e6ee2ce25..ad537d16ab 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolSlot.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolSlot.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.client @@ -7,10 +7,9 @@ package akka.http.impl.engine.client import java.net.InetSocketAddress import akka.actor._ -import akka.http.ConnectionPoolSettings +import akka.http.scaladsl.settings.ConnectionPoolSettings import akka.http.impl.util._ import akka.http.scaladsl.model.{ HttpEntity, HttpRequest, HttpResponse } -import akka.http.scaladsl.util.FastFuture import akka.stream._ import akka.stream.actor._ import akka.stream.impl.{ ActorProcessor, ExposedPublisher, SeqActorName, SubscribePending } @@ -163,18 +162,7 @@ private object PoolSlot { case FromConnection(OnNext(response: HttpResponse)) ⇒ val requestContext = inflightRequests.head inflightRequests = inflightRequests.tail - val (entity, whenCompleted) = response.entity match { - case x: HttpEntity.Strict ⇒ x -> FastFuture.successful(()) - case x: HttpEntity.Default ⇒ - val (newData, whenCompleted) = StreamUtils.captureTermination(x.data) - x.copy(data = newData) -> whenCompleted - case x: HttpEntity.CloseDelimited ⇒ - val (newData, whenCompleted) = StreamUtils.captureTermination(x.data) - x.copy(data = newData) -> whenCompleted - case x: HttpEntity.Chunked ⇒ - val (newChunks, whenCompleted) = StreamUtils.captureTermination(x.chunks) - x.copy(chunks = newChunks) -> whenCompleted - } + val (entity, whenCompleted) = HttpEntity.captureTermination(response.entity) val delivery = ResponseDelivery(ResponseContext(requestContext, Success(response withEntity entity))) import fm.executionContext val requestCompleted = SlotEvent.RequestCompletedFuture(whenCompleted.map(_ ⇒ SlotEvent.RequestCompleted(slotIx))) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BodyPartParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BodyPartParser.scala index 13b2f4fa11..c2cdb54039 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BodyPartParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BodyPartParser.scala @@ -1,11 +1,10 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing -import akka.http.ParserSettings -import akka.stream.impl.fusing.GraphInterpreter +import akka.NotUsed import scala.annotation.tailrec import akka.event.LoggingAdapter import akka.parboiled2.CharPredicate @@ -26,7 +25,7 @@ import akka.stream.impl.fusing.SubSource private[http] final class BodyPartParser(defaultContentType: ContentType, boundary: String, log: LoggingAdapter, - settings: BodyPartParser.Settings = BodyPartParser.defaultSettings) + settings: BodyPartParser.Settings) extends PushPullStage[ByteString, BodyPartParser.Output] { import BodyPartParser._ import settings._ @@ -146,7 +145,7 @@ private[http] final class BodyPartParser(defaultContentType: ContentType, else if (doubleDash(input, ix)) terminate() else fail("Illegal multipart boundary in message content") - case HttpHeaderParser.EmptyHeader ⇒ parseEntity(headers.toList, contentType)(input, lineEnd) + case EmptyHeader ⇒ parseEntity(headers.toList, contentType)(input, lineEnd) case h: `Content-Type` ⇒ if (cth.isEmpty) parseHeaderLines(input, lineEnd, headers, headerCount + 1, Some(h)) @@ -261,6 +260,8 @@ private[http] object BodyPartParser { val boundaryChar = CharPredicate.Digit ++ CharPredicate.Alpha ++ "'()+_,-./:=? 
" private object BoundaryHeader extends HttpHeader { + def renderInRequests = false + def renderInResponses = false def name = "" def lowercaseName = "" def value = "" @@ -270,32 +271,13 @@ private[http] object BodyPartParser { sealed trait Output sealed trait PartStart extends Output - final case class BodyPartStart(headers: List[HttpHeader], createEntity: Source[Output, Unit] ⇒ BodyPartEntity) extends PartStart + final case class BodyPartStart(headers: List[HttpHeader], createEntity: Source[Output, NotUsed] ⇒ BodyPartEntity) extends PartStart final case class EntityPart(data: ByteString) extends Output final case class ParseError(info: ErrorInfo) extends PartStart - final case class Settings( - maxHeaderNameLength: Int, - maxHeaderValueLength: Int, - maxHeaderCount: Int, - illegalHeaderWarnings: Boolean, - headerValueCacheLimit: Int, - uriParsingMode: Uri.ParsingMode, - cookieParsingMode: ParserSettings.CookieParsingMode) extends HttpHeaderParser.Settings { - require(maxHeaderNameLength > 0, "maxHeaderNameLength must be > 0") - require(maxHeaderValueLength > 0, "maxHeaderValueLength must be > 0") - require(maxHeaderCount > 0, "maxHeaderCount must be > 0") - require(headerValueCacheLimit >= 0, "headerValueCacheLimit must be >= 0") - def headerValueCacheLimit(headerName: String) = headerValueCacheLimit + abstract class Settings extends HttpHeaderParser.Settings { + def maxHeaderCount: Int + def illegalHeaderWarnings: Boolean + def defaultHeaderValueCacheLimit: Int } - - // TODO: load from config - val defaultSettings = Settings( - maxHeaderNameLength = 64, - maxHeaderValueLength = 8192, - maxHeaderCount = 64, - illegalHeaderWarnings = true, - headerValueCacheLimit = 8, - uriParsingMode = Uri.ParsingMode.Relaxed, - cookieParsingMode = ParserSettings.CookieParsingMode.RFC6265) } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BoyerMoore.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BoyerMoore.scala index 7e3bda1755..772e203e9a 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BoyerMoore.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/BoyerMoore.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala index 010519f582..703f626591 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.parsing @@ -11,8 +11,8 @@ import scala.annotation.tailrec import akka.parboiled2.CharUtils import akka.util.ByteString import akka.http.impl.util._ -import akka.http.scaladsl.model.{ IllegalHeaderException, StatusCodes, HttpHeader, ErrorInfo, Uri } -import akka.http.scaladsl.model.headers.RawHeader +import akka.http.scaladsl.model.{ IllegalHeaderException, StatusCodes, HttpHeader, ErrorInfo } +import akka.http.scaladsl.model.headers.{ EmptyHeader, RawHeader } import akka.http.impl.model.parser.HeaderParser import akka.http.impl.model.parser.CharacterClasses._ @@ -408,20 +408,12 @@ private[engine] final class HttpHeaderParser private ( private[http] object HttpHeaderParser { import SpecializedHeaderValueParsers._ - trait Settings extends HeaderParser.Settings { + abstract class Settings extends HeaderParser.Settings { def maxHeaderNameLength: Int def maxHeaderValueLength: Int def headerValueCacheLimit(headerName: String): Int } - object EmptyHeader extends HttpHeader { - def name = "" - def lowercaseName = "" - def value = "" - def render[R <: Rendering](r: R): r.type = r - override def toString = "EmptyHeader" - } - private def predefinedHeaders = Seq( "Accept: *", "Accept: */*", diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpMessageParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpMessageParser.scala index 227aeb5051..a612cc7d8d 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpMessageParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpMessageParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing @@ -14,7 +14,7 @@ import akka.parboiled2.CharUtils import akka.util.ByteString import akka.stream.stage._ import akka.http.impl.model.parser.CharacterClasses -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import akka.http.scaladsl.model._ import headers._ import HttpProtocols._ @@ -136,7 +136,7 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser resultHeader match { case null ⇒ continue(input, lineStart)(parseHeaderLinesAux(headers, headerCount, ch, clh, cth, teh, e100c, hh)) - case HttpHeaderParser.EmptyHeader ⇒ + case EmptyHeader ⇒ val close = HttpMessage.connectionCloseExpected(protocol, ch) setCompletionHandling(CompletionIsEntityStreamError) parseEntity(headers.toList, protocol, input, lineEnd, clh, cth, teh, e100c, hh, close) @@ -206,7 +206,7 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser catch { case e: ParsingException ⇒ errorInfo = e.info; 0 } if (errorInfo eq null) { headerParser.resultHeader match { - case HttpHeaderParser.EmptyHeader ⇒ + case EmptyHeader ⇒ val lastChunk = if (extension.isEmpty && headers.isEmpty) HttpEntity.LastChunk else HttpEntity.LastChunk(extension, headers) emit(EntityChunk(lastChunk)) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala index 951f010bec..a7ed0cd9a6 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala @@ -1,12 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.parsing import java.lang.{ StringBuilder ⇒ JStringBuilder } import scala.annotation.tailrec -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import akka.util.ByteString import akka.http.impl.engine.ws.Handshake import akka.http.impl.model.parser.CharacterClasses diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpResponseParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpResponseParser.scala index 353e53cb32..c338307819 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpResponseParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpResponseParser.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing import scala.annotation.tailrec -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import akka.http.impl.model.parser.CharacterClasses import akka.util.ByteString import akka.http.scaladsl.model._ diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/ParserOutput.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/ParserOutput.scala index 26d83b1852..b249c6a848 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/ParserOutput.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/ParserOutput.scala @@ -1,9 +1,10 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing +import akka.NotUsed import akka.http.scaladsl.model._ import akka.stream.impl.fusing.GraphInterpreter import akka.stream.scaladsl.{ Sink, Source } @@ -63,17 +64,17 @@ private[http] object ParserOutput { ////////////////////////////////////// - sealed abstract class EntityCreator[-A <: ParserOutput, +B >: HttpEntity.Strict <: HttpEntity] extends (Source[A, Unit] ⇒ B) + sealed abstract class EntityCreator[-A <: ParserOutput, +B >: HttpEntity.Strict <: HttpEntity] extends (Source[A, NotUsed] ⇒ B) final case class StrictEntityCreator(entity: HttpEntity.Strict) extends EntityCreator[ParserOutput, HttpEntity.Strict] { - def apply(parts: Source[ParserOutput, Unit]) = { + def apply(parts: Source[ParserOutput, NotUsed]) = { // We might need to drain stray empty tail streams which will be read by no one. SubSource.kill(parts) entity } } - final case class StreamedEntityCreator[-A <: ParserOutput, +B >: HttpEntity.Strict <: HttpEntity](creator: Source[A, Unit] ⇒ B) + final case class StreamedEntityCreator[-A <: ParserOutput, +B >: HttpEntity.Strict <: HttpEntity](creator: Source[A, NotUsed] ⇒ B) extends EntityCreator[A, B] { - def apply(parts: Source[A, Unit]) = creator(parts) + def apply(parts: Source[A, NotUsed]) = creator(parts) } } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/SpecializedHeaderValueParsers.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/SpecializedHeaderValueParsers.scala index bde90d02e4..03d276c471 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/SpecializedHeaderValueParsers.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/SpecializedHeaderValueParsers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.parsing diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/package.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/package.scala index ebb032d9ca..11b1c08737 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/package.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/package.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine import java.lang.{ StringBuilder ⇒ JStringBuilder } -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import scala.annotation.tailrec import akka.event.LoggingAdapter diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala index 752686ae49..16fefd0776 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/BodyPartRenderer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.rendering diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpRequestRendererFactory.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpRequestRendererFactory.scala index e7a91775f2..63cf2c7fbb 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpRequestRendererFactory.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpRequestRendererFactory.scala @@ -1,10 +1,10 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.rendering -import akka.http.ClientConnectionSettings +import akka.http.scaladsl.settings.ClientConnectionSettings import akka.http.scaladsl.model.RequestEntityAcceptance._ import scala.annotation.tailrec @@ -78,8 +78,8 @@ private[http] class HttpRequestRendererFactory(userAgentHeader: Option[headers.` case x: `Raw-Request-URI` ⇒ // we never render this header renderHeaders(tail, hostHeaderSeen, userAgentSeen, transferEncodingSeen) - case x: CustomHeader ⇒ - if (!x.suppressRendering) render(x) + case x: CustomHeader if x.renderInRequests ⇒ + render(x) renderHeaders(tail, hostHeaderSeen, userAgentSeen, transferEncodingSeen) case x: RawHeader if (x is "content-type") || (x is "content-length") || (x is "transfer-encoding") || @@ -88,7 +88,8 @@ private[http] class HttpRequestRendererFactory(userAgentHeader: Option[headers.` renderHeaders(tail, hostHeaderSeen, userAgentSeen, transferEncodingSeen) case x ⇒ - render(x) + if (x.renderInRequests) render(x) + else log.warning("HTTP header '{}' is not allowed in requests", x) renderHeaders(tail, hostHeaderSeen, userAgentSeen, transferEncodingSeen) } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpResponseRendererFactory.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpResponseRendererFactory.scala index 448f0944b2..1aaa12eba2 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpResponseRendererFactory.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/HttpResponseRendererFactory.scala @@ -1,12 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.rendering -import akka.http.impl.engine.ws.{ FrameEvent, UpgradeToWebsocketResponseHeader } +import akka.NotUsed +import akka.http.impl.engine.ws.{ FrameEvent, UpgradeToWebSocketResponseHeader } import akka.http.scaladsl.model.ws.Message -import akka.stream.{ Outlet, Inlet, Attributes, FlowShape } +import akka.stream.{ Outlet, Inlet, Attributes, FlowShape, Graph } import scala.annotation.tailrec import akka.event.LoggingAdapter @@ -53,7 +54,7 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser // split out so we can stabilize by overriding in tests protected def currentTimeMillis(): Long = System.currentTimeMillis() - def renderer: Flow[ResponseRenderingContext, ResponseRenderingOutput, Unit] = Flow.fromGraph(HttpResponseRenderer) + def renderer: Flow[ResponseRenderingContext, ResponseRenderingOutput, NotUsed] = Flow.fromGraph(HttpResponseRenderer) object HttpResponseRenderer extends GraphStage[FlowShape[ResponseRenderingContext, ResponseRenderingOutput]] { val in = Inlet[ResponseRenderingContext]("in") @@ -62,13 +63,13 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { - private[this] var closeMode: CloseMode = DontClose // signals what to do after the current response - private[this] def close: Boolean = closeMode != DontClose - private[this] def closeIf(cond: Boolean): Unit = - if (cond) closeMode = CloseConnection + var closeMode: CloseMode = DontClose // signals what to do after the current response + def close: Boolean = closeMode != DontClose + def closeIf(cond: Boolean): Unit = if (cond) closeMode = CloseConnection + var transferring = false setHandler(in, new InHandler { - def onPush(): Unit = + override def onPush(): Unit = render(grab(in)) match { case Strict(outElement) ⇒ push(out, outElement) @@ -76,23 +77,36 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser case Streamed(outStream) ⇒ transfer(outStream) } - override def onUpstreamFinish(): Unit = closeMode = CloseConnection + override def onUpstreamFinish(): Unit = + if (transferring) closeMode = CloseConnection + else completeStage() }) val waitForDemandHandler = new OutHandler { - def onPull(): Unit = if (close) completeStage() else pull(in) + def onPull(): Unit = pull(in) } setHandler(out, waitForDemandHandler) def transfer(outStream: Source[ResponseRenderingOutput, Any]): Unit = { + transferring = true val sinkIn = new SubSinkInlet[ResponseRenderingOutput]("RenderingSink") sinkIn.setHandler(new InHandler { - def onPush(): Unit = push(out, sinkIn.grab()) - override def onUpstreamFinish(): Unit = if (close) completeStage() else setHandler(out, waitForDemandHandler) + override def onPush(): Unit = push(out, sinkIn.grab()) + override def onUpstreamFinish(): Unit = + if (close) completeStage() + else { + transferring = false + setHandler(out, waitForDemandHandler) + if (isAvailable(out)) pull(in) + } }) setHandler(out, new OutHandler { - def onPull(): Unit = sinkIn.pull() + override def onPull(): Unit = sinkIn.pull() + override def onDownstreamFinish(): Unit = { + completeStage() + sinkIn.cancel() + } }) sinkIn.pull() - Source.fromGraph(outStream).runWith(sinkIn.sink)(interpreter.subFusingMaterializer) + outStream.runWith(sinkIn.sink)(interpreter.subFusingMaterializer) } def render(ctx: ResponseRenderingContext): StrictOrStreamed = { @@ -148,8 +162,8 @@ private[http] class 
HttpResponseRendererFactory(serverHeader: Option[headers.Ser render(x) renderHeaders(tail, alwaysClose, connHeader, serverSeen = true, transferEncodingSeen, dateSeen) - case x: CustomHeader ⇒ - if (!x.suppressRendering) render(x) + case x: CustomHeader if x.renderInResponses ⇒ + render(x) renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen) case x: RawHeader if (x is "content-type") || (x is "content-length") || (x is "transfer-encoding") || @@ -158,7 +172,8 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen) case x ⇒ - render(x) + if (x.renderInResponses) render(x) + else log.warning("HTTP header '{}' is not allowed in responses", x) renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen) } @@ -190,8 +205,8 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser else if (connHeader != null && connHeader.hasUpgrade) { r ~~ connHeader ~~ CrLf headers - .collectFirst { case u: UpgradeToWebsocketResponseHeader ⇒ u } - .foreach { header ⇒ closeMode = SwitchToWebsocket(header.handler) } + .collectFirst { case u: UpgradeToWebSocketResponseHeader ⇒ u } + .foreach { header ⇒ closeMode = SwitchToWebSocket(header.handler) } } if (mustRenderTransferEncodingChunkedHeader && !transferEncodingSeen) r ~~ `Transfer-Encoding` ~~ ChunkedBytes ~~ CrLf @@ -214,7 +229,7 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser Strict { closeMode match { - case SwitchToWebsocket(handler) ⇒ ResponseRenderingOutput.SwitchToWebsocket(r.get, handler) + case SwitchToWebSocket(handler) ⇒ ResponseRenderingOutput.SwitchToWebSocket(r.get, handler) case _ ⇒ ResponseRenderingOutput.HttpData(r.get) } } @@ -253,7 +268,7 @@ private[http] class HttpResponseRendererFactory(serverHeader: Option[headers.Ser sealed trait CloseMode case object DontClose extends CloseMode case object CloseConnection extends CloseMode - case class SwitchToWebsocket(handler: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]]) extends CloseMode + case class SwitchToWebSocket(handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]]) extends CloseMode } /** @@ -270,5 +285,5 @@ private[http] sealed trait ResponseRenderingOutput /** INTERNAL API */ private[http] object ResponseRenderingOutput { private[http] case class HttpData(bytes: ByteString) extends ResponseRenderingOutput - private[http] case class SwitchToWebsocket(httpResponseBytes: ByteString, handler: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]]) extends ResponseRenderingOutput -} + private[http] case class SwitchToWebSocket(httpResponseBytes: ByteString, handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]]) extends ResponseRenderingOutput +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/RenderSupport.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/RenderSupport.scala index 5c6c0ee698..be9a0bfcc8 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/RenderSupport.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/rendering/RenderSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.rendering diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpAttributes.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpAttributes.scala index 82e2e392f9..2ff1ba8ec7 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpAttributes.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpAttributes.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.server diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala index fa3d8ec042..4b47319a79 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala @@ -1,33 +1,37 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.server import java.net.InetSocketAddress -import java.util.Random -import akka.stream.impl.fusing.GraphInterpreter +import java.util.concurrent.atomic.AtomicReference +import scala.concurrent.{ Promise, Future } +import scala.concurrent.duration.{ Deadline, FiniteDuration, Duration } import scala.collection.immutable -import org.reactivestreams.{ Publisher, Subscriber } import scala.util.control.NonFatal +import akka.NotUsed +import akka.actor.Cancellable +import akka.japi.Function import akka.event.LoggingAdapter -import akka.http.ServerSettings +import akka.util.ByteString +import akka.stream._ +import akka.stream.io._ +import akka.stream.scaladsl._ +import akka.stream.stage._ +import akka.http.scaladsl.settings.ServerSettings import akka.http.impl.engine.HttpConnectionTimeoutException import akka.http.impl.engine.parsing.ParserOutput._ import akka.http.impl.engine.parsing._ import akka.http.impl.engine.rendering.{ HttpResponseRendererFactory, ResponseRenderingContext, ResponseRenderingOutput } import akka.http.impl.engine.ws._ import akka.http.impl.util._ -import akka.http.scaladsl.Http +import akka.http.scaladsl.util.FastFuture.EnhancedFuture +import akka.http.scaladsl.{ TimeoutAccess, Http } +import akka.http.scaladsl.model.headers.`Timeout-Access` +import akka.http.javadsl.model import akka.http.scaladsl.model._ -import akka.stream._ -import akka.stream.impl.ConstantFun -import akka.stream.io._ -import akka.stream.scaladsl._ -import akka.stream.stage._ -import akka.util.ByteString import akka.http.scaladsl.model.ws.Message -import akka.stream.impl.fusing.SubSource /** * INTERNAL API @@ -54,6 +58,7 @@ private[http] object HttpServerBluePrint { def apply(settings: ServerSettings, remoteAddress: Option[InetSocketAddress], log: LoggingAdapter): Http.ServerLayer = { val theStack = userHandlerGuard(settings.pipeliningLimit) atop + requestTimeoutSupport(settings.timeouts.requestTimeout) atop requestPreparation(settings) atop controller(settings, log) atop parsingRendering(settings, log) atop @@ -63,21 +68,27 @@ private[http] object HttpServerBluePrint { theStack.withAttributes(HttpAttributes.remoteAddress(remoteAddress)) } - val tlsSupport: BidiFlow[ByteString, SslTlsOutbound, SslTlsInbound, SessionBytes, Unit] = + val tlsSupport: BidiFlow[ByteString, SslTlsOutbound, SslTlsInbound, SessionBytes, NotUsed] = BidiFlow.fromFlows(Flow[ByteString].map(SendBytes), Flow[SslTlsInbound].collect { case x: 
SessionBytes ⇒ x }) - def websocketSupport(settings: ServerSettings, log: LoggingAdapter): BidiFlow[ResponseRenderingOutput, ByteString, SessionBytes, SessionBytes, Unit] = + def websocketSupport(settings: ServerSettings, log: LoggingAdapter): BidiFlow[ResponseRenderingOutput, ByteString, SessionBytes, SessionBytes, NotUsed] = BidiFlow.fromGraph(new ProtocolSwitchStage(settings, log)) - def parsingRendering(settings: ServerSettings, log: LoggingAdapter): BidiFlow[ResponseRenderingContext, ResponseRenderingOutput, SessionBytes, RequestOutput, Unit] = + def parsingRendering(settings: ServerSettings, log: LoggingAdapter): BidiFlow[ResponseRenderingContext, ResponseRenderingOutput, SessionBytes, RequestOutput, NotUsed] = BidiFlow.fromFlows(rendering(settings, log), parsing(settings, log)) - def controller(settings: ServerSettings, log: LoggingAdapter): BidiFlow[HttpResponse, ResponseRenderingContext, RequestOutput, RequestOutput, Unit] = + def controller(settings: ServerSettings, log: LoggingAdapter): BidiFlow[HttpResponse, ResponseRenderingContext, RequestOutput, RequestOutput, NotUsed] = BidiFlow.fromGraph(new ControllerStage(settings, log)).reversed - def requestPreparation(settings: ServerSettings): BidiFlow[HttpResponse, HttpResponse, RequestOutput, HttpRequest, Unit] = + def requestPreparation(settings: ServerSettings): BidiFlow[HttpResponse, HttpResponse, RequestOutput, HttpRequest, NotUsed] = BidiFlow.fromFlows(Flow[HttpResponse], new PrepareRequests(settings)) + def requestTimeoutSupport(timeout: Duration): BidiFlow[HttpResponse, HttpResponse, HttpRequest, HttpRequest, NotUsed] = + timeout match { + case x: FiniteDuration ⇒ BidiFlow.fromGraph(new RequestTimeoutSupport(x)).reversed + case _ ⇒ BidiFlow.identity + } + final class PrepareRequests(settings: ServerSettings) extends GraphStage[FlowShape[RequestOutput, HttpRequest]] { val in = Inlet[RequestOutput]("RequestStartThenRunIgnore.in") val out = Outlet[HttpRequest]("RequestStartThenRunIgnore.out") @@ -97,6 +108,8 @@ private[http] object HttpServerBluePrint { val entity = createEntity(entityCreator) withSizeLimit settings.parserSettings.maxContentLength push(out, HttpRequest(effectiveMethod, uri, effectiveHeaders, entity, protocol)) + case other ⇒ + throw new IllegalStateException(s"unexpected element of type ${other.getClass}") } } setHandler(in, idle) @@ -127,7 +140,7 @@ private[http] object HttpServerBluePrint { } } - def parsing(settings: ServerSettings, log: LoggingAdapter): Flow[SessionBytes, RequestOutput, Unit] = { + def parsing(settings: ServerSettings, log: LoggingAdapter): Flow[SessionBytes, RequestOutput, NotUsed] = { import settings._ // the initial header parser we initially use for every connection, @@ -157,7 +170,7 @@ private[http] object HttpServerBluePrint { .map(establishAbsoluteUri) } - def rendering(settings: ServerSettings, log: LoggingAdapter): Flow[ResponseRenderingContext, ResponseRenderingOutput, Unit] = { + def rendering(settings: ServerSettings, log: LoggingAdapter): Flow[ResponseRenderingContext, ResponseRenderingOutput, NotUsed] = { import settings._ val responseRendererFactory = new HttpResponseRendererFactory(serverHeader, responseHeaderSizeHint, log) @@ -173,6 +186,104 @@ private[http] object HttpServerBluePrint { .via(Flow[ResponseRenderingOutput].transform(() ⇒ errorHandling(errorHandler)).named("errorLogger")) } + class RequestTimeoutSupport(initialTimeout: FiniteDuration) + extends GraphStage[BidiShape[HttpRequest, HttpRequest, HttpResponse, HttpResponse]] { + private val requestIn = 
Inlet[HttpRequest]("requestIn") + private val requestOut = Outlet[HttpRequest]("requestOut") + private val responseIn = Inlet[HttpResponse]("responseIn") + private val responseOut = Outlet[HttpResponse]("responseOut") + + override def initialAttributes = Attributes.name("RequestTimeoutSupport") + + val shape = new BidiShape(requestIn, requestOut, responseIn, responseOut) + + def createLogic(effectiveAttributes: Attributes) = new GraphStageLogic(shape) { + var openTimeouts = immutable.Queue[TimeoutAccessImpl]() + setHandler(requestIn, new InHandler { + def onPush(): Unit = { + val request = grab(requestIn) + val (entity, requestEnd) = HttpEntity.captureTermination(request.entity) + val access = new TimeoutAccessImpl(request, initialTimeout, requestEnd, + getAsyncCallback(emitTimeoutResponse), interpreter.materializer) + openTimeouts = openTimeouts.enqueue(access) + push(requestOut, request.copy(headers = request.headers :+ `Timeout-Access`(access), entity = entity)) + } + override def onUpstreamFinish() = complete(requestOut) + override def onUpstreamFailure(ex: Throwable) = fail(requestOut, ex) + def emitTimeoutResponse(response: (TimeoutAccess, HttpResponse)) = + if (openTimeouts.head eq response._1) { + emit(responseOut, response._2, () ⇒ complete(responseOut)) + } // else the application response arrived after we scheduled the timeout response, which is close but ok + }) + // TODO: provide and use default impl for simply connecting an input and an output port as we do here + setHandler(requestOut, new OutHandler { + def onPull(): Unit = pull(requestIn) + override def onDownstreamFinish() = cancel(requestIn) + }) + setHandler(responseIn, new InHandler { + def onPush(): Unit = { + openTimeouts.head.clear() + openTimeouts = openTimeouts.tail + push(responseOut, grab(responseIn)) + } + override def onUpstreamFinish() = complete(responseOut) + override def onUpstreamFailure(ex: Throwable) = fail(responseOut, ex) + }) + setHandler(responseOut, new OutHandler { + def onPull(): Unit = pull(responseIn) + override def onDownstreamFinish() = cancel(responseIn) + }) + } + } + + private class TimeoutSetup(val timeoutBase: Deadline, + val scheduledTask: Cancellable, + val timeout: Duration, + val handler: HttpRequest ⇒ HttpResponse) + + private class TimeoutAccessImpl(request: HttpRequest, initialTimeout: FiniteDuration, requestEnd: Future[Unit], + trigger: AsyncCallback[(TimeoutAccess, HttpResponse)], materializer: Materializer) + extends AtomicReference[Future[TimeoutSetup]] with TimeoutAccess with (HttpRequest ⇒ HttpResponse) { self ⇒ + import materializer.executionContext + + set { + requestEnd.fast.map(_ ⇒ new TimeoutSetup(Deadline.now, schedule(initialTimeout, this), initialTimeout, this)) + } + + override def apply(request: HttpRequest) = HttpResponse(StatusCodes.ServiceUnavailable, entity = "The server was not able " + + "to produce a timely response to your request.\r\nPlease try again in a short while!") + + def clear(): Unit = // best effort timeout cancellation + get.fast.foreach(setup ⇒ if (setup.scheduledTask ne null) setup.scheduledTask.cancel()) + + override def updateTimeout(timeout: Duration): Unit = update(timeout, null: HttpRequest ⇒ HttpResponse) + override def updateHandler(handler: HttpRequest ⇒ HttpResponse): Unit = update(null, handler) + override def update(timeout: Duration, handler: HttpRequest ⇒ HttpResponse): Unit = { + val promise = Promise[TimeoutSetup]() + for (old ← getAndSet(promise.future).fast) + promise.success { + if ((old.scheduledTask eq null) || 
old.scheduledTask.cancel()) { + val newHandler = if (handler eq null) old.handler else handler + val newTimeout = if (timeout eq null) old.timeout else timeout + val newScheduling = newTimeout match { + case x: FiniteDuration ⇒ schedule(old.timeoutBase + x - Deadline.now, newHandler) + case _ ⇒ null // don't schedule a new timeout + } + new TimeoutSetup(old.timeoutBase, newScheduling, newTimeout, newHandler) + } else old // too late, the previously set timeout cannot be cancelled anymore + } + } + private def schedule(delay: FiniteDuration, handler: HttpRequest ⇒ HttpResponse): Cancellable = + materializer.scheduleOnce(delay, new Runnable { def run() = trigger.invoke((self, handler(request))) }) + + import akka.http.impl.util.JavaMapping.Implicits._ + /** JAVA API **/ + def update(timeout: Duration, handler: Function[model.HttpRequest, model.HttpResponse]): Unit = + update(timeout, handler(_: HttpRequest).asScala) + def updateHandler(handler: Function[model.HttpRequest, model.HttpResponse]): Unit = + updateHandler(handler(_: HttpRequest).asScala) + } + class ControllerStage(settings: ServerSettings, log: LoggingAdapter) extends GraphStage[BidiShape[RequestOutput, RequestOutput, HttpResponse, ResponseRenderingContext]] { private val requestParsingIn = Inlet[RequestOutput]("requestParsingIn") @@ -330,7 +441,7 @@ private[http] object HttpServerBluePrint { def with100ContinueTrigger[T <: ParserOutput](createEntity: EntityCreator[T, RequestEntity]) = StreamedEntityCreator { - createEntity.compose[Source[T, Unit]] { + createEntity.compose[Source[T, NotUsed]] { _.via(Flow[T].transform(() ⇒ new PushPullStage[T, T] { private var oneHundredContinueSent = false def onPush(elem: T, ctx: Context[T]) = ctx.push(elem) @@ -352,7 +463,7 @@ private[http] object HttpServerBluePrint { * - produces exactly one response per request * - has not more than `pipeliningLimit` responses outstanding */ - def userHandlerGuard(pipeliningLimit: Int): BidiFlow[HttpResponse, HttpResponse, HttpRequest, HttpRequest, Unit] = + def userHandlerGuard(pipeliningLimit: Int): BidiFlow[HttpResponse, HttpResponse, HttpRequest, HttpRequest, NotUsed] = One2OneBidiFlow[HttpRequest, HttpResponse](pipeliningLimit).reversed private class ProtocolSwitchStage(settings: ServerSettings, log: LoggingAdapter) @@ -371,31 +482,37 @@ private[http] object HttpServerBluePrint { def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { import akka.http.impl.engine.rendering.ResponseRenderingOutput._ + /* + * These handlers are in charge until a switch command comes in, then they + * are replaced. 
+ */ + setHandler(fromHttp, new InHandler { override def onPush(): Unit = grab(fromHttp) match { case HttpData(b) ⇒ push(toNet, b) - case SwitchToWebsocket(bytes, handlerFlow) ⇒ + case SwitchToWebSocket(bytes, handlerFlow) ⇒ push(toNet, bytes) complete(toHttp) cancel(fromHttp) - switchToWebsocket(handlerFlow) + switchToWebSocket(handlerFlow) } + override def onUpstreamFinish(): Unit = complete(toNet) + override def onUpstreamFailure(ex: Throwable): Unit = fail(toNet, ex) }) setHandler(toNet, new OutHandler { override def onPull(): Unit = pull(fromHttp) + override def onDownstreamFinish(): Unit = completeStage() }) setHandler(fromNet, new InHandler { - def onPush(): Unit = push(toHttp, grab(fromNet)) - - // propagate error but don't close stage yet to prevent fromHttp/fromWs being cancelled - // too eagerly + override def onPush(): Unit = push(toHttp, grab(fromNet)) + override def onUpstreamFinish(): Unit = complete(toHttp) override def onUpstreamFailure(ex: Throwable): Unit = fail(toHttp, ex) }) setHandler(toHttp, new OutHandler { override def onPull(): Unit = pull(fromNet) - override def onDownstreamFinish(): Unit = () + override def onDownstreamFinish(): Unit = cancel(fromNet) }) private var activeTimers = 0 @@ -419,44 +536,68 @@ private[http] object HttpServerBluePrint { } /* - * Websocket support + * WebSocket support */ - def switchToWebsocket(handlerFlow: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]]): Unit = { + def switchToWebSocket(handlerFlow: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]]): Unit = { val frameHandler = handlerFlow match { case Left(frameHandler) ⇒ frameHandler case Right(messageHandler) ⇒ - Websocket.stack(serverSide = true, maskingRandomFactory = settings.websocketRandomFactory, log = log).join(messageHandler) + WebSocket.stack(serverSide = true, maskingRandomFactory = settings.websocketRandomFactory, log = log).join(messageHandler) } + val sinkIn = new SubSinkInlet[ByteString]("FrameSink") - val sourceOut = new SubSourceOutlet[ByteString]("FrameSource") - - val timeoutKey = SubscriptionTimeout(() ⇒ { - sourceOut.timeout(timeout) - if (sourceOut.isClosed) completeStage() - }) - addTimeout(timeoutKey) - sinkIn.setHandler(new InHandler { override def onPush(): Unit = push(toNet, sinkIn.grab()) - }) - setHandler(toNet, new OutHandler { - override def onPull(): Unit = sinkIn.pull() + override def onUpstreamFinish(): Unit = complete(toNet) + override def onUpstreamFailure(ex: Throwable): Unit = fail(toNet, ex) }) - setHandler(fromNet, new InHandler { - override def onPush(): Unit = sourceOut.push(grab(fromNet).bytes) - }) - sourceOut.setHandler(new OutHandler { - override def onPull(): Unit = { - if (!hasBeenPulled(fromNet)) pull(fromNet) - cancelTimeout(timeoutKey) - sourceOut.setHandler(new OutHandler { - override def onPull(): Unit = if (!hasBeenPulled(fromNet)) pull(fromNet) - }) - } - }) + if (isClosed(fromNet)) { + setHandler(toNet, new OutHandler { + override def onPull(): Unit = sinkIn.pull() + override def onDownstreamFinish(): Unit = { + completeStage() + sinkIn.cancel() + } + }) + WebSocket.framing.join(frameHandler).runWith(Source.empty, sinkIn.sink)(subFusingMaterializer) + } else { + val sourceOut = new SubSourceOutlet[ByteString]("FrameSource") - Websocket.framing.join(frameHandler).runWith(sourceOut.source, sinkIn.sink)(subFusingMaterializer) + val timeoutKey = SubscriptionTimeout(() ⇒ { + sourceOut.timeout(timeout) + if (sourceOut.isClosed) completeStage() + }) + 
addTimeout(timeoutKey) + + setHandler(toNet, new OutHandler { + override def onPull(): Unit = sinkIn.pull() + override def onDownstreamFinish(): Unit = { + completeStage() + sinkIn.cancel() + sourceOut.complete() + } + }) + + setHandler(fromNet, new InHandler { + override def onPush(): Unit = sourceOut.push(grab(fromNet).bytes) + override def onUpstreamFinish(): Unit = sourceOut.complete() + override def onUpstreamFailure(ex: Throwable): Unit = sourceOut.fail(ex) + }) + sourceOut.setHandler(new OutHandler { + override def onPull(): Unit = { + if (!hasBeenPulled(fromNet)) pull(fromNet) + cancelTimeout(timeoutKey) + sourceOut.setHandler(new OutHandler { + override def onPull(): Unit = if (!hasBeenPulled(fromNet)) pull(fromNet) + override def onDownstreamFinish(): Unit = cancel(fromNet) + }) + } + override def onDownstreamFinish(): Unit = cancel(fromNet) + }) + + WebSocket.framing.join(frameHandler).runWith(sourceOut.source, sinkIn.sink)(subFusingMaterializer) + } } } } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEvent.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEvent.scala index c2e0d3045f..aefb0edc3a 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEvent.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEvent.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws @@ -12,7 +12,7 @@ private[http] sealed trait FrameEventOrError private[http] final case class FrameError(p: ProtocolException) extends FrameEventOrError /** - * The low-level Websocket framing model. + * The low-level WebSocket framing model. * * INTERNAL API */ diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventParser.scala index ceaf4a7afe..8d04e133b0 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventParser.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws @@ -10,7 +10,7 @@ import akka.stream.io.ByteStringParser import akka.stream.Attributes /** - * Streaming parser for the Websocket framing protocol as defined in RFC6455 + * Streaming parser for the WebSocket framing protocol as defined in RFC6455 * * http://tools.ietf.org/html/rfc6455 * @@ -46,7 +46,7 @@ private[http] object FrameEventParser extends ByteStringParser[FrameEvent] { } object ReadFrameHeader extends Step { - override def parse(reader: ByteReader): (FrameEvent, Step) = { + override def parse(reader: ByteReader): ParseResult[FrameEvent] = { import Protocol._ val flagsAndOp = reader.readByte() @@ -83,23 +83,25 @@ private[http] object FrameEventParser extends ByteStringParser[FrameEvent] { val takeNow = (header.length min reader.remainingSize).toInt val thisFrameData = reader.take(takeNow) + val noMoreData = thisFrameData.length == length val nextState = - if (thisFrameData.length == length) ReadFrameHeader + if (noMoreData) ReadFrameHeader else new ReadData(length - thisFrameData.length) - (FrameStart(header, thisFrameData.compact), nextState) + ParseResult(Some(FrameStart(header, thisFrameData.compact)), nextState, true) } } class ReadData(_remaining: Long) extends Step { + override def canWorkWithPartialData = true var remaining = _remaining - override def parse(reader: ByteReader): (FrameEvent, Step) = + override def parse(reader: ByteReader): ParseResult[FrameEvent] = if (reader.remainingSize < remaining) { remaining -= reader.remainingSize - (FrameData(reader.takeAll(), lastPart = false), this) + ParseResult(Some(FrameData(reader.takeAll(), lastPart = false)), this, true) } else { - (FrameData(reader.take(remaining.toInt), lastPart = true), ReadFrameHeader) + ParseResult(Some(FrameData(reader.take(remaining.toInt), lastPart = true)), ReadFrameHeader, true) } } } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventRenderer.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventRenderer.scala index 5f3bf90604..208657c2a9 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventRenderer.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameEventRenderer.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameHandler.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameHandler.scala index c405c234c1..0752bfbe3d 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameHandler.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameHandler.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws +import akka.NotUsed import akka.stream.scaladsl.Flow import akka.stream.stage.{ SyncDirective, Context, StatefulStage } import akka.util.ByteString @@ -19,7 +20,7 @@ import scala.util.control.NonFatal */ private[http] object FrameHandler { - def create(server: Boolean): Flow[FrameEventOrError, Output, Unit] = + def create(server: Boolean): Flow[FrameEventOrError, Output, NotUsed] = Flow[FrameEventOrError].transform(() ⇒ new HandlerStage(server)) private class HandlerStage(server: Boolean) extends StatefulStage[FrameEventOrError, Output] { diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala index d9d69474d1..b363abd96f 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala @@ -1,16 +1,17 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws +import akka.NotUsed import akka.event.LoggingAdapter import akka.stream.scaladsl.Flow import scala.concurrent.duration.FiniteDuration import akka.stream.stage._ import akka.http.impl.util.Timestamp import akka.http.impl.engine.ws.FrameHandler._ -import Websocket.Tick +import WebSocket.Tick import akka.http.impl.engine.ws.FrameHandler.UserHandlerErredOut /** @@ -45,7 +46,7 @@ private[http] class FrameOutHandler(serverSide: Boolean, _closeTimeout: FiniteDu become(new WaitingForPeerCloseFrame()) ctx.push(FrameEvent.closeFrame(Protocol.CloseCodes.Regular)) case UserHandlerErredOut(e) ⇒ - log.error(e, s"Websocket handler failed with ${e.getMessage}") + log.error(e, s"WebSocket handler failed with ${e.getMessage}") become(new WaitingForPeerCloseFrame()) ctx.push(FrameEvent.closeFrame(Protocol.CloseCodes.UnexpectedCondition, "internal error")) case Tick ⇒ ctx.pull() // ignore @@ -64,7 +65,7 @@ private[http] class FrameOutHandler(serverSide: Boolean, _closeTimeout: FiniteDu def onPush(elem: AnyRef, ctx: Context[FrameStart]): SyncDirective = elem match { case UserHandlerCompleted ⇒ sendOutLastFrame(ctx) case UserHandlerErredOut(e) ⇒ - log.error(e, s"Websocket handler failed while waiting for handler completion with ${e.getMessage}") + log.error(e, s"WebSocket handler failed while waiting for handler completion with ${e.getMessage}") sendOutLastFrame(ctx) case start: FrameStart ⇒ ctx.push(start) case _ ⇒ ctx.pull() // ignore @@ -145,6 +146,6 @@ private[http] class FrameOutHandler(serverSide: Boolean, _closeTimeout: FiniteDu private[http] object FrameOutHandler { type Input = AnyRef - def create(serverSide: Boolean, closeTimeout: FiniteDuration, log: LoggingAdapter): Flow[Input, FrameStart, Unit] = + def create(serverSide: Boolean, closeTimeout: FiniteDuration, log: LoggingAdapter): Flow[Input, FrameStart, NotUsed] = Flow[Input].transform(() ⇒ new FrameOutHandler(serverSide, closeTimeout, log)) } \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Handshake.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Handshake.scala index e0909b08f8..2e3d495fd6 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Handshake.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Handshake.scala @@ -1,34 +1,30 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws import java.util.Random - import scala.collection.immutable import scala.collection.immutable.Seq import scala.reflect.ClassTag - -import akka.stream.scaladsl.Flow - import akka.http.impl.util._ - import akka.http.scaladsl.model.headers._ -import akka.http.scaladsl.model.ws.{ Message, UpgradeToWebsocket } +import akka.http.scaladsl.model.ws.{ Message, UpgradeToWebSocket } import akka.http.scaladsl.model._ +import akka.stream.{ Graph, FlowShape } /** - * Server-side implementation of the Websocket handshake + * Server-side implementation of the WebSocket handshake * * INTERNAL API */ private[http] object Handshake { - val CurrentWebsocketVersion = 13 + val CurrentWebSocketVersion = 13 object Server { /** - * Validates a client Websocket handshake. Returns either `Right(UpgradeToWebsocket)` or + * Validates a client WebSocket handshake. Returns either `Right(UpgradeToWebSocket)` or * `Left(MessageStartError)`. * * From: http://tools.ietf.org/html/rfc6455#section-4.2.1 @@ -66,7 +62,7 @@ private[http] object Handshake { * to speak. The interpretation of this header field is discussed * in Section 9.1. */ - def websocketUpgrade(headers: List[HttpHeader], hostHeaderPresent: Boolean): Option[UpgradeToWebsocket] = { + def websocketUpgrade(headers: List[HttpHeader], hostHeaderPresent: Boolean): Option[UpgradeToWebSocket] = { def find[T <: HttpHeader: ClassTag]: Option[T] = headers.collectFirst { case t: T ⇒ t @@ -87,24 +83,24 @@ private[http] object Handshake { // FIXME See #18709 // val extensions = find[`Sec-WebSocket-Extensions`] - if (upgrade.exists(_.hasWebsocket) && + if (upgrade.exists(_.hasWebSocket) && connection.exists(_.hasUpgrade) && - version.exists(_.hasVersion(CurrentWebsocketVersion)) && + version.exists(_.hasVersion(CurrentWebSocketVersion)) && key.exists(k ⇒ k.isValid)) { - val header = new UpgradeToWebsocketLowLevel { + val header = new UpgradeToWebSocketLowLevel { def requestedProtocols: Seq[String] = clientSupportedSubprotocols - def handle(handler: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]], subprotocol: Option[String]): HttpResponse = { + def handle(handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]], subprotocol: Option[String]): HttpResponse = { require(subprotocol.forall(chosen ⇒ clientSupportedSubprotocols.contains(chosen)), s"Tried to choose invalid subprotocol '$subprotocol' which wasn't offered by the client: [${requestedProtocols.mkString(", ")}]") buildResponse(key.get, handler, subprotocol) } - def handleFrames(handlerFlow: Flow[FrameEvent, FrameEvent, Any], subprotocol: Option[String]): HttpResponse = + def handleFrames(handlerFlow: Graph[FlowShape[FrameEvent, FrameEvent], Any], subprotocol: Option[String]): HttpResponse = handle(Left(handlerFlow), subprotocol) - override def handleMessages(handlerFlow: Flow[Message, Message, Any], subprotocol: Option[String] = None): HttpResponse = + override def handleMessages(handlerFlow: Graph[FlowShape[Message, Message], Any], subprotocol: Option[String] = None): HttpResponse = handle(Right(handlerFlow), subprotocol) } Some(header) @@ -130,7 +126,7 @@ private[http] object Handshake { concatenated value to obtain a 20-byte value and base64- encoding (see Section 4 of [RFC4648]) this 20-byte hash. 
*/ - def buildResponse(key: `Sec-WebSocket-Key`, handler: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]], subprotocol: Option[String]): HttpResponse = + def buildResponse(key: `Sec-WebSocket-Key`, handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]], subprotocol: Option[String]): HttpResponse = HttpResponse( StatusCodes.SwitchingProtocols, subprotocol.map(p ⇒ `Sec-WebSocket-Protocol`(Seq(p))).toList ::: @@ -138,11 +134,11 @@ private[http] object Handshake { UpgradeHeader, ConnectionUpgradeHeader, `Sec-WebSocket-Accept`.forKey(key), - UpgradeToWebsocketResponseHeader(handler))) + UpgradeToWebSocketResponseHeader(handler))) } object Client { - case class NegotiatedWebsocketSettings(subprotocol: Option[String]) + case class NegotiatedWebSocketSettings(subprotocol: Option[String]) /** * Builds a WebSocket handshake request. @@ -160,7 +156,7 @@ private[http] object Handshake { UpgradeHeader, ConnectionUpgradeHeader, key, - SecWebsocketVersionHeader) ++ protocol ++ extraHeaders + SecWebSocketVersionHeader) ++ protocol ++ extraHeaders (HttpRequest(HttpMethods.GET, uri.toRelative, headers), key) } @@ -169,7 +165,7 @@ private[http] object Handshake { * Tries to validate the HTTP response. Returns either Right(settings) or an error message if * the response cannot be validated. */ - def validateResponse(response: HttpResponse, subprotocols: Seq[String], key: `Sec-WebSocket-Key`): Either[String, NegotiatedWebsocketSettings] = { + def validateResponse(response: HttpResponse, subprotocols: Seq[String], key: `Sec-WebSocket-Key`): Either[String, NegotiatedWebSocketSettings] = { /* From http://tools.ietf.org/html/rfc6455#section-4.1 @@ -253,8 +249,8 @@ private[http] object Handshake { case None ⇒ val subs = response.header[`Sec-WebSocket-Protocol`].flatMap(_.protocols.headOption) - if (subprotocols.isEmpty && subs.isEmpty) Right(NegotiatedWebsocketSettings(None)) // no specific one selected - else if (subs.nonEmpty && subprotocols.contains(subs.get)) Right(NegotiatedWebsocketSettings(Some(subs.get))) + if (subprotocols.isEmpty && subs.isEmpty) Right(NegotiatedWebSocketSettings(None)) // no specific one selected + else if (subs.nonEmpty && subprotocols.contains(subs.get)) Right(NegotiatedWebSocketSettings(Some(subs.get))) else Left(s"response that indicated that the given subprotocol was not supported. (client supported: ${subprotocols.mkString(", ")}, server supported: $subs)") case Some(problem) ⇒ Left(problem) } @@ -263,5 +259,5 @@ private[http] object Handshake { val UpgradeHeader = Upgrade(List(UpgradeProtocol("websocket"))) val ConnectionUpgradeHeader = Connection(List("upgrade")) - val SecWebsocketVersionHeader = `Sec-WebSocket-Version`(Seq(CurrentWebsocketVersion)) + val SecWebSocketVersionHeader = `Sec-WebSocket-Version`(Seq(CurrentWebSocketVersion)) } diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala index bd6e886d24..c9d71ef441 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala @@ -1,24 +1,25 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws import java.util.Random +import akka.NotUsed import akka.stream.scaladsl.{ Keep, BidiFlow, Flow } import akka.stream.stage.{ SyncDirective, Context, StatefulStage } /** - * Implements Websocket Frame masking. + * Implements WebSocket Frame masking. * * INTERNAL API */ private[http] object Masking { - def apply(serverSide: Boolean, maskRandom: () ⇒ Random): BidiFlow[ /* net in */ FrameEvent, /* app out */ FrameEventOrError, /* app in */ FrameEvent, /* net out */ FrameEvent, Unit] = + def apply(serverSide: Boolean, maskRandom: () ⇒ Random): BidiFlow[ /* net in */ FrameEvent, /* app out */ FrameEventOrError, /* app in */ FrameEvent, /* net out */ FrameEvent, NotUsed] = BidiFlow.fromFlowsMat(unmaskIf(serverSide), maskIf(!serverSide, maskRandom))(Keep.none) - def maskIf(condition: Boolean, maskRandom: () ⇒ Random): Flow[FrameEvent, FrameEvent, Unit] = + def maskIf(condition: Boolean, maskRandom: () ⇒ Random): Flow[FrameEvent, FrameEvent, NotUsed] = if (condition) Flow[FrameEvent] .transform(() ⇒ new Masking(maskRandom())) // new random per materialization @@ -27,7 +28,7 @@ private[http] object Masking { case FrameError(ex) ⇒ throw ex } else Flow[FrameEvent] - def unmaskIf(condition: Boolean): Flow[FrameEvent, FrameEventOrError, Unit] = + def unmaskIf(condition: Boolean): Flow[FrameEvent, FrameEventOrError, NotUsed] = if (condition) Flow[FrameEvent].transform(() ⇒ new Unmasking()) else Flow[FrameEvent] diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/MessageToFrameRenderer.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/MessageToFrameRenderer.scala index 94d048ff0b..bd75f0a416 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/MessageToFrameRenderer.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/MessageToFrameRenderer.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws +import akka.NotUsed import akka.util.ByteString import akka.stream.scaladsl.{ Source, Flow } @@ -16,12 +17,12 @@ import akka.http.scaladsl.model.ws._ * INTERNAL API */ private[http] object MessageToFrameRenderer { - def create(serverSide: Boolean): Flow[Message, FrameStart, Unit] = { + def create(serverSide: Boolean): Flow[Message, FrameStart, NotUsed] = { def strictFrames(opcode: Opcode, data: ByteString): Source[FrameStart, _] = // FIXME: fragment? Source.single(FrameEvent.fullFrame(opcode, None, data, fin = true)) - def streamedFrames[M](opcode: Opcode, data: Source[ByteString, M]): Source[FrameStart, Unit] = + def streamedFrames[M](opcode: Opcode, data: Source[ByteString, M]): Source[FrameStart, NotUsed] = Source.single(FrameEvent.empty(opcode, fin = false)) ++ data.map(FrameEvent.fullFrame(Opcode.Continuation, None, _, fin = false)) ++ Source.single(FrameEvent.emptyLastContinuationFrame) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Protocol.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Protocol.scala index d6afc7301d..79a32035b5 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Protocol.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Protocol.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws /** - * Contains Websocket protocol constants + * Contains WebSocket protocol constants * * INTERNAL API */ diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Randoms.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Randoms.scala index 735a2e0c72..998c2be189 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Randoms.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Randoms.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketLowLevel.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketLowLevel.scala new file mode 100644 index 0000000000..d7e7c95553 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketLowLevel.scala @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.impl.engine.ws + +import akka.http.scaladsl.model.HttpResponse +import akka.http.scaladsl.model.ws.UpgradeToWebSocket +import akka.stream.{ Graph, FlowShape } + +/** + * Currently internal API to handle FrameEvents directly. + * + * INTERNAL API + */ +private[http] abstract class UpgradeToWebSocketLowLevel extends InternalCustomHeader("UpgradeToWebSocket") with UpgradeToWebSocket { + /** + * The low-level interface to create WebSocket server based on "frames". + * The user needs to handle control frames manually in this case. + * + * Returns a response to return in a request handler that will signal the + * low-level HTTP implementation to upgrade the connection to WebSocket and + * use the supplied handler to handle incoming WebSocket frames. + * + * INTERNAL API (for now) + */ + private[http] def handleFrames(handlerFlow: Graph[FlowShape[FrameEvent, FrameEvent], Any], subprotocol: Option[String] = None): HttpResponse +} diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketsResponseHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketsResponseHeader.scala new file mode 100644 index 0000000000..7d1151a452 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebSocketsResponseHeader.scala @@ -0,0 +1,18 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.impl.engine.ws + +import akka.http.scaladsl.model.headers.CustomHeader +import akka.http.scaladsl.model.ws.Message +import akka.stream.{ Graph, FlowShape } + +private[http] final case class UpgradeToWebSocketResponseHeader(handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]]) + extends InternalCustomHeader("UpgradeToWebSocketResponseHeader") + +private[http] abstract class InternalCustomHeader(val name: String) extends CustomHeader { + final def renderInRequests = false + final def renderInResponses = false + def value: String = "" +} diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketLowLevel.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketLowLevel.scala deleted file mode 100644 index 82716d58a0..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketLowLevel.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. 
- */ - -package akka.http.impl.engine.ws - -import akka.http.scaladsl.model.HttpResponse -import akka.http.scaladsl.model.ws.UpgradeToWebsocket -import akka.stream.scaladsl.Flow - -/** - * Currently internal API to handle FrameEvents directly. - * - * INTERNAL API - */ -private[http] abstract class UpgradeToWebsocketLowLevel extends InternalCustomHeader("UpgradeToWebsocket") with UpgradeToWebsocket { - /** - * The low-level interface to create Websocket server based on "frames". - * The user needs to handle control frames manually in this case. - * - * Returns a response to return in a request handler that will signal the - * low-level HTTP implementation to upgrade the connection to Websocket and - * use the supplied handler to handle incoming Websocket frames. - * - * INTERNAL API (for now) - */ - private[http] def handleFrames(handlerFlow: Flow[FrameEvent, FrameEvent, Any], subprotocol: Option[String] = None): HttpResponse -} diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketsResponseHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketsResponseHeader.scala deleted file mode 100644 index 37970943b9..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/UpgradeToWebsocketsResponseHeader.scala +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.impl.engine.ws - -import akka.http.scaladsl.model.headers.CustomHeader -import akka.http.scaladsl.model.ws.Message -import akka.stream.scaladsl.Flow - -private[http] final case class UpgradeToWebsocketResponseHeader(handler: Either[Flow[FrameEvent, FrameEvent, Any], Flow[Message, Message, Any]]) - extends InternalCustomHeader("UpgradeToWebsocketResponseHeader") - -private[http] abstract class InternalCustomHeader(val name: String) extends CustomHeader { - override def suppressRendering: Boolean = true - - def value(): String = "" -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Decoder.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Decoder.scala index 6cc6f655aa..fdb51fb9a5 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Decoder.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Decoder.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws @@ -17,7 +17,7 @@ import scala.util.Try * * which is licensed under this license: * - * Copyright (c) 2008-2010 Bjoern Hoehrmann + * Copyright (C) 2008-2016 Bjoern Hoehrmann * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Encoder.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Encoder.scala index 1b6c517bab..b01985a844 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Encoder.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Utf8Encoder.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Websocket.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocket.scala similarity index 87% rename from akka-http-core/src/main/scala/akka/http/impl/engine/ws/Websocket.scala rename to akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocket.scala index ee64d95c57..63a4e5c3ea 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Websocket.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocket.scala @@ -1,10 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws import java.util.Random +import akka.NotUsed import akka.event.LoggingAdapter import akka.util.ByteString import scala.concurrent.duration._ @@ -19,7 +20,7 @@ import akka.stream.impl.fusing.SubSource * * Defines components of the websocket stack. */ -private[http] object Websocket { +private[http] object WebSocket { import FrameHandler._ /** @@ -28,20 +29,20 @@ private[http] object Websocket { def stack(serverSide: Boolean, maskingRandomFactory: () ⇒ Random, closeTimeout: FiniteDuration = 3.seconds, - log: LoggingAdapter): BidiFlow[FrameEvent, Message, Message, FrameEvent, Unit] = + log: LoggingAdapter): BidiFlow[FrameEvent, Message, Message, FrameEvent, NotUsed] = masking(serverSide, maskingRandomFactory) atop frameHandling(serverSide, closeTimeout, log) atop messageAPI(serverSide, closeTimeout) /** The lowest layer that implements the binary protocol */ - def framing: BidiFlow[ByteString, FrameEvent, FrameEvent, ByteString, Unit] = + def framing: BidiFlow[ByteString, FrameEvent, FrameEvent, ByteString, NotUsed] = BidiFlow.fromFlows( Flow[ByteString].via(FrameEventParser), Flow[FrameEvent].transform(() ⇒ new FrameEventRenderer)) .named("ws-framing") /** The layer that handles masking using the rules defined in the specification */ - def masking(serverSide: Boolean, maskingRandomFactory: () ⇒ Random): BidiFlow[FrameEvent, FrameEventOrError, FrameEvent, FrameEvent, Unit] = + def masking(serverSide: Boolean, maskingRandomFactory: () ⇒ Random): BidiFlow[FrameEvent, FrameEventOrError, FrameEvent, FrameEvent, NotUsed] = Masking(serverSide, maskingRandomFactory) .named("ws-masking") @@ -51,7 +52,7 @@ private[http] object Websocket { */ def frameHandling(serverSide: Boolean = true, closeTimeout: FiniteDuration, - log: LoggingAdapter): BidiFlow[FrameEventOrError, FrameHandler.Output, FrameOutHandler.Input, FrameStart, Unit] = + log: LoggingAdapter): BidiFlow[FrameEventOrError, FrameHandler.Output, FrameOutHandler.Input, FrameStart, NotUsed] = BidiFlow.fromFlows( FrameHandler.create(server = serverSide), FrameOutHandler.create(serverSide, closeTimeout, log)) @@ -61,8 +62,8 @@ private[http] object Websocket { * The layer that provides the high-level user facing API on top of frame handling. 
*/ def messageAPI(serverSide: Boolean, - closeTimeout: FiniteDuration): BidiFlow[FrameHandler.Output, Message, Message, FrameOutHandler.Input, Unit] = { - /** Completes this branch of the flow if no more messages are expected and converts close codes into errors */ + closeTimeout: FiniteDuration): BidiFlow[FrameHandler.Output, Message, Message, FrameOutHandler.Input, NotUsed] = { + /* Completes this branch of the flow if no more messages are expected and converts close codes into errors */ class PrepareForUserHandler extends PushStage[MessagePart, MessagePart] { var inMessage = false def onPush(elem: MessagePart, ctx: Context[MessagePart]): SyncDirective = elem match { @@ -80,8 +81,8 @@ private[http] object Websocket { } } - /** Collects user-level API messages from MessageDataParts */ - val collectMessage: Flow[MessageDataPart, Message, Unit] = + /* Collects user-level API messages from MessageDataParts */ + val collectMessage: Flow[MessageDataPart, Message, NotUsed] = Flow[MessageDataPart] .prefixAndTail(1) .mapConcat { @@ -110,7 +111,7 @@ private[http] object Websocket { }) :: Nil } - def prepareMessages: Flow[MessagePart, Message, Unit] = + def prepareMessages: Flow[MessagePart, Message, NotUsed] = Flow[MessagePart] .transform(() ⇒ new PrepareForUserHandler) .splitWhen(_.isMessageEnd) // FIXME using splitAfter from #16885 would simplify protocol a lot @@ -121,7 +122,7 @@ private[http] object Websocket { .concatSubstreams .named("ws-prepare-messages") - def renderMessages: Flow[Message, FrameStart, Unit] = + def renderMessages: Flow[Message, FrameStart, NotUsed] = MessageToFrameRenderer.create(serverSide) .named("ws-render-messages") @@ -172,13 +173,12 @@ private[http] object Websocket { } } }) - val pullIn = () ⇒ pull(in) + val pullIn = () ⇒ tryPull(in) setHandler(bypass, eagerTerminateOutput) setHandler(user, ignoreTerminateOutput) override def preStart(): Unit = { - super.preStart() pullIn() } } @@ -196,14 +196,19 @@ private[http] object Websocket { def createLogic(effectiveAttributes: Attributes) = new GraphStageLogic(shape) { - passAlong(bypass, out, doFinish = true, doFail = true) - passAlong(user, out, doFinish = false, doFail = false) + class PassAlong[T <: AnyRef](from: Inlet[T]) extends InHandler with (() ⇒ Unit) { + override def apply(): Unit = tryPull(from) + override def onPush(): Unit = emit(out, grab(from), this) + override def onUpstreamFinish(): Unit = + if (isClosed(bypass) && isClosed(user)) completeStage() + } + setHandler(bypass, new PassAlong(bypass)) + setHandler(user, new PassAlong(user)) passAlong(tick, out, doFinish = false, doFail = false) setHandler(out, eagerTerminateOutput) override def preStart(): Unit = { - super.preStart() pull(bypass) pull(user) pull(tick) diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebsocketClientBlueprint.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocketClientBlueprint.scala similarity index 81% rename from akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebsocketClientBlueprint.scala rename to akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocketClientBlueprint.scala index b4523a4d70..cae2f15287 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebsocketClientBlueprint.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/engine/ws/WebSocketClientBlueprint.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
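The `Unit` → `NotUsed` change above runs through every layer of the stack, but the layers still compose the same way: each is a `BidiFlow`, and they are stacked with `atop`. A minimal sketch of that composition style, assuming two placeholder layers (`codec` and `framing` here are illustrative stand-ins, not the real WebSocket layers):

```
import akka.NotUsed
import akka.stream.scaladsl.{ BidiFlow, Flow }
import akka.util.ByteString

// Outgoing elements are encoded to bytes, incoming bytes decoded back.
val codec: BidiFlow[String, ByteString, ByteString, String, NotUsed] =
  BidiFlow.fromFlows(
    Flow[String].map(ByteString(_)),
    Flow[ByteString].map(_.utf8String))

// A pass-through stand-in for a framing layer.
val framing: BidiFlow[ByteString, ByteString, ByteString, ByteString, NotUsed] =
  BidiFlow.fromFlows(Flow[ByteString], Flow[ByteString])

// `atop` stacks the layers; the combined materialized value stays NotUsed.
val stack: BidiFlow[String, ByteString, ByteString, String, NotUsed] =
  codec atop framing
```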
*/ package akka.http.impl.engine.ws +import akka.NotUsed import akka.http.scaladsl.model.ws._ import scala.concurrent.{ Future, Promise } @@ -16,7 +17,7 @@ import akka.stream.BidiShape import akka.stream.io.{ SessionBytes, SendBytes, SslTlsInbound } import akka.stream.scaladsl._ -import akka.http.ClientConnectionSettings +import akka.http.scaladsl.settings.ClientConnectionSettings import akka.http.scaladsl.Http import akka.http.scaladsl.model.{ HttpResponse, HttpMethods } import akka.http.scaladsl.model.headers.Host @@ -25,29 +26,29 @@ import akka.http.impl.engine.parsing.HttpMessageParser.StateResult import akka.http.impl.engine.parsing.ParserOutput.{ RemainingBytes, ResponseStart, NeedMoreData } import akka.http.impl.engine.parsing.{ ParserOutput, HttpHeaderParser, HttpResponseParser } import akka.http.impl.engine.rendering.{ HttpRequestRendererFactory, RequestRenderingContext } -import akka.http.impl.engine.ws.Handshake.Client.NegotiatedWebsocketSettings +import akka.http.impl.engine.ws.Handshake.Client.NegotiatedWebSocketSettings import akka.http.impl.util.StreamUtils -object WebsocketClientBlueprint { +object WebSocketClientBlueprint { /** - * Returns a WebsocketClientLayer that can be materialized once. + * Returns a WebSocketClientLayer that can be materialized once. */ - def apply(request: WebsocketRequest, + def apply(request: WebSocketRequest, settings: ClientConnectionSettings, - log: LoggingAdapter): Http.WebsocketClientLayer = + log: LoggingAdapter): Http.WebSocketClientLayer = (simpleTls.atopMat(handshake(request, settings, log))(Keep.right) atop - Websocket.framing atop - Websocket.stack(serverSide = false, maskingRandomFactory = settings.websocketRandomFactory, log = log)).reversed + WebSocket.framing atop + WebSocket.stack(serverSide = false, maskingRandomFactory = settings.websocketRandomFactory, log = log)).reversed /** * A bidi flow that injects and inspects the WS handshake and then goes out of the way. This BidiFlow * can only be materialized once. 
*/ - def handshake(request: WebsocketRequest, + def handshake(request: WebSocketRequest, settings: ClientConnectionSettings, - log: LoggingAdapter): BidiFlow[ByteString, ByteString, ByteString, ByteString, Future[WebsocketUpgradeResponse]] = { + log: LoggingAdapter): BidiFlow[ByteString, ByteString, ByteString, ByteString, Future[WebSocketUpgradeResponse]] = { import request._ - val result = Promise[WebsocketUpgradeResponse]() + val result = Promise[WebSocketUpgradeResponse]() val valve = StreamUtils.OneTimeValve() @@ -84,7 +85,7 @@ object WebsocketClientBlueprint { case ResponseStart(status, protocol, headers, entity, close) ⇒ val response = HttpResponse(status, headers, protocol = protocol) Handshake.Client.validateResponse(response, subprotocol.toList, key) match { - case Right(NegotiatedWebsocketSettings(protocol)) ⇒ + case Right(NegotiatedWebSocketSettings(protocol)) ⇒ result.success(ValidUpgrade(response, protocol)) become(transparent) @@ -95,11 +96,15 @@ object WebsocketClientBlueprint { parser.onPull() match { case NeedMoreData ⇒ ctx.pull() case RemainingBytes(bytes) ⇒ ctx.push(bytes) + case other ⇒ + throw new IllegalStateException(s"unexpected element of type ${other.getClass}") } case Left(problem) ⇒ - result.success(InvalidUpgradeResponse(response, s"Websocket server at $uri returned $problem")) - ctx.fail(throw new IllegalArgumentException(s"Websocket upgrade did not finish because of '$problem'")) + result.success(InvalidUpgradeResponse(response, s"WebSocket server at $uri returned $problem")) + ctx.fail(throw new IllegalArgumentException(s"WebSocket upgrade did not finish because of '$problem'")) } + case other ⇒ + throw new IllegalStateException(s"unexpected element of type ${other.getClass}") } } } @@ -129,7 +134,7 @@ object WebsocketClientBlueprint { }) mapMaterializedValue (_ ⇒ result.future) } - def simpleTls: BidiFlow[SslTlsInbound, ByteString, ByteString, SendBytes, Unit] = + def simpleTls: BidiFlow[SslTlsInbound, ByteString, ByteString, SendBytes, NotUsed] = BidiFlow.fromFlowsMat( Flow[SslTlsInbound].collect { case SessionBytes(_, bytes) ⇒ bytes }, Flow[ByteString].map(SendBytes))(Keep.none) diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/JavaQuery.scala b/akka-http-core/src/main/scala/akka/http/impl/model/JavaQuery.scala index 878856605b..e509d4ca2e 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/JavaQuery.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/JavaQuery.scala @@ -1,9 +1,10 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
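`handshake` above exposes its result (`Future[WebSocketUpgradeResponse]`) by completing a `Promise` from inside the stream and substituting its future as the materialized value, which is also why the blueprint can only be materialized once. A stripped-down sketch of that technique on a plain `Flow` (the `flowWithFirstElement` helper is invented for illustration):

```
import scala.concurrent.{ Future, Promise }
import akka.stream.scaladsl.Flow

// Sketch only: materializes as a Future completed with the first element
// that passes through. Because the Promise lives outside the stream, the
// resulting Flow is only meaningful for a single materialization.
def flowWithFirstElement[T]: Flow[T, T, Future[T]] = {
  val result = Promise[T]()
  Flow[T]
    .map { elem ⇒
      result.trySuccess(elem)
      elem
    }
    .mapMaterializedValue(_ ⇒ result.future)
}
```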
*/ package akka.http.impl.model +import java.util.Optional import java.{ util ⇒ ju } import akka.http.impl.model.parser.CharacterClasses import akka.http.impl.util.StringRendering @@ -11,15 +12,16 @@ import akka.http.javadsl.model.HttpCharset import akka.http.javadsl.{ model ⇒ jm } import akka.http.scaladsl.model.UriRendering import akka.http.scaladsl.{ model ⇒ sm } -import akka.japi.{ Pair, Option } +import akka.japi.Pair import akka.parboiled2.CharPredicate import scala.collection.JavaConverters._ import akka.http.impl.util.JavaMapping.Implicits._ +import scala.compat.java8.OptionConverters._ /** INTERNAL API */ case class JavaQuery(query: sm.Uri.Query) extends jm.Query { - override def get(key: String): Option[String] = query.get(key) + override def get(key: String): Optional[String] = query.get(key).asJava override def toMap: ju.Map[String, String] = query.toMap.asJava override def toList: ju.List[Pair[String, String]] = query.map(_.asJava).asJava override def getOrElse(key: String, _default: String): String = query.getOrElse(key, _default) diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/JavaUri.scala b/akka-http-core/src/main/scala/akka/http/impl/model/JavaUri.scala index 7a947e80d2..f356849635 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/JavaUri.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/JavaUri.scala @@ -1,16 +1,17 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model import java.nio.charset.Charset +import java.util.Optional import java.{ lang ⇒ jl } import akka.http.scaladsl.model.Uri.ParsingMode -import akka.japi.Option import akka.http.javadsl.{ model ⇒ jm } import akka.http.scaladsl.{ model ⇒ sm } import akka.http.impl.util.JavaMapping.Implicits._ +import scala.compat.java8.OptionConverters._ /** INTERNAL API */ case class JavaUri(uri: sm.Uri) extends jm.Uri { @@ -36,12 +37,12 @@ case class JavaUri(uri: sm.Uri) extends jm.Uri { gatherSegments(uri.path).asJava } - def rawQueryString: Option[String] = uri.rawQueryString - def queryString(charset: Charset): Option[String] = uri.queryString(charset) + def rawQueryString: Optional[String] = uri.rawQueryString.asJava + def queryString(charset: Charset): Optional[String] = uri.queryString(charset).asJava def query: jm.Query = uri.query().asJava def query(charset: Charset, mode: ParsingMode): jm.Query = uri.query(charset, mode).asJava - def fragment: Option[String] = uri.fragment + def fragment: Optional[String] = uri.fragment.asJava // Modification methods @@ -69,7 +70,7 @@ case class JavaUri(uri: sm.Uri) extends jm.Uri { u.withPath(newPath) } - def fragment(fragment: Option[String]): jm.Uri = t(_.copy(fragment = fragment)) + def fragment(fragment: Optional[String]): jm.Uri = t(_.copy(fragment = fragment.asScala)) def fragment(fragment: String): jm.Uri = t(_.withFragment(fragment)) override def toString: String = uri.toString diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptCharsetHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptCharsetHeader.scala index 811ddb9799..4d8d740b60 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptCharsetHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptCharsetHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
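`JavaQuery` and `JavaUri` above switch the Java-facing API from `akka.japi.Option` to `java.util.Optional`, with the conversions coming from scala-java8-compat's `OptionConverters`, which adds `asJava`/`asScala` extension methods. A small illustration (the values are made up):

```
import java.util.Optional
import scala.compat.java8.OptionConverters._

val fragment: Option[String]       = Some("section-1")
val javaFragment: Optional[String] = fragment.asJava       // Optional[section-1]
val roundTripped: Option[String]   = javaFragment.asScala  // Some(section-1)
val empty: Optional[String]        = (None: Option[String]).asJava
```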
*/ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptEncodingHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptEncodingHeader.scala index dc3ae3b596..9fa494f528 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptEncodingHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptEncodingHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptHeader.scala index 0579637df6..52b477a22b 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptLanguageHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptLanguageHeader.scala index 3c99ef7ab4..17398535a4 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptLanguageHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/AcceptLanguageHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/Base64Parsing.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/Base64Parsing.scala index 022c89993e..3de083f213 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/Base64Parsing.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/Base64Parsing.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CacheControlHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CacheControlHeader.scala index 75e2dedf58..9b40305c70 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CacheControlHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CacheControlHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CharacterClasses.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CharacterClasses.scala index 0d55dd7145..26e3cbda13 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CharacterClasses.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CharacterClasses.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonActions.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonActions.scala index 46ddddb41a..5388a4ecd4 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonActions.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonActions.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonRules.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonRules.scala index 38399432fb..5217bc332f 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonRules.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/CommonRules.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentDispositionHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentDispositionHeader.scala index 283850347c..30cbc80427 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentDispositionHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentDispositionHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentTypeHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentTypeHeader.scala index 9b2cba6a5c..3a80228eac 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentTypeHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/ContentTypeHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/HeaderParser.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/HeaderParser.scala index a84e3e2d7e..d765e91aaa 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/HeaderParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/HeaderParser.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser -import akka.http.ParserSettings -import akka.http.ParserSettings.CookieParsingMode +import akka.http.scaladsl.settings.ParserSettings +import akka.http.scaladsl.settings.ParserSettings.CookieParsingMode import akka.http.scaladsl.model.headers.HttpCookiePair import scala.util.control.NonFatal import akka.http.impl.util.SingletonException @@ -30,7 +30,7 @@ private[http] class HeaderParser(val input: ParserInput, settings: HeaderParser. 
with LinkHeader with SimpleHeaders with StringBuilding - with WebsocketHeaders { + with WebSocketHeaders { import CharacterClasses._ // http://www.rfc-editor.org/errata_search.php?rfc=7230 errata id 4189 @@ -156,7 +156,7 @@ private[http] object HeaderParser { "www-authenticate", "x-forwarded-for") - trait Settings { + abstract class Settings { def uriParsingMode: Uri.ParsingMode def cookieParsingMode: ParserSettings.CookieParsingMode } diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/IpAddressParsing.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/IpAddressParsing.scala index f4a313f5e2..fceda8d29d 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/IpAddressParsing.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/IpAddressParsing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/LinkHeader.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/LinkHeader.scala index 06ac655691..ca8b547ba0 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/LinkHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/LinkHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/SimpleHeaders.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/SimpleHeaders.scala index 6e8ed14d6d..be3f4f74f6 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/SimpleHeaders.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/SimpleHeaders.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/StringBuilding.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/StringBuilding.scala index d8b5e5e1cf..9ae10b44bd 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/StringBuilding.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/StringBuilding.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/UriParser.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/UriParser.scala index 9bb6077899..fd080f8a3c 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/UriParser.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/UriParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.model.parser diff --git a/akka-http-core/src/main/scala/akka/http/impl/model/parser/WebsocketHeaders.scala b/akka-http-core/src/main/scala/akka/http/impl/model/parser/WebSocketHeaders.scala similarity index 90% rename from akka-http-core/src/main/scala/akka/http/impl/model/parser/WebsocketHeaders.scala rename to akka-http-core/src/main/scala/akka/http/impl/model/parser/WebSocketHeaders.scala index ceb3b30254..280e449690 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/model/parser/WebsocketHeaders.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/model/parser/WebSocketHeaders.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser @@ -8,7 +8,7 @@ import akka.http.scaladsl.model.headers._ import akka.parboiled2._ // see grammar at http://tools.ietf.org/html/rfc6455#section-4.3 -private[parser] trait WebsocketHeaders { this: Parser with CommonRules with CommonActions ⇒ +private[parser] trait WebSocketHeaders { this: Parser with CommonRules with CommonActions ⇒ import CharacterClasses._ import Base64Parsing.rfc2045Alphabet @@ -44,7 +44,7 @@ private[parser] trait WebsocketHeaders { this: Parser with CommonRules with Comm private def extension = rule { `extension-token` ~ zeroOrMore(ws(";") ~ `extension-param`) ~> - ((name, params) ⇒ WebsocketExtension(name, Map(params: _*))) + ((name, params) ⇒ WebSocketExtension(name, Map(params: _*))) } private def `extension-token`: Rule1[String] = token private def `extension-param`: Rule1[(String, String)] = diff --git a/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala new file mode 100644 index 0000000000..ce4ba64ddd --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala @@ -0,0 +1,47 @@ +/** + * Copyright (C) 2009-2014 Typesafe Inc. 
+ */ + +package akka.http.impl.settings + +import java.util.Random + +import akka.http.impl.engine.ws.Randoms +import akka.http.impl.util._ +import akka.http.scaladsl.model.headers.`User-Agent` +import akka.http.scaladsl.settings.ParserSettings +import akka.io.Inet.SocketOption +import com.typesafe.config.Config + +import scala.collection.immutable +import scala.concurrent.duration.{ Duration, FiniteDuration } + +/** INTERNAL API */ +private[akka] final case class ClientConnectionSettingsImpl( + userAgentHeader: Option[`User-Agent`], + connectingTimeout: FiniteDuration, + idleTimeout: Duration, + requestHeaderSizeHint: Int, + websocketRandomFactory: () ⇒ Random, + socketOptions: immutable.Seq[SocketOption], + parserSettings: ParserSettings) + extends akka.http.scaladsl.settings.ClientConnectionSettings { + + require(connectingTimeout >= Duration.Zero, "connectingTimeout must be >= 0") + require(requestHeaderSizeHint > 0, "request-size-hint must be > 0") +} + +object ClientConnectionSettingsImpl extends SettingsCompanion[ClientConnectionSettingsImpl]("akka.http.client") { + def fromSubConfig(root: Config, inner: Config) = { + val c = inner.withFallback(root.getConfig(prefix)) + new ClientConnectionSettingsImpl( + userAgentHeader = c.getString("user-agent-header").toOption.map(`User-Agent`(_)), + connectingTimeout = c getFiniteDuration "connecting-timeout", + idleTimeout = c getPotentiallyInfiniteDuration "idle-timeout", + requestHeaderSizeHint = c getIntBytes "request-header-size-hint", + websocketRandomFactory = Randoms.SecureRandomInstances, // can currently only be overridden from code + socketOptions = SocketOptionSettings.fromSubConfig(root, c.getConfig("socket-options")), + parserSettings = ParserSettingsImpl.fromSubConfig(root, c.getConfig("parsing"))) + } + +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSettingsImpl.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSettingsImpl.scala new file mode 100644 index 0000000000..eb7544e47d --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSettingsImpl.scala @@ -0,0 +1,41 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
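`ClientConnectionSettingsImpl.fromSubConfig` above follows the pattern used by all the new settings companions: the inner config is backed by the companion's prefix section via `withFallback`, so explicitly supplied keys win and everything else falls back to the values under `akka.http.client`. A hedged illustration with plain Typesafe Config calls (`getFiniteDuration`/`getIntBytes` in the real code are internal enhancements; the standard `getDuration` is used here instead):

```
import java.util.concurrent.TimeUnit
import com.typesafe.config.{ Config, ConfigFactory }
import scala.concurrent.duration._

// Assumes akka-http's reference.conf is on the classpath so that
// "akka.http.client" resolves; the override value below is made up.
val root: Config  = ConfigFactory.load()
val inner: Config = ConfigFactory.parseString("connecting-timeout = 5s")
val c: Config     = inner.withFallback(root.getConfig("akka.http.client"))

val connectingTimeout: FiniteDuration =
  c.getDuration("connecting-timeout", TimeUnit.MILLISECONDS).millis
```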
+ */ + +package akka.http.impl.settings + +import akka.http.impl.util.SettingsCompanion +import akka.http.impl.util._ +import akka.http.scaladsl.settings.{ ConnectionPoolSettings, ClientConnectionSettings } +import com.typesafe.config.Config +import scala.concurrent.duration.Duration + +/** INTERNAL API */ +private[akka] final case class ConnectionPoolSettingsImpl( + val maxConnections: Int, + val maxRetries: Int, + val maxOpenRequests: Int, + val pipeliningLimit: Int, + val idleTimeout: Duration, + val connectionSettings: ClientConnectionSettings) + extends ConnectionPoolSettings { + + require(maxConnections > 0, "max-connections must be > 0") + require(maxRetries >= 0, "max-retries must be >= 0") + require(maxOpenRequests > 0 && (maxOpenRequests & (maxOpenRequests - 1)) == 0, "max-open-requests must be a power of 2 > 0") + require(pipeliningLimit > 0, "pipelining-limit must be > 0") + require(idleTimeout >= Duration.Zero, "idle-timeout must be >= 0") + +} + +object ConnectionPoolSettingsImpl extends SettingsCompanion[ConnectionPoolSettingsImpl]("akka.http.host-connection-pool") { + def fromSubConfig(root: Config, c: Config) = { + ConnectionPoolSettingsImpl( + c getInt "max-connections", + c getInt "max-retries", + c getInt "max-open-requests", + c getInt "pipelining-limit", + c getPotentiallyInfiniteDuration "idle-timeout", + ClientConnectionSettingsImpl.fromSubConfig(root, c.getConfig("client"))) + } +} diff --git a/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSetup.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSetup.scala new file mode 100644 index 0000000000..81a94e4685 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/ConnectionPoolSetup.scala @@ -0,0 +1,15 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ + +package akka.http.impl.settings + +import akka.event.LoggingAdapter +import akka.http.scaladsl.ConnectionContext +import akka.http.scaladsl.settings.ConnectionPoolSettings + +/** INTERNAL API */ +private[akka] final case class ConnectionPoolSetup( + settings: ConnectionPoolSettings, + connectionContext: ConnectionContext = ConnectionContext.noEncryption(), + log: LoggingAdapter) \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/settings/HostConnectionPoolSetup.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/HostConnectionPoolSetup.scala new file mode 100644 index 0000000000..7e74055eb9 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/HostConnectionPoolSetup.scala @@ -0,0 +1,9 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ + +package akka.http.impl.settings + +/** INTERNAL API */ +final case class HostConnectionPoolSetup(host: String, port: Int, setup: ConnectionPoolSetup) + diff --git a/akka-http-core/src/main/scala/akka/http/impl/settings/ParserSettingsImpl.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/ParserSettingsImpl.scala new file mode 100644 index 0000000000..4ba0734caf --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/ParserSettingsImpl.scala @@ -0,0 +1,77 @@ +/** + * Copyright (C) 2009-2014 Typesafe Inc. 
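The `max-open-requests` requirement in `ConnectionPoolSettingsImpl` above uses the standard bit trick for a power-of-two check: a positive integer is a power of two exactly when it has a single bit set, so `n & (n - 1)` clears that bit and must yield zero. For example:

```
def isPowerOfTwo(n: Int): Boolean = n > 0 && (n & (n - 1)) == 0

assert(isPowerOfTwo(32))   // 100000 & 011111 == 0
assert(!isPowerOfTwo(33))  // 100001 & 100000 != 0
```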
+ */ + +package akka.http.impl.settings + +import akka.http.scaladsl.settings.ParserSettings +import akka.http.scaladsl.settings.ParserSettings.{ ErrorLoggingVerbosity, CookieParsingMode } +import com.typesafe.config.Config +import scala.collection.JavaConverters._ +import akka.http.scaladsl.model.{ StatusCode, HttpMethod, Uri } +import akka.http.impl.util._ + +/** INTERNAL API */ +private[akka] final case class ParserSettingsImpl( + maxUriLength: Int, + maxMethodLength: Int, + maxResponseReasonLength: Int, + maxHeaderNameLength: Int, + maxHeaderValueLength: Int, + maxHeaderCount: Int, + maxContentLength: Long, + maxChunkExtLength: Int, + maxChunkSize: Int, + uriParsingMode: Uri.ParsingMode, + cookieParsingMode: CookieParsingMode, + illegalHeaderWarnings: Boolean, + errorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity, + headerValueCacheLimits: Map[String, Int], + includeTlsSessionInfoHeader: Boolean, + customMethods: String ⇒ Option[HttpMethod], + customStatusCodes: Int ⇒ Option[StatusCode]) + extends akka.http.scaladsl.settings.ParserSettings { + + require(maxUriLength > 0, "max-uri-length must be > 0") + require(maxMethodLength > 0, "max-method-length must be > 0") + require(maxResponseReasonLength > 0, "max-response-reason-length must be > 0") + require(maxHeaderNameLength > 0, "max-header-name-length must be > 0") + require(maxHeaderValueLength > 0, "max-header-value-length must be > 0") + require(maxHeaderCount > 0, "max-header-count must be > 0") + require(maxContentLength > 0, "max-content-length must be > 0") + require(maxChunkExtLength > 0, "max-chunk-ext-length must be > 0") + require(maxChunkSize > 0, "max-chunk-size must be > 0") + + override val defaultHeaderValueCacheLimit: Int = headerValueCacheLimits("default") + + override def headerValueCacheLimit(headerName: String): Int = + headerValueCacheLimits.getOrElse(headerName, defaultHeaderValueCacheLimit) +} + +object ParserSettingsImpl extends SettingsCompanion[ParserSettingsImpl]("akka.http.parsing") { + def fromSubConfig(root: Config, inner: Config) = { + val c = inner.withFallback(root.getConfig(prefix)) + val cacheConfig = c getConfig "header-cache" + + new ParserSettingsImpl( + c getIntBytes "max-uri-length", + c getIntBytes "max-method-length", + c getIntBytes "max-response-reason-length", + c getIntBytes "max-header-name-length", + c getIntBytes "max-header-value-length", + c getIntBytes "max-header-count", + c getPossiblyInfiniteBytes "max-content-length", + c getIntBytes "max-chunk-ext-length", + c getIntBytes "max-chunk-size", + Uri.ParsingMode(c getString "uri-parsing-mode"), + CookieParsingMode(c getString "cookie-parsing-mode"), + c getBoolean "illegal-header-warnings", + ErrorLoggingVerbosity(c getString "error-logging-verbosity"), + cacheConfig.entrySet.asScala.map(kvp ⇒ kvp.getKey -> cacheConfig.getInt(kvp.getKey))(collection.breakOut), + c getBoolean "tls-session-info-header", + _ ⇒ None, + _ ⇒ None) + } + +} + diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingSettings.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/RoutingSettingsImpl.scala similarity index 62% rename from akka-http/src/main/scala/akka/http/scaladsl/server/RoutingSettings.scala rename to akka-http-core/src/main/scala/akka/http/impl/settings/RoutingSettingsImpl.scala index 300156aad7..8e7462179d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingSettings.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/RoutingSettingsImpl.scala @@ -2,23 +2,23 @@ * Copyright (C) 
2009-2014 Typesafe Inc. */ -package akka.http.scaladsl.server +package akka.http.impl.settings -import com.typesafe.config.Config -import akka.actor.ActorRefFactory import akka.http.impl.util._ +import com.typesafe.config.Config -case class RoutingSettings( +/** INTERNAL API */ +final case class RoutingSettingsImpl( verboseErrorMessages: Boolean, fileGetConditional: Boolean, renderVanityFooter: Boolean, rangeCountLimit: Int, rangeCoalescingThreshold: Long, decodeMaxBytesPerChunk: Int, - fileIODispatcher: String) + fileIODispatcher: String) extends akka.http.scaladsl.settings.RoutingSettings -object RoutingSettings extends SettingsCompanion[RoutingSettings]("akka.http.routing") { - def fromSubConfig(root: Config, c: Config) = apply( +object RoutingSettingsImpl extends SettingsCompanion[RoutingSettingsImpl]("akka.http.routing") { + def fromSubConfig(root: Config, c: Config) = new RoutingSettingsImpl( c getBoolean "verbose-error-messages", c getBoolean "file-get-conditional", c getBoolean "render-vanity-footer", @@ -27,6 +27,4 @@ object RoutingSettings extends SettingsCompanion[RoutingSettings]("akka.http.rou c getIntBytes "decode-max-bytes-per-chunk", c getString "file-io-dispatcher") - implicit def default(implicit refFactory: ActorRefFactory): RoutingSettings = - apply(actorSystem) } diff --git a/akka-http-core/src/main/scala/akka/http/ServerSettings.scala b/akka-http-core/src/main/scala/akka/http/impl/settings/ServerSettingsImpl.scala similarity index 57% rename from akka-http-core/src/main/scala/akka/http/ServerSettings.scala rename to akka-http-core/src/main/scala/akka/http/impl/settings/ServerSettingsImpl.scala index fd2a887b4d..ce818b341d 100644 --- a/akka-http-core/src/main/scala/akka/http/ServerSettings.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/settings/ServerSettingsImpl.scala @@ -1,20 +1,21 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2016 Typesafe Inc. 
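`RoutingSettingsImpl` above follows the same split applied throughout this change set: the public surface lives in an `akka.http.scaladsl.settings.*` trait, while the case class that parses configuration and enforces invariants becomes an internal `*Impl`. A schematic sketch of that shape (the `MySettings` names and fields are invented for illustration):

```
// Public API: a read-only view of the settings.
trait MySettings {
  def verboseErrorMessages: Boolean
  def rangeCountLimit: Int
}

// The implementation case class carries the parsed values and the invariant
// checks; in akka-http it would additionally be marked private[akka] /
// INTERNAL API and be constructed from config by its companion.
final case class MySettingsImpl(
  verboseErrorMessages: Boolean,
  rangeCountLimit: Int) extends MySettings {
  require(rangeCountLimit > 0, "range-count-limit must be > 0")
}
```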
*/ -package akka.http +package akka.http.impl.settings import java.util.Random import akka.http.impl.engine.ws.Randoms +import akka.http.scaladsl.settings.{ ServerSettings, ParserSettings } import com.typesafe.config.Config import scala.language.implicitConversions import scala.collection.immutable import scala.concurrent.duration._ +import akka.http.javadsl.{ settings ⇒ js } import akka.ConfigurationException -import akka.actor.{ ActorSystem, ActorRefFactory } import akka.io.Inet.SocketOption import akka.http.impl.util._ @@ -22,7 +23,8 @@ import akka.http.impl.util._ import akka.http.scaladsl.model.HttpHeader import akka.http.scaladsl.model.headers.{ Host, Server } -final case class ServerSettings( +/** INTERNAL API */ +private[akka] final case class ServerSettingsImpl( serverHeader: Option[Server], timeouts: ServerSettings.Timeouts, maxConnections: Int, @@ -33,28 +35,36 @@ final case class ServerSettings( verboseErrorMessages: Boolean, responseHeaderSizeHint: Int, backlog: Int, - socketOptions: immutable.Traversable[SocketOption], + socketOptions: immutable.Seq[SocketOption], defaultHostHeader: Host, websocketRandomFactory: () ⇒ Random, - parserSettings: ParserSettings) { + parserSettings: ParserSettings) extends ServerSettings { require(0 < maxConnections, "max-connections must be > 0") require(0 < pipeliningLimit && pipeliningLimit <= 1024, "pipelining-limit must be > 0 and <= 1024") require(0 < responseHeaderSizeHint, "response-size-hint must be > 0") require(0 < backlog, "backlog must be > 0") + } -object ServerSettings extends SettingsCompanion[ServerSettings]("akka.http.server") { - final case class Timeouts(idleTimeout: Duration, - bindTimeout: FiniteDuration) { +object ServerSettingsImpl extends SettingsCompanion[ServerSettingsImpl]("akka.http.server") { + implicit def timeoutsShortcut(s: js.ServerSettings): js.ServerSettings.Timeouts = s.getTimeouts + + /** INTERNAL API */ + final case class Timeouts( + idleTimeout: Duration, + requestTimeout: Duration, + bindTimeout: FiniteDuration) extends ServerSettings.Timeouts { + require(idleTimeout > Duration.Zero, "idleTimeout must be infinite or > 0") + require(requestTimeout > Duration.Zero, "requestTimeout must be infinite or > 0") require(bindTimeout > Duration.Zero, "bindTimeout must be > 0") } - implicit def timeoutsShortcut(s: ServerSettings): Timeouts = s.timeouts - def fromSubConfig(root: Config, c: Config) = apply( + def fromSubConfig(root: Config, c: Config) = new ServerSettingsImpl( c.getString("server-header").toOption.map(Server(_)), - Timeouts( + new Timeouts( c getPotentiallyInfiniteDuration "idle-timeout", + c getPotentiallyInfiniteDuration "request-timeout", c getFiniteDuration "bind-timeout"), c getInt "max-connections", c getInt "pipelining-limit", @@ -73,33 +83,9 @@ object ServerSettings extends SettingsCompanion[ServerSettings]("akka.http.serve throw new ConfigurationException(info.formatPretty) }, Randoms.SecureRandomInstances, // can currently only be overridden from code - ParserSettings.fromSubConfig(root, c.getConfig("parsing"))) + ParserSettingsImpl.fromSubConfig(root, c.getConfig("parsing"))) - def apply(optionalSettings: Option[ServerSettings])(implicit actorRefFactory: ActorRefFactory): ServerSettings = - optionalSettings getOrElse apply(actorSystem) - - /** - * Creates an instance of ServerSettings using the configuration provided by the given - * ActorSystem. 
- * - * Java API - */ - def create(system: ActorSystem): ServerSettings = ServerSettings(system) - - /** - * Creates an instance of ServerSettings using the given Config. - * - * Java API - */ - def create(config: Config): ServerSettings = ServerSettings(config) - - /** - * Create an instance of ServerSettings using the given String of config overrides to override - * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in - * the class loader of this class). - * - * Java API - */ - def create(configOverrides: String): ServerSettings = ServerSettings(configOverrides) -} + // def apply(optionalSettings: Option[ServerSettings])(implicit actorRefFactory: ActorRefFactory): ServerSettings = + // optionalSettings getOrElse apply(actorSystem) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/ByteReader.scala b/akka-http-core/src/main/scala/akka/http/impl/util/ByteReader.scala deleted file mode 100644 index 3a03d3dea5..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/util/ByteReader.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.impl.util - -import scala.util.control.NoStackTrace -import akka.util.ByteString - -/** - * A helper class to read from a ByteString statefully. - * - * INTERNAL API - */ -private[akka] class ByteReader(input: ByteString) { - import ByteReader.NeedMoreData - - private[this] var off = 0 - - def hasRemaining: Boolean = off < input.size - - def currentOffset: Int = off - def remainingData: ByteString = input.drop(off) - def fromStartToHere: ByteString = input.take(off) - - def readByte(): Int = - if (off < input.length) { - val x = input(off) - off += 1 - x & 0xFF - } else throw NeedMoreData - def readShortLE(): Int = readByte() | (readByte() << 8) - def readIntLE(): Int = readShortLE() | (readShortLE() << 16) - def readLongLE(): Long = (readIntLE() & 0xffffffffL) | ((readIntLE() & 0xffffffffL) << 32) - - def readShortBE(): Int = (readByte() << 8) | readByte() - def readIntBE(): Int = (readShortBE() << 16) | readShortBE() - def readLongBE(): Long = ((readIntBE() & 0xffffffffL) << 32) | (readIntBE() & 0xffffffffL) - - def skip(numBytes: Int): Unit = - if (off + numBytes <= input.length) off += numBytes - else throw NeedMoreData - def skipZeroTerminatedString(): Unit = while (readByte() != 0) {} -} - -/* -* INTERNAL API -*/ -private[akka] object ByteReader { - val NeedMoreData = new Exception with NoStackTrace -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/ByteStringParserStage.scala b/akka-http-core/src/main/scala/akka/http/impl/util/ByteStringParserStage.scala deleted file mode 100644 index 5ae861883a..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/util/ByteStringParserStage.scala +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.impl.util - -import akka.stream.stage.{ Context, StatefulStage } -import akka.util.ByteString -import akka.stream.stage.SyncDirective - -/** - * A helper class for writing parsers from ByteStrings. 
- * - * FIXME: move to akka.stream.io, https://github.com/akka/akka/issues/16529 - * - * INTERNAL API - */ -private[akka] abstract class ByteStringParserStage[Out] extends StatefulStage[ByteString, Out] { - protected def onTruncation(ctx: Context[Out]): SyncDirective - - /** - * Derive a stage from [[IntermediateState]] and then call `pull(ctx)` instead of - * `ctx.pull()` to have truncation errors reported. - */ - abstract class IntermediateState extends State { - override def onPull(ctx: Context[Out]): SyncDirective = pull(ctx) - def pull(ctx: Context[Out]): SyncDirective = - if (ctx.isFinishing) onTruncation(ctx) - else ctx.pull() - } - - /** - * A stage that tries to read from a side-effecting [[ByteReader]]. If a buffer underrun - * occurs the previous data is saved and the reading process is restarted from the beginning - * once more data was received. - * - * As [[read]] may be called several times for the same prefix of data, make sure not to - * manipulate any state during reading from the ByteReader. - */ - private[akka] trait ByteReadingState extends IntermediateState { - def read(reader: ByteReader, ctx: Context[Out]): SyncDirective - - def onPush(data: ByteString, ctx: Context[Out]): SyncDirective = - try { - val reader = new ByteReader(data) - read(reader, ctx) - } catch { - case ByteReader.NeedMoreData ⇒ - become(TryAgain(data, this)) - pull(ctx) - } - } - private case class TryAgain(previousData: ByteString, byteReadingState: ByteReadingState) extends IntermediateState { - def onPush(data: ByteString, ctx: Context[Out]): SyncDirective = { - become(byteReadingState) - byteReadingState.onPush(previousData ++ data, ctx) - } - } -} diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteArray.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteArray.scala index 75420b87db..d62095f43f 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteArray.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteArray.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteStringTraversableOnce.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteStringTraversableOnce.scala index fbe1fc9eef..8973c76011 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteStringTraversableOnce.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedByteStringTraversableOnce.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedConfig.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedConfig.scala index 6ba6263783..2291037336 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedConfig.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedInetSocketAddress.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedInetSocketAddress.scala deleted file mode 100644 index ee00e6c833..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedInetSocketAddress.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.impl.util - -import java.lang.reflect.{ InvocationTargetException, Method } -import java.net.InetSocketAddress - -import scala.util.control.NonFatal - -/** - * Provides getHostString support for Java 6. - * - * TODO: can be removed once support for Java 6 is dropped. - * - * Internal API - */ -private[http] class EnhancedInetSocketAddress(val address: InetSocketAddress) extends AnyVal { - /** - * Retrieve the original host string that was given (IP or DNS name) if the current JDK has - * a `getHostString` method with the right signature that can be made accessible. - * - * This avoids a reverse DNS query from calling getHostName() if the original host string is an IP address. - * If the reflective call doesn't work it falls back to getHostName. - */ - def getHostStringJava6Compatible: String = EnhancedInetSocketAddress.getHostStringFunction(address) -} - -/** - * Internal API - */ -private[http] object EnhancedInetSocketAddress { - private[http] val getHostStringFunction: InetSocketAddress ⇒ String = { - def fallbackToGetHostName = (_: InetSocketAddress).getHostName - def callReflectively(m: Method) = - (address: InetSocketAddress) ⇒ - try m.invoke(address).asInstanceOf[String] - catch { - case ite: InvocationTargetException ⇒ throw ite.getTargetException - } - - try { - val m = classOf[InetSocketAddress].getDeclaredMethod("getHostString") - - val candidate = - if (m.getReturnType == classOf[String] && m.getParameterTypes.isEmpty) { - if (!m.isAccessible) m.setAccessible(true) - callReflectively(m) - } else fallbackToGetHostName - - // probe so that we can be sure a reflective problem only turns up once - // here during construction - candidate(new InetSocketAddress("127.0.0.1", 80)) - candidate - } catch { - case NonFatal(_) ⇒ fallbackToGetHostName - } - } -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedRegex.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedRegex.scala index 047c4d68bc..79048b7a88 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedRegex.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedRegex.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedString.scala b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedString.scala index 12aa1a5009..df6772cf1a 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedString.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/EnhancedString.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/Java6Compat.scala b/akka-http-core/src/main/scala/akka/http/impl/util/Java6Compat.scala deleted file mode 100644 index 2d3fb3eb19..0000000000 --- a/akka-http-core/src/main/scala/akka/http/impl/util/Java6Compat.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.impl.util - -import java.lang.reflect.{ InvocationTargetException, Method } -import javax.net.ssl.SSLParameters - -import scala.util.control.NonFatal - -/** - * INTERNAL API - * - * Enables accessing SslParameters even if compiled against Java 6. - */ -private[http] object Java6Compat { - - def isJava6: Boolean = - System.getProperty("java.version").take(4) match { - case "1.6." ⇒ true - case _ ⇒ false - } - - /** - * Returns true if setting the algorithm was successful. - */ - def trySetEndpointIdentificationAlgorithm(parameters: SSLParameters, algorithm: String): Boolean = - setEndpointIdentificationAlgorithmFunction(parameters, algorithm) - - private[this] val setEndpointIdentificationAlgorithmFunction: (SSLParameters, String) ⇒ Boolean = { - def unsupported: (SSLParameters, String) ⇒ Boolean = (_, _) ⇒ false - - def callReflectively(m: Method) = - (params: SSLParameters, algorithm: String) ⇒ - try { - m.invoke(params, algorithm) - true - } catch { - case ite: InvocationTargetException ⇒ throw ite.getTargetException - } - - try { - val m = classOf[SSLParameters].getMethod("setEndpointIdentificationAlgorithm", classOf[java.lang.String]) - - val candidate = - if (m.getReturnType == Void.TYPE && m.getParameterTypes.toSeq == Seq(classOf[java.lang.String])) { - if (!m.isAccessible) m.setAccessible(true) - callReflectively(m) - } else unsupported - - // probe so that we can be sure a reflective problem only turns up once - // here during construction - candidate(new SSLParameters(), "https") - candidate - } catch { - case NonFatal(_) ⇒ unsupported - } - } -} diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/JavaAccessors.scala b/akka-http-core/src/main/scala/akka/http/impl/util/JavaAccessors.scala index 4ba783c70e..8f2395cdf7 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/JavaAccessors.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/JavaAccessors.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/JavaMapping.scala b/akka-http-core/src/main/scala/akka/http/impl/util/JavaMapping.scala index 26efac5a19..d38cf6cb9e 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/JavaMapping.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/JavaMapping.scala @@ -1,21 +1,24 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
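The `Java6Compat` and `EnhancedInetSocketAddress` deletions above go together: with Java 8 as the baseline, the reflective fallbacks are no longer needed and the JDK methods can be called directly, as the `SettingsCompanion` hunk further down now does with `getHostString`. For example:

```
import java.net.{ InetAddress, InetSocketAddress }

// getHostString (available since JDK 7) returns the original host string
// without triggering a reverse DNS lookup.
val address  = new InetSocketAddress(InetAddress.getLocalHost, 80)
val hostName = address.getHostString
```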
*/ package akka.http.impl.util import java.net.InetAddress +import java.util.Optional import java.{ util ⇒ ju, lang ⇒ jl } import akka.japi.Pair -import akka.stream.javadsl -import akka.stream.scaladsl +import akka.stream.{ Graph, FlowShape, javadsl, scaladsl } import scala.collection.immutable import scala.reflect.ClassTag -import akka.japi +import akka.{ NotUsed, japi } import akka.http.impl.model.{ JavaQuery, JavaUri } -import akka.http.javadsl.{ model ⇒ jm } +import akka.http.javadsl.{ model ⇒ jm, HttpConnectionContext, ConnectionContext, HttpsConnectionContext } import akka.http.scaladsl.{ model ⇒ sm } +import akka.http.javadsl.{ settings ⇒ js } + +import scala.compat.java8.OptionConverters._ import scala.util.Try @@ -80,7 +83,7 @@ private[http] object JavaMapping { } } - /** This trivial mapping isn't enabled by default to prevent it from conflicting with the `Inherited` ones `*/ + /** This trivial mapping isn't enabled by default to prevent it from conflicting with the `Inherited` ones */ def identity[T]: JavaMapping[T, T] = new JavaMapping[T, T] { def toJava(scalaObject: T): J = scalaObject @@ -101,10 +104,10 @@ private[http] object JavaMapping { def toScala(javaObject: ju.Map[K, V]): immutable.Map[K, V] = javaObject.asScala.toMap def toJava(scalaObject: immutable.Map[K, V]): ju.Map[K, V] = scalaObject.asJava } - implicit def option[_J, _S](implicit mapping: JavaMapping[_J, _S]): JavaMapping[akka.japi.Option[_J], Option[_S]] = - new JavaMapping[akka.japi.Option[_J], Option[_S]] { - def toScala(javaObject: japi.Option[_J]): Option[_S] = javaObject.asScala.map(mapping.toScala) - def toJava(scalaObject: Option[_S]): japi.Option[_J] = japi.Option.fromScalaOption(scalaObject.map(mapping.toJava)) + implicit def option[_J, _S](implicit mapping: JavaMapping[_J, _S]): JavaMapping[Optional[_J], Option[_S]] = + new JavaMapping[Optional[_J], Option[_S]] { + def toScala(javaObject: Optional[_J]): Option[_S] = javaObject.asScala.map(mapping.toScala) + def toJava(scalaObject: Option[_S]): Optional[_J] = scalaObject.map(mapping.toJava).asJava } implicit def flowMapping[JIn, SIn, JOut, SOut, M](implicit inMapping: JavaMapping[JIn, SIn], outMapping: JavaMapping[JOut, SOut]): JavaMapping[javadsl.Flow[JIn, JOut, M], scaladsl.Flow[SIn, SOut, M]] = @@ -117,11 +120,21 @@ private[http] object JavaMapping { } } - def scalaToJavaAdapterFlow[J, S](implicit mapping: JavaMapping[J, S]): scaladsl.Flow[S, J, Unit] = + implicit def graphFlowMapping[JIn, SIn, JOut, SOut, M](implicit inMapping: JavaMapping[JIn, SIn], outMapping: JavaMapping[JOut, SOut]): JavaMapping[Graph[FlowShape[JIn, JOut], M], Graph[FlowShape[SIn, SOut], M]] = + new JavaMapping[Graph[FlowShape[JIn, JOut], M], Graph[FlowShape[SIn, SOut], M]] { + def toScala(javaObject: Graph[FlowShape[JIn, JOut], M]): S = + scaladsl.Flow[SIn].map(inMapping.toJava).viaMat(javaObject)(scaladsl.Keep.right).map(outMapping.toScala) + def toJava(scalaObject: Graph[FlowShape[SIn, SOut], M]): J = + javadsl.Flow.fromGraph { + scaladsl.Flow[JIn].map(inMapping.toScala).viaMat(scalaObject)(scaladsl.Keep.right).map(outMapping.toJava) + } + } + + def scalaToJavaAdapterFlow[J, S](implicit mapping: JavaMapping[J, S]): scaladsl.Flow[S, J, NotUsed] = scaladsl.Flow[S].map(mapping.toJava) - def javaToScalaAdapterFlow[J, S](implicit mapping: JavaMapping[J, S]): scaladsl.Flow[J, S, Unit] = + def javaToScalaAdapterFlow[J, S](implicit mapping: JavaMapping[J, S]): scaladsl.Flow[J, S, NotUsed] = scaladsl.Flow[J].map(mapping.toScala) - def adapterBidiFlow[JIn, SIn, SOut, JOut](implicit 
inMapping: JavaMapping[JIn, SIn], outMapping: JavaMapping[JOut, SOut]): scaladsl.BidiFlow[JIn, SIn, SOut, JOut, Unit] = + def adapterBidiFlow[JIn, SIn, SOut, JOut](implicit inMapping: JavaMapping[JIn, SIn], outMapping: JavaMapping[JOut, SOut]): scaladsl.BidiFlow[JIn, SIn, SOut, JOut, NotUsed] = scaladsl.BidiFlow.fromFlowsMat(javaToScalaAdapterFlow(inMapping), scalaToJavaAdapterFlow(outMapping))(scaladsl.Keep.none) implicit def pairMapping[J1, J2, S1, S2](implicit _1Mapping: JavaMapping[J1, S1], _2Mapping: JavaMapping[J2, S2]): JavaMapping[Pair[J1, J2], (S1, S2)] = @@ -152,6 +165,18 @@ private[http] object JavaMapping { def toScala(javaObject: J): S = cast[S](javaObject) } + implicit object ConnectionContext extends Inherited[ConnectionContext, akka.http.scaladsl.HttpConnectionContext] + implicit object HttpConnectionContext extends Inherited[HttpConnectionContext, akka.http.scaladsl.HttpConnectionContext] + implicit object HttpsConnectionContext extends Inherited[HttpsConnectionContext, akka.http.scaladsl.HttpsConnectionContext] + + implicit object ClientConnectionSettings extends Inherited[js.ClientConnectionSettings, akka.http.scaladsl.settings.ClientConnectionSettings] + implicit object ConnectionPoolSettings extends Inherited[js.ConnectionPoolSettings, akka.http.scaladsl.settings.ConnectionPoolSettings] + implicit object ParserSettings extends Inherited[js.ParserSettings, akka.http.scaladsl.settings.ParserSettings] + implicit object CookieParsingMode extends Inherited[js.ParserSettings.CookieParsingMode, akka.http.scaladsl.settings.ParserSettings.CookieParsingMode] + implicit object ErrorLoggingVerbosity extends Inherited[js.ParserSettings.ErrorLoggingVerbosity, akka.http.scaladsl.settings.ParserSettings.ErrorLoggingVerbosity] + implicit object ServerSettings extends Inherited[js.ServerSettings, akka.http.scaladsl.settings.ServerSettings] + implicit object ServerSettingsT extends Inherited[js.ServerSettings.Timeouts, akka.http.scaladsl.settings.ServerSettings.Timeouts] + implicit object DateTime extends Inherited[jm.DateTime, akka.http.scaladsl.model.DateTime] implicit object ContentType extends Inherited[jm.ContentType, sm.ContentType] @@ -182,9 +207,10 @@ private[http] object JavaMapping { implicit object TransferEncoding extends Inherited[jm.TransferEncoding, sm.TransferEncoding] implicit object HostHeader extends Inherited[jm.headers.Host, sm.headers.Host] - + implicit object Server extends Inherited[jm.headers.Server, sm.headers.Server] implicit object ByteRange extends Inherited[jm.headers.ByteRange, sm.headers.ByteRange] implicit object CacheDirective extends Inherited[jm.headers.CacheDirective, sm.headers.CacheDirective] + implicit object UserAgent extends Inherited[jm.headers.UserAgent, sm.headers.`User-Agent`] implicit object ContentDispositionType extends Inherited[jm.headers.ContentDispositionType, sm.headers.ContentDispositionType] implicit object EntityTag extends Inherited[jm.headers.EntityTag, sm.headers.EntityTag] implicit object EntityTagRange extends Inherited[jm.headers.EntityTagRange, sm.headers.EntityTagRange] @@ -212,6 +238,7 @@ private[http] object JavaMapping { def toScala(javaObject: J): Uri.S = cast[JavaUri](javaObject).uri def toJava(scalaObject: S): Uri.J = JavaUri(scalaObject) } + implicit object UriParsingMode extends Inherited[jm.Uri.ParsingMode, akka.http.scaladsl.model.Uri.ParsingMode] implicit object Query extends JavaMapping[jm.Query, sm.Uri.Query] { def toScala(javaObject: J): Query.S = cast[JavaQuery](javaObject).query diff --git 
a/akka-http-core/src/main/scala/akka/http/impl/util/ObjectRegistry.scala b/akka-http-core/src/main/scala/akka/http/impl/util/ObjectRegistry.scala index 866e64a2fd..b797a59aa6 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/ObjectRegistry.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/ObjectRegistry.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/Rendering.scala b/akka-http-core/src/main/scala/akka/http/impl/util/Rendering.scala index 78cd2bc510..a7ccb29784 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/Rendering.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/Rendering.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/SettingsCompanion.scala b/akka-http-core/src/main/scala/akka/http/impl/util/SettingsCompanion.scala index 0b17652f46..80a132abd5 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/SettingsCompanion.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/SettingsCompanion.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util @@ -10,7 +10,7 @@ import com.typesafe.config.ConfigFactory._ import scala.util.control.NonFatal import scala.collection.immutable.ListMap import scala.collection.JavaConverters._ -import akka.actor.ActorSystem +import akka.actor.{ ActorRefFactory, ActorSystem } /** * INTERNAL API @@ -19,6 +19,9 @@ private[http] abstract class SettingsCompanion[T](protected val prefix: String) private final val MaxCached = 8 private[this] var cache = ListMap.empty[ActorSystem, T] + implicit def default(implicit refFactory: ActorRefFactory): T = + apply(actorSystem) + def apply(system: ActorSystem): T = // we use and update the cache without any synchronization, // there are two possible "problems" resulting from this: @@ -49,7 +52,7 @@ private[http] abstract class SettingsCompanion[T](protected val prefix: String) private[http] object SettingsCompanion { lazy val configAdditions: Config = { val localHostName = - try new InetSocketAddress(InetAddress.getLocalHost, 80).getHostStringJava6Compatible + try new InetSocketAddress(InetAddress.getLocalHost, 80).getHostString catch { case NonFatal(_) ⇒ "" } ConfigFactory.parseMap(Map("akka.http.hostname" -> localHostName).asJava) } diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/SingletonException.scala b/akka-http-core/src/main/scala/akka/http/impl/util/SingletonException.scala index e29bf8c2d7..6e89f53b30 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/SingletonException.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/SingletonException.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/SocketOptionSettings.scala b/akka-http-core/src/main/scala/akka/http/impl/util/SocketOptionSettings.scala index f07d29c182..ff8f238dce 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/SocketOptionSettings.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/SocketOptionSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util @@ -12,7 +12,7 @@ import akka.io.Inet.SocketOption import com.typesafe.config.Config private[http] object SocketOptionSettings { - def fromSubConfig(root: Config, c: Config): immutable.Traversable[SocketOption] = { + def fromSubConfig(root: Config, c: Config): immutable.Seq[SocketOption] = { def so[T](setting: String)(f: (Config, String) ⇒ T)(cons: T ⇒ SocketOption): List[SocketOption] = c.getString(setting) match { case "undefined" ⇒ Nil diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala b/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala index 887032f78b..3778f1fcc0 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/StreamUtils.scala @@ -1,14 +1,16 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference } +import akka.NotUsed import akka.http.scaladsl.model.RequestEntity import akka.stream._ import akka.stream.impl.StreamLayout.Module +import akka.stream.impl.fusing.GraphStages.SimpleLinearGraphStage import akka.stream.impl.{ PublisherSink, SinkModule, SourceModule } import akka.stream.scaladsl._ import akka.stream.stage._ @@ -51,7 +53,7 @@ private[http] object StreamUtils { def failedPublisher[T](ex: Throwable): Publisher[T] = impl.ErrorPublisher(ex, "failed").asInstanceOf[Publisher[T]] - def mapErrorTransformer(f: Throwable ⇒ Throwable): Flow[ByteString, ByteString, Unit] = { + def mapErrorTransformer(f: Throwable ⇒ Throwable): Flow[ByteString, ByteString, NotUsed] = { val transformer = new PushStage[ByteString, ByteString] { override def onPush(element: ByteString, ctx: Context[ByteString]): SyncDirective = ctx.push(element) @@ -79,7 +81,7 @@ private[http] object StreamUtils { source.transform(() ⇒ transformer) -> promise.future } - def sliceBytesTransformer(start: Long, length: Long): Flow[ByteString, ByteString, Unit] = { + def sliceBytesTransformer(start: Long, length: Long): Flow[ByteString, ByteString, NotUsed] = { val transformer = new StatefulStage[ByteString, ByteString] { def skipping = new State { @@ -113,41 +115,47 @@ private[http] object StreamUtils { Flow[ByteString].transform(() ⇒ transformer).named("sliceBytes") } - def limitByteChunksStage(maxBytesPerChunk: Int): PushPullStage[ByteString, ByteString] = - new StatefulStage[ByteString, ByteString] { - def initial = WaitingForData + def limitByteChunksStage(maxBytesPerChunk: Int): GraphStage[FlowShape[ByteString, ByteString]] = + new SimpleLinearGraphStage[ByteString] { + override def initialAttributes = Attributes.name("limitByteChunksStage") + var remaining = ByteString.empty - case object WaitingForData extends State { - def onPush(elem: ByteString, ctx: Context[ByteString]): SyncDirective = - if (elem.size <= maxBytesPerChunk) ctx.push(elem) - else { - become(DeliveringData(elem.drop(maxBytesPerChunk))) - ctx.push(elem.take(maxBytesPerChunk)) - } - } + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { - case class DeliveringData(remaining: ByteString) extends State { - def onPush(elem: ByteString, ctx: Context[ByteString]): SyncDirective = - throw new IllegalStateException("Not expecting data") - - override def onPull(ctx: Context[ByteString]): SyncDirective = { + def splitAndPush(elem: ByteString): Unit = { val toPush = 
remaining.take(maxBytesPerChunk) val toKeep = remaining.drop(maxBytesPerChunk) + push(out, toPush) + remaining = toKeep + } + setHandlers(in, out, WaitingForData) - become { - if (toKeep.isEmpty) WaitingForData - else DeliveringData(toKeep) + case object WaitingForData extends InHandler with OutHandler { + override def onPush(): Unit = { + val elem = grab(in) + if (elem.size <= maxBytesPerChunk) push(out, elem) + else { + splitAndPush(elem) + setHandlers(in, out, DeliveringData) + } } - if (ctx.isFinishing) ctx.pushAndFinish(toPush) - else ctx.push(toPush) + override def onPull(): Unit = pull(in) } - } - override def onUpstreamFinish(ctx: Context[ByteString]): TerminationDirective = - current match { - case WaitingForData ⇒ ctx.finish() - case _: DeliveringData ⇒ ctx.absorbTermination() + case object DeliveringData extends InHandler() with OutHandler { + var finishing = false + override def onPush(): Unit = throw new IllegalStateException("Not expecting data") + override def onPull(): Unit = { + splitAndPush(remaining) + if (remaining.isEmpty) { + if (finishing) completeStage() else setHandlers(in, out, WaitingForData) + } + } + override def onUpstreamFinish(): Unit = if (remaining.isEmpty) completeStage() else finishing = true } + + override def toString = "limitByteChunksStage" + } } def mapEntityError(f: Throwable ⇒ Throwable): RequestEntity ⇒ RequestEntity = @@ -289,7 +297,7 @@ private[http] object StreamUtils { * Similar to Source.maybe but doesn't rely on materialization. Can only be used once. */ trait OneTimeValve { - def source[T]: Source[T, Unit] + def source[T]: Source[T, NotUsed] def open(): Unit } object OneTimeValve { @@ -297,7 +305,7 @@ private[http] object StreamUtils { val promise = Promise[Unit]() val _source = Source.fromFuture(promise.future).drop(1) // we are only interested in the completion event - def source[T]: Source[T, Unit] = _source.asInstanceOf[Source[T, Unit]] // safe, because source won't generate any elements + def source[T]: Source[T, NotUsed] = _source.asInstanceOf[Source[T, NotUsed]] // safe, because source won't generate any elements def open(): Unit = promise.success(()) } } diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/Timestamp.scala b/akka-http-core/src/main/scala/akka/http/impl/util/Timestamp.scala index 1499901851..cd61dc976b 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/Timestamp.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/Timestamp.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/main/scala/akka/http/impl/util/package.scala b/akka-http-core/src/main/scala/akka/http/impl/util/package.scala index 1e69fa95b1..471aa3541b 100644 --- a/akka-http-core/src/main/scala/akka/http/impl/util/package.scala +++ b/akka-http-core/src/main/scala/akka/http/impl/util/package.scala @@ -1,15 +1,14 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl -import java.net.InetSocketAddress +import akka.NotUsed import language.implicitConversions import language.higherKinds import java.nio.charset.Charset -import java.util.concurrent.atomic.AtomicInteger import com.typesafe.config.Config import akka.stream.scaladsl.{ Flow, Source } import akka.stream.stage._ @@ -18,7 +17,6 @@ import scala.concurrent.{ Await, Future } import scala.reflect.ClassTag import scala.util.{ Failure, Success } import scala.util.matching.Regex -import akka.event.LoggingAdapter import akka.util.ByteString import akka.actor._ @@ -40,14 +38,12 @@ package object util { private[http] implicit def enhanceConfig(config: Config): EnhancedConfig = new EnhancedConfig(config) private[http] implicit def enhanceString_(s: String): EnhancedString = new EnhancedString(s) private[http] implicit def enhanceRegex(regex: Regex): EnhancedRegex = new EnhancedRegex(regex) - private[http] implicit def enhanceInetSocketAddress(address: InetSocketAddress): EnhancedInetSocketAddress = - new EnhancedInetSocketAddress(address) private[http] implicit def enhanceByteStrings(byteStrings: TraversableOnce[ByteString]): EnhancedByteStringTraversableOnce = new EnhancedByteStringTraversableOnce(byteStrings) private[http] implicit def enhanceByteStringsMat[Mat](byteStrings: Source[ByteString, Mat]): EnhancedByteStringSource[Mat] = new EnhancedByteStringSource(byteStrings) - private[http] def printEvent[T](marker: String): Flow[T, T, Unit] = + private[http] def printEvent[T](marker: String): Flow[T, T, NotUsed] = Flow[T].transform(() ⇒ new PushPullStage[T, T] { override def onPush(element: T, ctx: Context[T]): SyncDirective = { println(s"$marker: $element") @@ -75,7 +71,7 @@ package object util { private[http] def installEventStreamLoggerFor(channel: Class[_])(implicit system: ActorSystem): Unit = { synchronized { if (eventStreamLogger == null) - eventStreamLogger = system.actorOf(Props[util.EventStreamLogger].withDeploy(Deploy.local), name = "event-stream-logger") + eventStreamLogger = system.actorOf(Props[util.EventStreamLogger]().withDeploy(Deploy.local), name = "event-stream-logger") } system.eventStream.subscribe(eventStreamLogger, channel) } @@ -178,6 +174,4 @@ package util { } } } - - private[http] class ReadTheDocumentationException(message: String) extends RuntimeException(message) } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/ConnectHttp.scala b/akka-http-core/src/main/scala/akka/http/javadsl/ConnectHttp.scala new file mode 100644 index 0000000000..680272a878 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/ConnectHttp.scala @@ -0,0 +1,105 @@ +/* + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.javadsl + +import java.util.Locale +import java.util.Optional + +import akka.http.javadsl.model.Uri + +abstract class ConnectHttp { + def host: String + def port: Int + + def isHttps: Boolean + def connectionContext: Optional[HttpsConnectionContext] + + final def effectiveConnectionContext(fallbackContext: HttpsConnectionContext): HttpsConnectionContext = + connectionContext.orElse(fallbackContext) +} + +object ConnectHttp { + + // TODO may be optimised a bit to avoid parsing the Uri entirely for the known port cases + + /** Extracts host data from given Uri. 
*/ + def toHost(uriHost: Uri): ConnectHttp = { + val s = uriHost.scheme.toLowerCase(Locale.ROOT) + if (s == "https") new ConnectHttpsImpl(uriHost.host.address, uriHost.port) + else new ConnectHttpImpl(uriHost.host.address, uriHost.port) + } + + def toHost(host: String): ConnectHttp = + toHost(Uri.create(host)) + + def toHost(host: String, port: Int): ConnectHttp = { + require(port > 0, "port must be > 0") + toHost(Uri.create(host).port(port)) + } + + /** + * Extracts host data from given Uri. + * Forces an HTTPS connection to the given host, using the default HTTPS context and default port. + */ + @throws(classOf[IllegalArgumentException]) + def toHostHttps(uriHost: Uri): ConnectWithHttps = { + val s = uriHost.scheme.toLowerCase(Locale.ROOT) + require(s == "" || s == "https", "toHostHttps used with non https scheme! Was: " + uriHost) + val httpsHost = uriHost.scheme("https") // for effective port calculation + new ConnectHttpsImpl(httpsHost.host.address, effectivePort(uriHost)) + } + + /** Forces an HTTPS connection to the given host, using the default HTTPS context and default port. */ + @throws(classOf[IllegalArgumentException]) + def toHostHttps(host: String): ConnectWithHttps = + toHostHttps(Uri.create(host)) + + /** Forces an HTTPS connection to the given host, using the default HTTPS context and given port. */ + @throws(classOf[IllegalArgumentException]) + def toHostHttps(host: String, port: Int): ConnectWithHttps = { + require(port > 0, "port must be > 0") + toHostHttps(Uri.create(host).port(port).host.address) + } + + private def effectivePort(uri: Uri): Int = { + val s = uri.scheme.toLowerCase(Locale.ROOT) + effectivePort(s, -1) + } + + private def effectivePort(scheme: String, port: Int): Int = { + val s = scheme.toLowerCase(Locale.ROOT) + if (port > 0) port + else if (s == "https" || s == "wss") 443 + else if (s == "http" || s == "ws") 80 + else throw new IllegalArgumentException("Scheme is not http/https/ws/wss and no port given!") + } + +} + +abstract class ConnectWithHttps extends ConnectHttp { + def withCustomHttpsContext(context: HttpsConnectionContext): ConnectWithHttps + def withDefaultHttpsContext(): ConnectWithHttps +} + +/** INTERNAL API */ +final class ConnectHttpImpl(val host: String, val port: Int) extends ConnectHttp { + def isHttps: Boolean = false + + def connectionContext: Optional[HttpsConnectionContext] = Optional.empty() +} + +final class ConnectHttpsImpl(val host: String, val port: Int, val context: Optional[HttpsConnectionContext] = Optional.empty()) + extends ConnectWithHttps { + + override def isHttps: Boolean = true + + override def withCustomHttpsContext(context: HttpsConnectionContext): ConnectWithHttps = + new ConnectHttpsImpl(host, port, Optional.of(context)) + + override def withDefaultHttpsContext(): ConnectWithHttps = + new ConnectHttpsImpl(host, port, Optional.empty()) + + override def connectionContext: Optional[HttpsConnectionContext] = context + +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/ConnectionContext.scala b/akka-http-core/src/main/scala/akka/http/javadsl/ConnectionContext.scala new file mode 100644 index 0000000000..12f660fe36 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/ConnectionContext.scala @@ -0,0 +1,56 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
+ */ +package akka.http.javadsl + +import java.util.{ Collection ⇒ JCollection, Optional } +import javax.net.ssl.{ SSLContext, SSLParameters } +import akka.http.scaladsl +import akka.stream.io.ClientAuth + +import scala.compat.java8.OptionConverters + +object ConnectionContext { + //#https-context-creation + /** Used to serve HTTPS traffic. */ + def https(sslContext: SSLContext): HttpsConnectionContext = + scaladsl.ConnectionContext.https(sslContext) + + /** Used to serve HTTPS traffic. */ + def https(sslContext: SSLContext, enabledCipherSuites: Optional[JCollection[String]], + enabledProtocols: Optional[JCollection[String]], clientAuth: Optional[ClientAuth], sslParameters: Optional[SSLParameters]) = + scaladsl.ConnectionContext.https(sslContext, sslParameters = OptionConverters.toScala(sslParameters)) + //#https-context-creation + + /** Used to serve HTTP traffic. */ + def noEncryption(): HttpConnectionContext = + scaladsl.ConnectionContext.noEncryption() +} + +abstract class ConnectionContext { + def isSecure: Boolean + /** Java API */ + def getDefaultPort: Int +} + +abstract class HttpConnectionContext extends akka.http.javadsl.ConnectionContext { + override final def isSecure = false + override final def getDefaultPort = 80 +} + +abstract class HttpsConnectionContext extends akka.http.javadsl.ConnectionContext { + override final def isSecure = true + override final def getDefaultPort = 443 + + /** Java API */ + def getEnabledCipherSuites: Optional[JCollection[String]] + /** Java API */ + def getEnabledProtocols: Optional[JCollection[String]] + /** Java API */ + def getClientAuth: Optional[ClientAuth] + + /** Java API */ + def getSslContext: SSLContext + /** Java API */ + def getSslParameters: Optional[SSLParameters] +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/HostConnectionPool.scala b/akka-http-core/src/main/scala/akka/http/javadsl/HostConnectionPool.scala index b0d4e866ff..7fdd8ec939 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/HostConnectionPool.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/HostConnectionPool.scala @@ -1,10 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl -import akka.http.HostConnectionPoolSetup +import akka.http.impl.settings.HostConnectionPoolSetup trait HostConnectionPool { def setup: HostConnectionPoolSetup diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala b/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala index fb9b3446e9..7a7d0a1b8e 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/Http.scala @@ -1,29 +1,34 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl import java.net.InetSocketAddress +import java.util.Optional +import akka.http.impl.settings.HostConnectionPoolSetup import akka.http.impl.util.JavaMapping +import akka.http.impl.util.JavaMapping.HttpsConnectionContext import akka.http.javadsl.model.ws._ -import akka.stream +import akka.http.javadsl.settings.{ ConnectionPoolSettings, ClientConnectionSettings, ServerSettings } +import akka.{ NotUsed, stream } import akka.stream.io.{ SslTlsInbound, SslTlsOutbound } - import scala.language.implicitConversions import scala.concurrent.Future import scala.util.Try import akka.stream.scaladsl.Keep -import akka.japi.{ Pair, Option, Function } +import akka.japi.{ Pair, Function } import akka.actor.{ ExtendedActorSystem, ActorSystem, ExtensionIdProvider, ExtensionId } import akka.event.LoggingAdapter import akka.stream.Materializer import akka.stream.javadsl.{ BidiFlow, Flow, Source } - import akka.http.impl.util.JavaMapping.Implicits._ import akka.http.scaladsl.{ model ⇒ sm } import akka.http.javadsl.model._ import akka.http._ +import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage object Http extends ExtensionId[Http] with ExtensionIdProvider { override def get(system: ActorSystem): Http = super.get(system) @@ -34,35 +39,36 @@ object Http extends ExtensionId[Http] with ExtensionIdProvider { class Http(system: ExtendedActorSystem) extends akka.actor.Extension { import akka.dispatch.ExecutionContexts.{ sameThreadExecutionContext ⇒ ec } + import language.implicitConversions + private implicit def completionStageCovariant[T, U >: T](in: CompletionStage[T]): CompletionStage[U] = in.asInstanceOf[CompletionStage[U]] + private implicit def javaModelIsScalaModel[J <: AnyRef, S <: J](in: Future[J])(implicit ev: JavaMapping.Inherited[J, S]): Future[S] = in.asInstanceOf[Future[S]] + private lazy val delegate = akka.http.scaladsl.Http(system) - private implicit def convertHttpsContext(hctx: Option[HttpsContext]) = - hctx.map(_.asInstanceOf[akka.http.scaladsl.HttpsContext]) - /** - * Constructs a server layer stage using the configured default [[ServerSettings]]. The returned [[BidiFlow]] isn't + * Constructs a server layer stage using the configured default [[akka.http.javadsl.settings.ServerSettings]]. The returned [[BidiFlow]] isn't * reusable and can only be materialized once. */ - def serverLayer(materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] = + def serverLayer(materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = adaptServerLayer(delegate.serverLayer()(materializer)) /** - * Constructs a server layer stage using the given [[ServerSettings]]. The returned [[BidiFlow]] isn't reusable and + * Constructs a server layer stage using the given [[akka.http.javadsl.settings.ServerSettings]]. The returned [[BidiFlow]] isn't reusable and * can only be materialized once. */ def serverLayer(settings: ServerSettings, - materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] = - adaptServerLayer(delegate.serverLayer(settings)(materializer)) + materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = + adaptServerLayer(delegate.serverLayer(settings.asScala)(materializer)) /** - * Constructs a server layer stage using the given [[ServerSettings]]. 
The returned [[BidiFlow]] isn't reusable and + * Constructs a server layer stage using the given [[akka.http.javadsl.settings.ServerSettings]]. The returned [[BidiFlow]] isn't reusable and * can only be materialized once. The `remoteAddress`, if provided, will be added as a header to each [[HttpRequest]] * this layer produces if the `akka.http.server.remote-address-header` configuration option is enabled. */ def serverLayer(settings: ServerSettings, - remoteAddress: Option[InetSocketAddress], - materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] = - adaptServerLayer(delegate.serverLayer(settings, remoteAddress.asScala)(materializer)) + remoteAddress: Optional[InetSocketAddress], + materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = + adaptServerLayer(delegate.serverLayer(settings.asScala, remoteAddress.asScala)(materializer)) /** * Constructs a server layer stage using the given [[ServerSettings]]. The returned [[BidiFlow]] isn't reusable and @@ -70,10 +76,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * this layer produces if the `akka.http.server.remote-address-header` configuration option is enabled. */ def serverLayer(settings: ServerSettings, - remoteAddress: Option[InetSocketAddress], + remoteAddress: Optional[InetSocketAddress], log: LoggingAdapter, - materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] = - adaptServerLayer(delegate.serverLayer(settings, remoteAddress.asScala, log)(materializer)) + materializer: Materializer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = + adaptServerLayer(delegate.serverLayer(settings.asScala, remoteAddress.asScala, log)(materializer)) /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding @@ -85,29 +91,70 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized * [[ServerBinding]]. */ - def bind(interface: String, port: Int, materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = + def bind(interface: String, port: Int, materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = new Source(delegate.bind(interface, port)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding * on the given `endpoint`. + * * If the given port is 0 the resulting source can be materialized several times. Each materialization will * then be assigned a new local port by the operating system, which can then be retrieved by the materialized * [[ServerBinding]]. + * * If the given port is non-zero subsequent materialization attempts of the produced source will immediately * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized * [[ServerBinding]]. 
*/ def bind(interface: String, port: Int, + connectionContext: ConnectionContext, settings: ServerSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, - materializer: Materializer): Source[IncomingConnection, Future[ServerBinding]] = - new Source(delegate.bind(interface, port, settings, httpsContext, log)(materializer) + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = + new Source(delegate.bind(interface, port, settings = settings.asScala, connectionContext = ConnectionContext.noEncryption().asScala)(materializer) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) + + /** + * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding + * on the given `endpoint`. + * + * If the given port is 0 the resulting source can be materialized several times. Each materialization will + * then be assigned a new local port by the operating system, which can then be retrieved by the materialized + * [[ServerBinding]]. + * + * If the given port is non-zero subsequent materialization attempts of the produced source will immediately + * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized + * [[ServerBinding]]. + */ + def bind(interface: String, port: Int, + connectionContext: ConnectionContext, + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = + new Source(delegate.bind(interface, port, connectionContext = connectionContext.asScala)(materializer) + .map(new IncomingConnection(_)) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) + + /** + * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding + * on the given `endpoint`. + * + * If the given port is 0 the resulting source can be materialized several times. Each materialization will + * then be assigned a new local port by the operating system, which can then be retrieved by the materialized + * [[ServerBinding]]. + * + * If the given port is non-zero subsequent materialization attempts of the produced source will immediately + * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized + * [[ServerBinding]]. 
+ */ + def bind(interface: String, port: Int, + connectionContext: ConnectionContext, + settings: ServerSettings, + log: LoggingAdapter, + materializer: Materializer): Source[IncomingConnection, CompletionStage[ServerBinding]] = + new Source(delegate.bind(interface, port, ConnectionContext.noEncryption().asScala, settings.asScala, log)(materializer) + .map(new IncomingConnection(_)) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -118,10 +165,25 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def bindAndHandle(handler: Flow[HttpRequest, HttpResponse, _], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, interface, port)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava + + /** + * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` + * [[Flow]] for processing all incoming connections. + * + * The number of concurrently accepted connections can be configured by overriding + * the `akka.http.server.max-connections` setting. + */ + def bindAndHandle(handler: Flow[HttpRequest, HttpResponse, _], + interface: String, port: Int, + connectionContext: ConnectionContext, + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, + interface, port, connectionContext.asScala)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -133,12 +195,12 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def bindAndHandle(handler: Flow[HttpRequest, HttpResponse, _], interface: String, port: Int, settings: ServerSettings, - httpsContext: Option[HttpsContext], + connectionContext: ConnectionContext, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandle(handler.asInstanceOf[Flow[sm.HttpRequest, sm.HttpResponse, _]].asScala, - interface, port, settings, httpsContext, log)(materializer) - .map(new ServerBinding(_))(ec) + interface, port, connectionContext.asScala, settings.asScala, log)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -149,9 +211,23 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { */ def bindAndHandleSync(handler: Function[HttpRequest, HttpResponse], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandleSync(handler.apply(_).asScala, interface, port)(materializer) - .map(new ServerBinding(_))(ec) + .map(new ServerBinding(_))(ec).toJava + + /** + * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` + * [[Flow]] for processing all incoming connections. + * + * The number of concurrently accepted connections can be configured by overriding + * the `akka.http.server.max-connections` setting. 
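A minimal sketch of how the reworked server-binding API can be exercised from Scala, assuming an illustrative system name, host, port and a trivial handler that answers every request with an empty 200 response; shutdown and error handling are omitted:

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.javadsl.{ ConnectionContext, Http }
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }

object BindSketch extends App {
  implicit val system = ActorSystem("bind-sketch") // illustrative name
  val materializer = ActorMaterializer()

  // Trivial synchronous handler: every request gets an empty 200 OK response.
  val handler = new akka.japi.Function[HttpRequest, HttpResponse] {
    override def apply(request: HttpRequest): HttpResponse = HttpResponse.create()
  }

  // New overload: explicit ConnectionContext and a CompletionStage[ServerBinding] result.
  val binding = Http.get(system).bindAndHandleSync(
    handler, "localhost", 8080, ConnectionContext.noEncryption(), materializer)
}
```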
+ */ + def bindAndHandleSync(handler: Function[HttpRequest, HttpResponse], + interface: String, port: Int, + connectionContext: ConnectionContext, + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleSync(handler.apply(_).asScala, interface, port, connectionContext.asScala)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -163,12 +239,12 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { def bindAndHandleSync(handler: Function[HttpRequest, HttpResponse], interface: String, port: Int, settings: ServerSettings, - httpsContext: Option[HttpsContext], + connectionContext: ConnectionContext, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = + materializer: Materializer): CompletionStage[ServerBinding] = delegate.bindAndHandleSync(handler.apply(_).asScala, - interface, port, settings, httpsContext, log)(materializer) - .map(new ServerBinding(_))(ec) + interface, port, connectionContext.asScala, settings.asScala, log)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -177,11 +253,11 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The number of concurrently accepted connections can be configured by overriding * the `akka.http.server.max-connections` setting. */ - def bindAndHandleAsync(handler: Function[HttpRequest, Future[HttpResponse]], + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], interface: String, port: Int, - materializer: Materializer): Future[ServerBinding] = - delegate.bindAndHandleAsync(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], interface, port)(materializer) - .map(new ServerBinding(_))(ec) + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, interface, port)(materializer) + .map(new ServerBinding(_))(ec).toJava /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -190,68 +266,69 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * The number of concurrently accepted connections can be configured by overriding * the `akka.http.server.max-connections` setting. */ - def bindAndHandleAsync(handler: Function[HttpRequest, Future[HttpResponse]], + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], interface: String, port: Int, - settings: ServerSettings, httpsContext: Option[HttpsContext], + connectionContext: ConnectionContext, + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, interface, port, connectionContext.asScala)(materializer) + .map(new ServerBinding(_))(ec).toJava + + /** + * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` + * [[Flow]] for processing all incoming connections. + * + * The number of concurrently accepted connections can be configured by overriding + * the `akka.http.server.max-connections` setting. 
+ */ + def bindAndHandleAsync(handler: Function[HttpRequest, CompletionStage[HttpResponse]], + interface: String, port: Int, + settings: ServerSettings, connectionContext: ConnectionContext, parallelism: Int, log: LoggingAdapter, - materializer: Materializer): Future[ServerBinding] = - delegate.bindAndHandleAsync(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], - interface, port, settings, httpsContext, parallelism, log)(materializer) - .map(new ServerBinding(_))(ec) + materializer: Materializer): CompletionStage[ServerBinding] = + delegate.bindAndHandleAsync(handler.apply(_).toScala, + interface, port, connectionContext.asScala, settings.asScala, parallelism, log)(materializer) + .map(new ServerBinding(_))(ec).toJava /** - * Constructs a client layer stage using the configured default [[ClientConnectionSettings]]. + * Constructs a client layer stage using the configured default [[akka.http.javadsl.settings.ClientConnectionSettings]]. */ - def clientLayer(hostHeader: headers.Host): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, Unit] = + def clientLayer(hostHeader: headers.Host): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, NotUsed] = adaptClientLayer(delegate.clientLayer(JavaMapping.toScala(hostHeader))) /** - * Constructs a client layer stage using the given [[ClientConnectionSettings]]. + * Constructs a client layer stage using the given [[akka.http.javadsl.settings.ClientConnectionSettings]]. */ def clientLayer(hostHeader: headers.Host, - settings: ClientConnectionSettings): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, Unit] = - adaptClientLayer(delegate.clientLayer(JavaMapping.toScala(hostHeader), settings)) + settings: ClientConnectionSettings): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, NotUsed] = + adaptClientLayer(delegate.clientLayer(JavaMapping.toScala(hostHeader), settings.asScala)) /** * Constructs a client layer stage using the given [[ClientConnectionSettings]]. */ def clientLayer(hostHeader: headers.Host, settings: ClientConnectionSettings, - log: LoggingAdapter): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, Unit] = - adaptClientLayer(delegate.clientLayer(JavaMapping.toScala(hostHeader), settings, log)) + log: LoggingAdapter): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, NotUsed] = + adaptClientLayer(delegate.clientLayer(JavaMapping.toScala(hostHeader), settings.asScala, log)) /** * Creates a [[Flow]] representing a prospective HTTP client connection to the given endpoint. * Every materialization of the produced flow will attempt to establish a new outgoing connection. + * + * If the hostname is given with an `https://` prefix, the default [[HttpsConnectionContext]] will be used. */ - def outgoingConnection(host: String): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - outgoingConnection(host, 80) - - /** - * Same as [[outgoingConnection]] but with HTTPS encryption. - */ - def outgoingConnectionTls(host: String): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - outgoingConnectionTls(host, 443) + def outgoingConnection(host: String): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = + outgoingConnection(ConnectHttp.toHost(host)) /** * Creates a [[Flow]] representing a prospective HTTP client connection to the given endpoint. * Every materialization of the produced flow will attempt to establish a new outgoing connection. 
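Client connections are now described via the `ConnectHttp` DSL instead of separate plain/TLS method variants. A sketch under illustrative host names, using the JDK default `SSLContext` purely as an example of a custom context:

```scala
import javax.net.ssl.SSLContext
import akka.actor.ActorSystem
import akka.http.javadsl.{ ConnectHttp, ConnectionContext, Http }

object OutgoingConnectionSketch extends App {
  implicit val system = ActorSystem("client-sketch") // illustrative name
  val http = Http.get(system)

  // Plain HTTP connection flow; nothing connects until the flow is materialized.
  val plain = http.outgoingConnection(ConnectHttp.toHost("http://example.com:80"))

  // HTTPS connection flow with an explicitly supplied HttpsConnectionContext.
  val https = http.outgoingConnection(
    ConnectHttp.toHostHttps("https://example.com")
      .withCustomHttpsContext(ConnectionContext.https(SSLContext.getDefault)))
}
```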
+ * + * Use the [[ConnectHttp]] DSL to configure target host and whether HTTPS should be used. */ - def outgoingConnection(host: String, port: Int): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - Flow.fromGraph { - akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) - .viaMat(delegate.outgoingConnection(host, port))(Keep.right) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec)) - } - - /** - * Same as [[outgoingConnection]] but with HTTPS encryption. - */ - def outgoingConnectionTls(host: String, port: Int): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - Flow.fromGraph { - akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) - .viaMat(delegate.outgoingConnectionTls(host, port))(Keep.right) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec)) + def outgoingConnection(to: ConnectHttp): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = + adaptOutgoingFlow { + if (to.isHttps) delegate.outgoingConnectionHttps(to.host, to.port, to.effectiveConnectionContext(defaultClientHttpsContext).asScala) + else delegate.outgoingConnection(to.host, to.port) } /** @@ -259,31 +336,15 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * Every materialization of the produced flow will attempt to establish a new outgoing connection. */ def outgoingConnection(host: String, port: Int, - localAddress: Option[InetSocketAddress], + connectionContext: ConnectionContext, + localAddress: Optional[InetSocketAddress], settings: ClientConnectionSettings, - log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - Flow.fromGraph { - akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) - .viaMat(delegate.outgoingConnection(host, port, localAddress.asScala, settings, log))(Keep.right) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec)) - } - - /** - * Same as [[outgoingConnection]] but with HTTPS encryption. - * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for encryption on the connection. - */ - def outgoingConnectionTls(host: String, port: Int, - localAddress: Option[InetSocketAddress], - settings: ClientConnectionSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - Flow.fromGraph { - akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) - .viaMat(delegate.outgoingConnectionTls(host, port, localAddress.asScala, settings, - httpsContext.map(_.asInstanceOf[akka.http.scaladsl.HttpsContext]), log))(Keep.right) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec)) + log: LoggingAdapter): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = + adaptOutgoingFlow { + connectionContext match { + case https: HttpsConnectionContext ⇒ delegate.outgoingConnectionHttps(host, port, https.asScala, localAddress.asScala, settings.asScala, log) + case _ ⇒ delegate.outgoingConnection(host, port, localAddress.asScala, settings.asScala, log) + } } /** @@ -300,63 +361,40 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. 
*/ - def newHostConnectionPool[T](host: String, port: Int, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.newHostConnectionPool[T](host, port)(materializer)) + def newHostConnectionPool[T](host: String, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = + newHostConnectionPool[T](ConnectHttp.toHost(host), materializer) + + /** + * Starts a new connection pool to the given host and configuration and returns a [[Flow]] which dispatches + * the requests from all its materializations across this pool. + * While the started host connection pool internally shuts itself down automatically after the configured idle + * timeout it will spin itself up again if more requests arrive from an existing or a new client flow + * materialization. The returned flow therefore remains usable for the full lifetime of the application. + * + * Since the underlying transport usually comprises more than a single connection the produced flow might generate + * responses in an order that doesn't directly match the consumed requests. + * For example, if two requests A and B enter the flow in that order the response for B might be produced before the + * response for A. + * In order to allow for easy response-to-request association the flow takes in a custom, opaque context + * object of type `T` from the application which is emitted together with the corresponding response. + */ + def newHostConnectionPool[T](to: ConnectHttp, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = + adaptTupleFlow(delegate.newHostConnectionPool[T](to.host, to.port)(materializer)) /** * Same as [[newHostConnectionPool]] but with HTTPS encryption. - */ - def newHostConnectionPoolTls[T](host: String, port: Int, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.newHostConnectionPoolTls[T](host, port)(materializer)) - - /** - * Starts a new connection pool to the given host and configuration and returns a [[Flow]] which dispatches - * the requests from all its materializations across this pool. - * While the started host connection pool internally shuts itself down automatically after the configured idle - * timeout it will spin itself up again if more requests arrive from an existing or a new client flow - * materialization. The returned flow therefore remains usable for the full lifetime of the application. * - * Since the underlying transport usually comprises more than a single connection the produced flow might generate - * responses in an order that doesn't directly match the consumed requests. - * For example, if two requests A and B enter the flow in that order the response for B might be produced before the - * response for A. - * In order to allow for easy response-to-request association the flow takes in a custom, opaque context - * object of type `T` from the application which is emitted together with the corresponding response. + * The given [[ConnectionContext]] will be used for encryption on the connection. 
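A sketch of obtaining a host connection pool through the `ConnectHttp`-based overload; the target host and the `String` correlation type are arbitrary choices for illustration:

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.javadsl.{ ConnectHttp, Http }

object HostPoolSketch extends App {
  implicit val system = ActorSystem("pool-sketch") // illustrative name
  val materializer = ActorMaterializer()

  // Pool flow of (HttpRequest, T) pairs to (Try[HttpResponse], T) pairs,
  // where T is an opaque correlation value chosen by the caller.
  val poolFlow = Http.get(system)
    .newHostConnectionPool[String](ConnectHttp.toHost("http://example.com:80"), materializer)
}
```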
*/ - def newHostConnectionPool[T](host: String, port: Int, + def newHostConnectionPool[T](to: ConnectHttp, settings: ConnectionPoolSettings, log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.newHostConnectionPool[T](host, port, settings, log)(materializer)) - - /** - * Same as [[newHostConnectionPool]] but with HTTPS encryption. - * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for encryption on the connection. - */ - def newHostConnectionPoolTls[T](host: String, port: Int, - settings: ConnectionPoolSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.newHostConnectionPoolTls[T](host, port, settings, - httpsContext.map(_.asInstanceOf[akka.http.scaladsl.HttpsContext]), log)(materializer)) - - /** - * Starts a new connection pool to the given host and configuration and returns a [[Flow]] which dispatches - * the requests from all its materializations across this pool. - * While the started host connection pool internally shuts itself down automatically after the configured idle - * timeout it will spin itself up again if more requests arrive from an existing or a new client flow - * materialization. The returned flow therefore remains usable for the full lifetime of the application. - * - * Since the underlying transport usually comprises more than a single connection the produced flow might generate - * responses in an order that doesn't directly match the consumed requests. - * For example, if two requests A and B enter the flow in that order the response for B might be produced before the - * response for A. - * In order to allow for easy response-to-request association the flow takes in a custom, opaque context - * object of type `T` from the application which is emitted together with the corresponding response. - */ - def newHostConnectionPool[T](setup: HostConnectionPoolSetup, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.newHostConnectionPool[T](setup)(materializer)) + adaptTupleFlow { + to.effectiveConnectionContext(defaultClientHttpsContext) match { + case https: HttpsConnectionContext ⇒ delegate.newHostConnectionPoolHttps[T](to.host, to.port, https.asScala, settings.asScala, log)(materializer) + case _ ⇒ delegate.newHostConnectionPool[T](to.host, to.port, settings.asScala, log)(materializer) + } + } /** * Returns a [[Flow]] which dispatches incoming HTTP requests to the per-ActorSystem pool of outgoing @@ -375,14 +413,8 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. */ - def cachedHostConnectionPool[T](host: String, port: Int, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.cachedHostConnectionPool[T](host, port)(materializer)) - - /** - * Same as [[cachedHostConnectionPool]] but with HTTPS encryption. 
- */ - def cachedHostConnectionPoolTls[T](host: String, port: Int, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.cachedHostConnectionPoolTls[T](host, port)(materializer)) + def cachedHostConnectionPool[T](host: String, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = + cachedHostConnectionPool(ConnectHttp.toHost(host), materializer) /** * Returns a [[Flow]] which dispatches incoming HTTP requests to the per-ActorSystem pool of outgoing @@ -401,43 +433,18 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. */ - def cachedHostConnectionPool[T](host: String, port: Int, - settings: ConnectionPoolSettings, - log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.cachedHostConnectionPool[T](host, port, settings, log)(materializer)) + def cachedHostConnectionPool[T](to: ConnectHttp, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = + adaptTupleFlow(delegate.cachedHostConnectionPool[T](to.host, to.port)(materializer)) /** * Same as [[cachedHostConnectionPool]] but with HTTPS encryption. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for encryption on the connection. + * The given [[ConnectionContext]] will be used for encryption on the connection. */ - def cachedHostConnectionPoolTls[T](host: String, port: Int, - settings: ConnectionPoolSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.cachedHostConnectionPoolTls[T](host, port, settings, - httpsContext.map(_.asInstanceOf[akka.http.scaladsl.HttpsContext]), log)(materializer)) - - /** - * Returns a [[Flow]] which dispatches incoming HTTP requests to the per-ActorSystem pool of outgoing - * HTTP connections to the given target host endpoint. For every ActorSystem, target host and pool - * configuration a separate connection pool is maintained. - * The HTTP layer transparently manages idle shutdown and restarting of connections pools as configured. - * The returned [[Flow]] instances therefore remain valid throughout the lifetime of the application. - * - * The internal caching logic guarantees that there will never be more than a single pool running for the - * given target host endpoint and configuration (in this ActorSystem). - * - * Since the underlying transport usually comprises more than a single connection the produced flow might generate - * responses in an order that doesn't directly match the consumed requests. - * For example, if two requests A and B enter the flow in that order the response for B might be produced before the - * response for A. - * In order to allow for easy response-to-request association the flow takes in a custom, opaque context - * object of type `T` from the application which is emitted together with the corresponding response. 
- */ - def cachedHostConnectionPool[T](setup: HostConnectionPoolSetup, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = - adaptTupleFlow(delegate.cachedHostConnectionPool[T](setup)(materializer)) + def cachedHostConnectionPool[T](to: ConnectHttp, + settings: ConnectionPoolSettings, + log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], HostConnectionPool] = + adaptTupleFlow(delegate.cachedHostConnectionPoolHttps[T](to.host, to.port, to.effectiveConnectionContext(defaultClientHttpsContext).asScala, settings.asScala, log)(materializer)) /** * Creates a new "super connection pool flow", which routes incoming requests to a (cached) host connection pool @@ -446,12 +453,12 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * * Since the underlying transport usually comprises more than a single connection the produced flow might generate * responses in an order that doesn't directly match the consumed requests. - * For example, if two requests A and B enter the flow in that order the response for B might be produced before the - * response for A. + * For example, if two requests `A` and `B` enter the flow in that order the response for `B` might be produced before the + * response for `A`. * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. */ - def superPool[T](materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], Unit] = + def superPool[T](materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], NotUsed] = adaptTupleFlow(delegate.superPool[T]()(materializer)) /** @@ -459,131 +466,182 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * depending on their respective effective URIs. Note that incoming requests must have either an absolute URI or * a valid `Host` header. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for setting up the HTTPS connection pool, if required. + * The given [[HttpsConnectionContext]] is used to configure TLS for the connection. * * Since the underlying transport usually comprises more than a single connection the produced flow might generate * responses in an order that doesn't directly match the consumed requests. - * For example, if two requests A and B enter the flow in that order the response for B might be produced before the - * response for A. + * For example, if two requests `A` and `B` enter the `flow` in that order the response for `B` might be produced before the + * response for `A`. + * * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. 
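A sketch of the materialization-independent super pool; the request URL and `Integer` correlation values are placeholders:

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.javadsl.Http
import akka.http.javadsl.model.HttpRequest
import akka.japi.Pair

object SuperPoolSketch extends App {
  implicit val system = ActorSystem("superpool-sketch") // illustrative name
  val materializer = ActorMaterializer()

  // Routes every (request, context) pair to a cached host pool derived from the
  // request's effective URI; the context value is emitted again with the response.
  val pool = Http.get(system).superPool[Integer](materializer)

  // Requests fed into `pool` must carry an absolute URI or a valid Host header, e.g.:
  val element = Pair.create(HttpRequest.create("http://example.com/"), Int.box(1))
}
```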
*/ def superPool[T](settings: ConnectionPoolSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], Unit] = - adaptTupleFlow(delegate.superPool[T](settings, httpsContext, log)(materializer)) + connectionContext: HttpsConnectionContext, + log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], NotUsed] = + adaptTupleFlow(delegate.superPool[T](connectionContext.asScala, settings.asScala, log)(materializer)) + + /** + * Creates a new "super connection pool flow", which routes incoming requests to a (cached) host connection pool + * depending on their respective effective URIs. Note that incoming requests must have either an absolute URI or + * a valid `Host` header. + * + * The [[defaultClientHttpsContext]] is used to configure TLS for the connection. + * + * Since the underlying transport usually comprises more than a single connection the produced flow might generate + * responses in an order that doesn't directly match the consumed requests. + * For example, if two requests `A` and `B` enter the `flow` in that order the response for `B` might be produced before the + * response for `A`. + * + * In order to allow for easy response-to-request association the flow takes in a custom, opaque context + * object of type `T` from the application which is emitted together with the corresponding response. + */ + def superPool[T](settings: ConnectionPoolSettings, + log: LoggingAdapter, materializer: Materializer): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], NotUsed] = + adaptTupleFlow(delegate.superPool[T](defaultClientHttpsContext.asScala, settings.asScala, log)(materializer)) /** * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's * effective URI to produce a response future. * + * The [[defaultClientHttpsContext]] is used to configure TLS for the connection. + * * Note that the request must have either an absolute URI or a valid `Host` header, otherwise * the future will be completed with an error. */ - def singleRequest(request: HttpRequest, materializer: Materializer): Future[HttpResponse] = - delegate.singleRequest(request.asScala)(materializer) + def singleRequest(request: HttpRequest, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala)(materializer).toJava /** * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's * effective URI to produce a response future. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for setting up the HTTPS connection pool, if required. + * The [[defaultClientHttpsContext]] is used to configure TLS for the connection. + * + * Note that the request must have either an absolute URI or a valid `Host` header, otherwise + * the future will be completed with an error. + */ + def singleRequest(request: HttpRequest, connectionContext: HttpsConnectionContext, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala, connectionContext.asScala)(materializer).toJava + + /** + * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's + * effective URI to produce a response future. + * + * The given [[HttpsConnectionContext]] will be used for encryption if the request is sent to an https endpoint. 
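Request-level client calls now return a `CompletionStage` as well. A minimal sketch with a placeholder URL; note that `singleRequest` fires the request as soon as it is called:

```scala
import java.util.concurrent.CompletionStage
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.javadsl.Http
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }

object SingleRequestSketch extends App {
  implicit val system = ActorSystem("single-request-sketch") // illustrative name
  val materializer = ActorMaterializer()

  // Fires one request through the cached pool for its effective URI;
  // the response (or failure) arrives asynchronously on the CompletionStage.
  val response: CompletionStage[HttpResponse] =
    Http.get(system).singleRequest(HttpRequest.create("http://example.com/"), materializer)
}
```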
* * Note that the request must have either an absolute URI or a valid `Host` header, otherwise * the future will be completed with an error. */ def singleRequest(request: HttpRequest, + connectionContext: HttpsConnectionContext, settings: ConnectionPoolSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, materializer: Materializer): Future[HttpResponse] = - delegate.singleRequest(request.asScala, settings, httpsContext, log)(materializer) + log: LoggingAdapter, materializer: Materializer): CompletionStage[HttpResponse] = + delegate.singleRequest(request.asScala, connectionContext.asScala, settings.asScala, log)(materializer).toJava /** - * Constructs a Websocket [[BidiFlow]]. + * Constructs a WebSocket [[BidiFlow]]. * * The layer is not reusable and must only be materialized once. */ - def websocketClientLayer(request: WebsocketRequest): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebsocketUpgradeResponse]] = - adaptWsBidiFlow(delegate.websocketClientLayer(request.asScala)) + def webSocketClientLayer(request: WebSocketRequest): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = + adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala)) /** - * Constructs a Websocket [[BidiFlow]] using the configured default [[ClientConnectionSettings]], + * Constructs a WebSocket [[BidiFlow]] using the configured default [[ClientConnectionSettings]], * configured using the `akka.http.client` config section. * * The layer is not reusable and must only be materialized once. */ - def websocketClientLayer(request: WebsocketRequest, - settings: ClientConnectionSettings): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebsocketUpgradeResponse]] = - adaptWsBidiFlow(delegate.websocketClientLayer(request.asScala, settings)) + def webSocketClientLayer(request: WebSocketRequest, + settings: ClientConnectionSettings): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = + adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala, settings.asScala)) /** - * Constructs a Websocket [[BidiFlow]] using the configured default [[ClientConnectionSettings]], + * Constructs a WebSocket [[BidiFlow]] using the configured default [[ClientConnectionSettings]], * configured using the `akka.http.client` config section. * * The layer is not reusable and must only be materialized once. */ - def websocketClientLayer(request: WebsocketRequest, + def webSocketClientLayer(request: WebSocketRequest, settings: ClientConnectionSettings, - log: LoggingAdapter): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebsocketUpgradeResponse]] = - adaptWsBidiFlow(delegate.websocketClientLayer(request.asScala, settings, log)) + log: LoggingAdapter): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = + adaptWsBidiFlow(delegate.webSocketClientLayer(request.asScala, settings.asScala, log)) /** - * Constructs a flow that once materialized establishes a Websocket connection to the given Uri. + * Constructs a flow that once materialized establishes a WebSocket connection to the given Uri. * * The layer is not reusable and must only be materialized once. 
*/ - def websocketClientFlow(request: WebsocketRequest): Flow[Message, Message, Future[WebsocketUpgradeResponse]] = + def webSocketClientFlow(request: WebSocketRequest): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsFlow { - delegate.websocketClientFlow(request.asScala) + delegate.webSocketClientFlow(request.asScala) } /** - * Constructs a flow that once materialized establishes a Websocket connection to the given Uri. + * Constructs a flow that once materialized establishes a WebSocket connection to the given Uri. * * The layer is not reusable and must only be materialized once. */ - def websocketClientFlow(request: WebsocketRequest, - localAddress: Option[InetSocketAddress], + def webSocketClientFlow(request: WebSocketRequest, + connectionContext: ConnectionContext, + localAddress: Optional[InetSocketAddress], settings: ClientConnectionSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter): Flow[Message, Message, Future[WebsocketUpgradeResponse]] = + log: LoggingAdapter): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = adaptWsFlow { - delegate.websocketClientFlow(request.asScala, localAddress, settings, httpsContext, log) + delegate.webSocketClientFlow(request.asScala, connectionContext.asScala, localAddress.asScala, settings.asScala, log) } /** - * Runs a single Websocket conversation given a Uri and a flow that represents the client side of the - * Websocket conversation. + * Runs a single WebSocket conversation given a Uri and a flow that represents the client side of the + * WebSocket conversation. + * + * The [[defaultClientHttpsContext]] is used to configure TLS for the connection. */ - def singleWebsocketRequest[T](request: WebsocketRequest, + def singleWebSocketRequest[T](request: WebSocketRequest, clientFlow: Flow[Message, Message, T], - materializer: Materializer): Pair[Future[WebsocketUpgradeResponse], T] = + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = adaptWsResultTuple { - delegate.singleWebsocketRequest( + delegate.singleWebSocketRequest( request.asScala, adaptWsFlow[T](clientFlow))(materializer) } /** - * Runs a single Websocket conversation given a Uri and a flow that represents the client side of the - * Websocket conversation. + * Runs a single WebSocket conversation given a Uri and a flow that represents the client side of the + * WebSocket conversation. + * + * The [[defaultClientHttpsContext]] is used to configure TLS for the connection. */ - def singleWebsocketRequest[T](request: WebsocketRequest, + def singleWebSocketRequest[T](request: WebSocketRequest, clientFlow: Flow[Message, Message, T], - localAddress: Option[InetSocketAddress], - settings: ClientConnectionSettings, - httpsContext: Option[HttpsContext], - log: LoggingAdapter, - materializer: Materializer): Pair[Future[WebsocketUpgradeResponse], T] = + connectionContext: ConnectionContext, + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = adaptWsResultTuple { - delegate.singleWebsocketRequest( + delegate.singleWebSocketRequest( request.asScala, adaptWsFlow[T](clientFlow), - localAddress, - settings, - httpsContext, + connectionContext.asScala)(materializer) + } + + /** + * Runs a single WebSocket conversation given a Uri and a flow that represents the client side of the + * WebSocket conversation. 
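The following sketch shows how such a single WebSocket conversation is typically run from the scaladsl side; the endpoint and the text sent are placeholders:

```
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws.{ Message, TextMessage, WebSocketRequest }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink, Source }

object SingleWebSocketRequestExample extends App {
  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  // Client side of the conversation: print incoming messages, send a single text frame.
  val clientFlow = Flow.fromSinkAndSource(
    Sink.foreach[Message](message => println(s"received: $message")),
    Source.single(TextMessage("hello")))

  // The materialized future can be inspected to see whether the handshake was accepted.
  val (upgradeResponse, _) =
    Http().singleWebSocketRequest(WebSocketRequest("ws://example.com/ws"), clientFlow)
}
```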
+ */ + def singleWebSocketRequest[T](request: WebSocketRequest, + clientFlow: Flow[Message, Message, T], + connectionContext: ConnectionContext, + localAddress: Optional[InetSocketAddress], + settings: ClientConnectionSettings, + log: LoggingAdapter, + materializer: Materializer): Pair[CompletionStage[WebSocketUpgradeResponse], T] = + adaptWsResultTuple { + delegate.singleWebSocketRequest( + request.asScala, + adaptWsFlow[T](clientFlow), + connectionContext.asScala, + localAddress.asScala, + settings.asScala, log)(materializer) } @@ -595,40 +653,54 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { * If existing pool client flows are re-used or new ones materialized concurrently with or after this * method call the respective connection pools will be restarted and not contribute to the returned future. */ - def shutdownAllConnectionPools(): Future[Unit] = delegate.shutdownAllConnectionPools() + def shutdownAllConnectionPools(): CompletionStage[Unit] = delegate.shutdownAllConnectionPools().toJava /** - * Gets the current default client-side [[HttpsContext]]. + * Gets the current default server-side [[ConnectionContext]], + * i.e. the context used by server bindings unless an explicit [[ConnectionContext]] is passed to `bind`. */ - def defaultClientHttpsContext: HttpsContext = delegate.defaultClientHttpsContext + def defaultServerHttpContext: ConnectionContext = + delegate.defaultServerHttpContext /** - * Sets the default client-side [[HttpsContext]]. + * Gets the current default client-side [[ConnectionContext]]. */ - def setDefaultClientHttpsContext(context: HttpsContext): Unit = - delegate.setDefaultClientHttpsContext(context.asInstanceOf[akka.http.scaladsl.HttpsContext]) + def defaultClientHttpsContext: akka.http.javadsl.HttpsConnectionContext = delegate.defaultClientHttpsContext + + /** + * Sets the default client-side [[ConnectionContext]]. + */ + def setDefaultClientHttpsContext(context: HttpsConnectionContext): Unit = + delegate.setDefaultClientHttpsContext(context.asInstanceOf[akka.http.scaladsl.HttpsConnectionContext]) private def adaptTupleFlow[T, Mat](scalaFlow: stream.scaladsl.Flow[(scaladsl.model.HttpRequest, T), (Try[scaladsl.model.HttpResponse], T), Mat]): Flow[Pair[HttpRequest, T], Pair[Try[HttpResponse], T], Mat] = { implicit val _ = JavaMapping.identity[T] JavaMapping.toJava(scalaFlow)(JavaMapping.flowMapping[Pair[HttpRequest, T], (scaladsl.model.HttpRequest, T), Pair[Try[HttpResponse], T], (Try[scaladsl.model.HttpResponse], T), Mat]) } - private def adaptServerLayer(serverLayer: scaladsl.Http.ServerLayer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] = + private def adaptOutgoingFlow[T, Mat](scalaFlow: stream.scaladsl.Flow[scaladsl.model.HttpRequest, scaladsl.model.HttpResponse, Future[scaladsl.Http.OutgoingConnection]]): Flow[HttpRequest, HttpResponse, CompletionStage[OutgoingConnection]] = + Flow.fromGraph { + akka.stream.scaladsl.Flow[HttpRequest].map(_.asScala) + .viaMat(scalaFlow)(Keep.right) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava) + } + + private def adaptServerLayer(serverLayer: scaladsl.Http.ServerLayer): BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] = new BidiFlow( JavaMapping.adapterBidiFlow[HttpResponse, sm.HttpResponse, sm.HttpRequest, HttpRequest] .atop(serverLayer)) - private def adaptClientLayer(clientLayer: scaladsl.Http.ClientLayer): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, Unit] = + private def adaptClientLayer(clientLayer: scaladsl.Http.ClientLayer): BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, NotUsed] = new BidiFlow(
JavaMapping.adapterBidiFlow[HttpRequest, sm.HttpRequest, sm.HttpResponse, HttpResponse] .atop(clientLayer)) - private def adaptWsBidiFlow(wsLayer: scaladsl.Http.WebsocketClientLayer): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebsocketUpgradeResponse]] = + private def adaptWsBidiFlow(wsLayer: scaladsl.Http.WebSocketClientLayer): BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, CompletionStage[WebSocketUpgradeResponse]] = new BidiFlow( JavaMapping.adapterBidiFlow[Message, sm.ws.Message, sm.ws.Message, Message] .atopMat(wsLayer)((_, s) ⇒ adaptWsUpgradeResponse(s))) - private def adaptWsFlow(wsLayer: stream.scaladsl.Flow[sm.ws.Message, sm.ws.Message, Future[scaladsl.model.ws.WebsocketUpgradeResponse]]): Flow[Message, Message, Future[WebsocketUpgradeResponse]] = + private def adaptWsFlow(wsLayer: stream.scaladsl.Flow[sm.ws.Message, sm.ws.Message, Future[scaladsl.model.ws.WebSocketUpgradeResponse]]): Flow[Message, Message, CompletionStage[WebSocketUpgradeResponse]] = Flow.fromGraph(JavaMapping.adapterBidiFlow[Message, sm.ws.Message, sm.ws.Message, Message].joinMat(wsLayer)(Keep.right).mapMaterializedValue(adaptWsUpgradeResponse _)) private def adaptWsFlow[Mat](javaFlow: Flow[Message, Message, Mat]): stream.scaladsl.Flow[scaladsl.model.ws.Message, scaladsl.model.ws.Message, Mat] = @@ -637,10 +709,10 @@ class Http(system: ExtendedActorSystem) extends akka.actor.Extension { .viaMat(javaFlow.asScala)(Keep.right) .map(_.asScala) - private def adaptWsResultTuple[T](result: (Future[scaladsl.model.ws.WebsocketUpgradeResponse], T)): Pair[Future[WebsocketUpgradeResponse], T] = + private def adaptWsResultTuple[T](result: (Future[scaladsl.model.ws.WebSocketUpgradeResponse], T)): Pair[CompletionStage[WebSocketUpgradeResponse], T] = result match { case (fut, tMat) ⇒ Pair(adaptWsUpgradeResponse(fut), tMat) } - private def adaptWsUpgradeResponse(responseFuture: Future[scaladsl.model.ws.WebsocketUpgradeResponse]): Future[WebsocketUpgradeResponse] = - responseFuture.map(WebsocketUpgradeResponse.adapt)(system.dispatcher) + private def adaptWsUpgradeResponse(responseFuture: Future[scaladsl.model.ws.WebSocketUpgradeResponse]): CompletionStage[WebSocketUpgradeResponse] = + responseFuture.map(WebSocketUpgradeResponse.adapt)(system.dispatcher).toJava } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala b/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala index 9dd45c5626..6b59e18e40 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/IncomingConnection.scala @@ -1,16 +1,19 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl import java.net.InetSocketAddress -import akka.japi.function.Function; -import scala.concurrent.Future +import akka.NotUsed +import akka.japi.function.Function import akka.stream.Materializer import akka.stream.javadsl.Flow import akka.http.javadsl.model._ import akka.http.scaladsl.{ model ⇒ sm } +import java.util.concurrent.CompletionStage +import scala.concurrent.Future +import scala.compat.java8.FutureConverters._ /** * Represents one accepted incoming HTTP connection. @@ -31,7 +34,7 @@ class IncomingConnection private[http] (delegate: akka.http.scaladsl.Http.Incomi * * Use `Flow.join` or one of the handleXXX methods to consume handle requests on this connection. 
*/ - def flow: Flow[HttpResponse, HttpRequest, Unit] = Flow.fromGraph(delegate.flow).asInstanceOf[Flow[HttpResponse, HttpRequest, Unit]] + def flow: Flow[HttpResponse, HttpRequest, NotUsed] = Flow.fromGraph(delegate.flow).asInstanceOf[Flow[HttpResponse, HttpRequest, NotUsed]] /** * Handles the connection with the given flow, which is materialized exactly once @@ -49,12 +52,12 @@ class IncomingConnection private[http] (delegate: akka.http.scaladsl.Http.Incomi /** * Handles the connection with the given handler function. */ - def handleWithAsyncHandler(handler: Function[HttpRequest, Future[HttpResponse]], materializer: Materializer): Unit = - delegate.handleWithAsyncHandler(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]])(materializer) + def handleWithAsyncHandler(handler: Function[HttpRequest, CompletionStage[HttpResponse]], materializer: Materializer): Unit = + delegate.handleWithAsyncHandler(handler.apply(_).toScala.asInstanceOf[Future[sm.HttpResponse]])(materializer) /** * Handles the connection with the given handler function. */ - def handleWithAsyncHandler(handler: Function[HttpRequest, Future[HttpResponse]], parallelism: Int, materializer: Materializer): Unit = - delegate.handleWithAsyncHandler(handler.apply(_).asInstanceOf[Future[sm.HttpResponse]], parallelism)(materializer) + def handleWithAsyncHandler(handler: Function[HttpRequest, CompletionStage[HttpResponse]], parallelism: Int, materializer: Materializer): Unit = + delegate.handleWithAsyncHandler(handler.apply(_).toScala.asInstanceOf[Future[sm.HttpResponse]], parallelism)(materializer) } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/OutgoingConnection.scala b/akka-http-core/src/main/scala/akka/http/javadsl/OutgoingConnection.scala index 407f7d1aa4..7aebcc28be 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/OutgoingConnection.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/OutgoingConnection.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala b/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala index 35f8496f6e..2247fdfe89 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/ServerBinding.scala @@ -1,11 +1,12 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl import java.net.InetSocketAddress -import scala.concurrent.Future +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * Represents a prospective HTTP server binding. @@ -20,7 +21,7 @@ class ServerBinding private[http] (delegate: akka.http.scaladsl.Http.ServerBindi * Asynchronously triggers the unbinding of the port that was bound by the materialization of the `connections` * [[Source]] * - * The produced [[Future]] is fulfilled when the unbinding has been completed. + * The produced [[java.util.concurrent.CompletionStage]] is fulfilled when the unbinding has been completed. 
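As a sketch of how these pieces fit together on the scaladsl side: bind, handle each accepted connection with an asynchronous handler, and keep the materialized binding around for a later `unbind()`; interface, port and the response text are placeholders:

```
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink

import scala.concurrent.Future

object AsyncHandlerExample extends App {
  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val asyncHandler: HttpRequest => Future[HttpResponse] =
    request => Future(HttpResponse(entity = s"Hello from ${request.uri.path}"))

  // Bind, handle every accepted connection with the asynchronous handler,
  // and keep the materialized ServerBinding so the port can be released later.
  val bindingFuture =
    Http().bind(interface = "localhost", port = 8080)
      .to(Sink.foreach[Http.IncomingConnection](_.handleWithAsyncHandler(asyncHandler)))
      .run()

  // Later: bindingFuture.flatMap(_.unbind()) completes once the port has been unbound.
}
```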
*/ - def unbind(): Future[Unit] = delegate.unbind() + def unbind(): CompletionStage[Unit] = delegate.unbind().toJava } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ContentType.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ContentType.scala new file mode 100644 index 0000000000..7f95cb4ce4 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ContentType.scala @@ -0,0 +1,45 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ +package akka.http.javadsl.model + +import java.util.Optional + +/** + * Represents an Http content-type. A content-type consists of a media-type and an optional charset. + */ +// Has to be defined in Scala even though it's JavaDSL because of: +// https://issues.scala-lang.org/browse/SI-9621 +object ContentType { + + trait Binary extends ContentType { + } + + trait NonBinary extends ContentType { + def charset: HttpCharset + } + + trait WithFixedCharset extends NonBinary { + } + + trait WithCharset extends NonBinary { + } + +} + +trait ContentType { + /** + * The media-type of this content-type. + */ + def mediaType: MediaType + + /** + * True if this ContentType is non-textual. + */ + def binary: Boolean + + /** + * Returns the charset if this ContentType is non-binary. + */ + def getCharsetOption: Optional[HttpCharset] +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/MediaType.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/MediaType.scala new file mode 100644 index 0000000000..237213a362 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/MediaType.scala @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ +package akka.http.javadsl.model + +/** + * Represents an Http media-type. A media-type consists of a main-type and a sub-type. + * + */ +// Has to be defined in Scala even though it's JavaDSL because of: +// https://issues.scala-lang.org/browse/SI-9621 +object MediaType { + + trait Binary extends MediaType { + def toContentType: ContentType.Binary + } + + trait NonBinary extends MediaType { + } + + trait WithFixedCharset extends NonBinary { + def toContentType: ContentType.WithFixedCharset + } + + trait WithOpenCharset extends NonBinary { + def toContentType(charset: HttpCharset): ContentType.WithCharset + } + + trait Multipart extends WithOpenCharset { + } + +} + +trait MediaType { + /** + * The main-type of this media-type. + */ + def mainType: String + + /** + * The sub-type of this media-type. + */ + def subType: String + + /** + * True when this media-type is generally compressible. + */ + def isCompressible: Boolean + + /** + * True when this media-type is not character-based. + */ + def binary: Boolean + + def isApplication: Boolean + + def isAudio: Boolean + + def isImage: Boolean + + def isMessage: Boolean + + def isMultipart: Boolean + + def isText: Boolean + + def isVideo: Boolean + + /** + * Creates a media-range from this media-type. + */ + def toRange: MediaRange + + /** + * Creates a media-range from this media-type with a given qValue. + */ + def toRange(qValue: Float): MediaRange +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Message.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Message.scala index 92b89fef2d..0d5a1e91ac 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Message.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Message.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.ws @@ -9,7 +9,7 @@ import akka.stream.javadsl.Source import akka.util.ByteString /** - * Represents a Websocket message. A message can either be a binary message or a text message. + * Represents a WebSocket message. A message can either be a binary message or a text message. */ sealed abstract class Message { /** @@ -39,7 +39,7 @@ object Message { } /** - * Represents a Websocket text message. A text message can either be strict in which case + * Represents a WebSocket text message. A text message can either be strict in which case * the complete data is already available or it can be streamed in which case [[getStreamedText]] * will return a Source streaming the data as it comes in. */ diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/PeerClosedConnectionException.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/PeerClosedConnectionException.scala index 5ca5d43c98..1b8581a0e3 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/PeerClosedConnectionException.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/PeerClosedConnectionException.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.ws /** - * A PeerClosedConnectionException will be reported to the Websocket handler if the peer has closed the connection. + * A PeerClosedConnectionException will be reported to the WebSocket handler if the peer has closed the connection. * `closeCode` and `closeReason` contain close messages as reported by the peer. */ trait PeerClosedConnectionException extends RuntimeException { diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebSocket.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebSocket.scala new file mode 100644 index 0000000000..7646ffa25c --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebSocket.scala @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.javadsl.model.ws + +import java.lang.{ Iterable ⇒ JIterable } +import akka.http.scaladsl.{ model ⇒ sm } +import akka.http.javadsl.model._ +import akka.stream.Materializer +import akka.stream._ + +/** + * A virtual header that WebSocket requests will contain. Use [[UpgradeToWebSocket.handleMessagesWith]] to + * create a WebSocket handshake response and handle the WebSocket message stream with the given handler. + */ +trait UpgradeToWebSocket extends sm.HttpHeader { + /** + * Returns the sequence of protocols the client accepts. + * + * See http://tools.ietf.org/html/rfc6455#section-1.9 + */ + def getRequestedProtocols(): JIterable[String] + + /** + * Returns a response that can be used to answer a WebSocket handshake request. The connection will afterwards + * use the given handlerFlow to handle WebSocket messages from the client. + */ + def handleMessagesWith(handlerFlow: Graph[FlowShape[Message, Message], _ <: Any]): HttpResponse + + /** + * Returns a response that can be used to answer a WebSocket handshake request. The connection will afterwards + * use the given handlerFlow to handle WebSocket messages from the client. The given subprotocol must be one + * of the ones offered by the client. 
+ */ + def handleMessagesWith(handlerFlow: Graph[FlowShape[Message, Message], _ <: Any], subprotocol: String): HttpResponse + + /** + * Returns a response that can be used to answer a WebSocket handshake request. The connection will afterwards + * use the given inSink to handle WebSocket messages from the client and the given outSource to send messages to the client. + */ + def handleMessagesWith(inSink: Graph[SinkShape[Message], _ <: Any], outSource: Graph[SourceShape[Message], _ <: Any]): HttpResponse + + /** + * Returns a response that can be used to answer a WebSocket handshake request. The connection will afterwards + * use the given inSink to handle WebSocket messages from the client and the given outSource to send messages to the client. + * + * The given subprotocol must be one of the ones offered by the client. + */ + def handleMessagesWith(inSink: Graph[SinkShape[Message], _ <: Any], outSource: Graph[SourceShape[Message], _ <: Any], subprotocol: String): HttpResponse +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebsocket.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebsocket.scala deleted file mode 100644 index d265eba875..0000000000 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/UpgradeToWebsocket.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.javadsl.model.ws - -import java.lang.{ Iterable ⇒ JIterable } -import akka.http.scaladsl.{ model ⇒ sm } -import akka.http.javadsl.model._ -import akka.stream.Materializer -import akka.stream.javadsl.{ Sink, Source, Flow } - -/** - * A virtual header that Websocket requests will contain. Use [[UpgradeToWebsocket.handleMessagesWith]] to - * create a Websocket handshake response and handle the Websocket message stream with the given handler. - */ -trait UpgradeToWebsocket extends sm.HttpHeader { - /** - * Returns the sequence of protocols the client accepts. - * - * See http://tools.ietf.org/html/rfc6455#section-1.9 - */ - def getRequestedProtocols(): JIterable[String] - - /** - * Returns a response that can be used to answer a Websocket handshake request. The connection will afterwards - * use the given handlerFlow to handle Websocket messages from the client. - */ - def handleMessagesWith(handlerFlow: Flow[Message, Message, _]): HttpResponse - - /** - * Returns a response that can be used to answer a Websocket handshake request. The connection will afterwards - * use the given handlerFlow to handle Websocket messages from the client. The given subprotocol must be one - * of the ones offered by the client. - */ - def handleMessagesWith(handlerFlow: Flow[Message, Message, _], subprotocol: String): HttpResponse - - /** - * Returns a response that can be used to answer a Websocket handshake request. The connection will afterwards - * use the given inSink to handle Websocket messages from the client and the given outSource to send messages to the client. - */ - def handleMessagesWith(inSink: Sink[Message, _], outSource: Source[Message, _]): HttpResponse - - /** - * Returns a response that can be used to answer a Websocket handshake request. The connection will afterwards - * use the given inSink to handle Websocket messages from the client and the given outSource to send messages to the client. - * - * The given subprotocol must be one of the ones offered by the client. 
- */ - def handleMessagesWith(inSink: Sink[Message, _], outSource: Source[Message, _], subprotocol: String): HttpResponse -} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocket.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocket.scala new file mode 100644 index 0000000000..3ce4802e91 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocket.scala @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.javadsl.model.ws + +import akka.stream.javadsl.Flow +import akka.http.javadsl.model._ +import akka.http.impl.util.JavaMapping.Implicits._ + +object WebSocket { + /** + * If a given request is a WebSocket request a response accepting the request is returned using the given handler to + * handle the WebSocket message stream. If the request wasn't a WebSocket request a response with status code 400 is + * returned. + */ + def handleWebSocketRequestWith(request: HttpRequest, handler: Flow[Message, Message, _]): HttpResponse = + request.asScala.header[UpgradeToWebSocket] match { + case Some(header) ⇒ header.handleMessagesWith(handler) + case None ⇒ HttpResponse.create().withStatus(StatusCodes.BAD_REQUEST).withEntity("Expected WebSocket request") + } +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketRequest.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketRequest.scala new file mode 100644 index 0000000000..bb91ac8125 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketRequest.scala @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.javadsl.model.ws + +import akka.http.javadsl.model.{ Uri, HttpHeader } +import akka.http.scaladsl.model.ws.{ WebSocketRequest ⇒ ScalaWebSocketRequest } + +/** + * Represents a WebSocket request. Use `WebSocketRequest.create` to create a request + * for a target URI and then use `addHeader` or `requestSubprotocol` to set optional + * details. + */ +abstract class WebSocketRequest { + /** + * Return a copy of this request that contains the given additional header. + */ + def addHeader(header: HttpHeader): WebSocketRequest + + /** + * Return a copy of this request that will require that the server uses the + * given WebSocket subprotocol. + */ + def requestSubprotocol(subprotocol: String): WebSocketRequest + + def asScala: ScalaWebSocketRequest +} +object WebSocketRequest { + import akka.http.impl.util.JavaMapping.Implicits._ + + /** + * Creates a WebSocketRequest to a target URI. Use the methods on `WebSocketRequest` + * to specify further details. + */ + def create(uri: Uri): WebSocketRequest = + wrap(ScalaWebSocketRequest(uri.asScala)) + + /** + * Creates a WebSocketRequest to a target URI. Use the methods on `WebSocketRequest` + * to specify further details. + */ + def create(uriString: String): WebSocketRequest = + create(Uri.create(uriString)) + + /** + * Wraps a Scala version of WebSocketRequest. 
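A short server-side sketch of the upgrade mechanism, using the scaladsl `UpgradeToWebSocket` header that backs this javadsl trait; interface, port and the echo behaviour are placeholders, and streamed messages are ignored to keep the sketch small:

```
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.http.scaladsl.model.ws.{ Message, TextMessage, UpgradeToWebSocket }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Flow

object WebSocketServerExample extends App {
  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  // Echo strict text messages back to the client.
  val echoService = Flow[Message].collect {
    case TextMessage.Strict(text) => TextMessage(s"echo: $text")
  }

  val requestHandler: HttpRequest => HttpResponse = request =>
    request.header[UpgradeToWebSocket] match {
      case Some(upgrade) => upgrade.handleMessages(echoService)
      case None          => HttpResponse(400, entity = "Expected WebSocket request")
    }

  Http().bindAndHandleSync(requestHandler, interface = "localhost", port = 8080)
}
```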
+ */ + def wrap(scalaRequest: ScalaWebSocketRequest): WebSocketRequest = + new WebSocketRequest { + def addHeader(header: HttpHeader): WebSocketRequest = + transform(s ⇒ s.copy(extraHeaders = s.extraHeaders :+ header.asScala)) + def requestSubprotocol(subprotocol: String): WebSocketRequest = + transform(_.copy(subprotocol = Some(subprotocol))) + + def asScala: ScalaWebSocketRequest = scalaRequest + + def transform(f: ScalaWebSocketRequest ⇒ ScalaWebSocketRequest): WebSocketRequest = + wrap(f(asScala)) + } +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketUpgradeResponse.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketUpgradeResponse.scala similarity index 67% rename from akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketUpgradeResponse.scala rename to akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketUpgradeResponse.scala index fc19fb7bcc..a404708067 100644 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketUpgradeResponse.scala +++ b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebSocketUpgradeResponse.scala @@ -1,20 +1,21 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model.ws +import java.util.Optional + import akka.http.javadsl.model.HttpResponse import akka.http.scaladsl import akka.http.scaladsl.model.ws.{ InvalidUpgradeResponse, ValidUpgrade } -import akka.japi.Option /** - * Represents an upgrade response for a Websocket upgrade request. Can either be valid, in which + * Represents an upgrade response for a WebSocket upgrade request. Can either be valid, in which * case the `chosenSubprotocol` method is valid, or if invalid, the `invalidationReason` method * can be used to find out why the upgrade failed. */ -trait WebsocketUpgradeResponse { +trait WebSocketUpgradeResponse { def isValid: Boolean /** @@ -26,7 +27,7 @@ trait WebsocketUpgradeResponse { * If valid, returns `Some(subprotocol)` (if any was requested), or `None` if none was * chosen or offered. */ - def chosenSubprotocol: Option[String] + def chosenSubprotocol: Optional[String] /** * If invalid, the reason why the server's upgrade response could not be accepted. 
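A sketch of the builder-style javadsl API introduced above; the URI, the header built with `RawHeader.create` and the subprotocol name are merely illustrative values:

```
import akka.http.javadsl.model.headers.RawHeader
import akka.http.javadsl.model.ws.WebSocketRequest

object WebSocketRequestExample {
  // Build a request for a target URI, attach an extra header and require a subprotocol.
  val request: WebSocketRequest =
    WebSocketRequest.create("wss://example.com/events")
      .addHeader(RawHeader.create("X-Correlation-Id", "42"))
      .requestSubprotocol("v1.json")
}
```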
@@ -34,23 +35,23 @@ trait WebsocketUpgradeResponse { def invalidationReason: String } -object WebsocketUpgradeResponse { +object WebSocketUpgradeResponse { import akka.http.impl.util.JavaMapping.Implicits._ - def adapt(scalaResponse: scaladsl.model.ws.WebsocketUpgradeResponse): WebsocketUpgradeResponse = + def adapt(scalaResponse: scaladsl.model.ws.WebSocketUpgradeResponse): WebSocketUpgradeResponse = scalaResponse match { case ValidUpgrade(resp, chosen) ⇒ - new WebsocketUpgradeResponse { + new WebSocketUpgradeResponse { def isValid: Boolean = true def response: HttpResponse = resp - def chosenSubprotocol: Option[String] = chosen.asJava + def chosenSubprotocol: Optional[String] = chosen.asJava def invalidationReason: String = throw new UnsupportedOperationException("invalidationReason must not be called for valid response") } case InvalidUpgradeResponse(resp, cause) ⇒ - new WebsocketUpgradeResponse { + new WebSocketUpgradeResponse { def isValid: Boolean = false def response: HttpResponse = resp - def chosenSubprotocol: Option[String] = throw new UnsupportedOperationException("chosenSubprotocol must not be called for valid response") + def chosenSubprotocol: Optional[String] = throw new UnsupportedOperationException("chosenSubprotocol must not be called for valid response") def invalidationReason: String = cause } } diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Websocket.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Websocket.scala deleted file mode 100644 index 31bdf1dc44..0000000000 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/Websocket.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.javadsl.model.ws - -import akka.stream.javadsl.Flow -import akka.http.javadsl.model._ -import akka.http.impl.util.JavaMapping.Implicits._ - -object Websocket { - /** - * If a given request is a Websocket request a response accepting the request is returned using the given handler to - * handle the Websocket message stream. If the request wasn't a Websocket request a response with status code 400 is - * returned. - */ - def handleWebsocketRequestWith(request: HttpRequest, handler: Flow[Message, Message, _]): HttpResponse = - request.asScala.header[UpgradeToWebsocket] match { - case Some(header) ⇒ header.handleMessagesWith(handler) - case None ⇒ HttpResponse.create().withStatus(StatusCodes.BAD_REQUEST).withEntity("Expected websocket request") - } -} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketRequest.scala b/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketRequest.scala deleted file mode 100644 index d895cb6e56..0000000000 --- a/akka-http-core/src/main/scala/akka/http/javadsl/model/ws/WebsocketRequest.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.javadsl.model.ws - -import akka.http.javadsl.model.{ Uri, HttpHeader } -import akka.http.scaladsl.model.ws.{ WebsocketRequest ⇒ ScalaWebsocketRequest } - -/** - * Represents a Websocket request. Use `WebsocketRequest.create` to create a request - * for a target URI and then use `addHeader` or `requestSubprotocol` to set optional - * details. - */ -abstract class WebsocketRequest { - /** - * Return a copy of this request that contains the given additional header. - */ - def addHeader(header: HttpHeader): WebsocketRequest - - /** - * Return a copy of this request that will require that the server uses the - * given Websocket subprotocol. 
- */ - def requestSubprotocol(subprotocol: String): WebsocketRequest - - def asScala: ScalaWebsocketRequest -} -object WebsocketRequest { - import akka.http.impl.util.JavaMapping.Implicits._ - - /** - * Creates a WebsocketRequest to a target URI. Use the methods on `WebsocketRequest` - * to specify further details. - */ - def create(uri: Uri): WebsocketRequest = - wrap(ScalaWebsocketRequest(uri.asScala)) - - /** - * Creates a WebsocketRequest to a target URI. Use the methods on `WebsocketRequest` - * to specify further details. - */ - def create(uriString: String): WebsocketRequest = - create(Uri.create(uriString)) - - /** - * Wraps a Scala version of WebsocketRequest. - */ - def wrap(scalaRequest: ScalaWebsocketRequest): WebsocketRequest = - new WebsocketRequest { - def addHeader(header: HttpHeader): WebsocketRequest = - transform(s ⇒ s.copy(extraHeaders = s.extraHeaders :+ header.asScala)) - def requestSubprotocol(subprotocol: String): WebsocketRequest = - transform(_.copy(subprotocol = Some(subprotocol))) - - def asScala: ScalaWebsocketRequest = scalaRequest - - def transform(f: ScalaWebsocketRequest ⇒ ScalaWebsocketRequest): WebsocketRequest = - wrap(f(asScala)) - } -} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/ClientConnectionSettings.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ClientConnectionSettings.scala new file mode 100644 index 0000000000..dbf2a7f16b --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ClientConnectionSettings.scala @@ -0,0 +1,45 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.javadsl.settings + +import java.util.{ Optional, Random } + +import akka.http.impl.settings.ClientConnectionSettingsImpl +import akka.http.javadsl.model.headers.UserAgent +import akka.io.Inet.SocketOption +import com.typesafe.config.Config + +import akka.http.impl.util.JavaMapping.Implicits._ +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters._ +import scala.concurrent.duration.{ Duration, FiniteDuration } + +/** + * Public API but not intended for subclassing + */ +abstract class ClientConnectionSettings private[akka] () { self: ClientConnectionSettingsImpl ⇒ + def getUserAgentHeader: Optional[UserAgent] + def getConnectingTimeout: FiniteDuration + def getIdleTimeout: Duration + def getRequestHeaderSizeHint: Int + def getWebsocketRandomFactory: java.util.function.Supplier[Random] + def getSocketOptions: java.lang.Iterable[SocketOption] + def getParserSettings: ParserSettings + + // --- + + def withUserAgentHeader(newValue: Optional[UserAgent]): ClientConnectionSettings = self.copy(userAgentHeader = newValue.asScala.map(_.asScala)) + def withConnectingTimeout(newValue: FiniteDuration): ClientConnectionSettings = self.copy(connectingTimeout = newValue) + def withIdleTimeout(newValue: Duration): ClientConnectionSettings = self.copy(idleTimeout = newValue) + def withRequestHeaderSizeHint(newValue: Int): ClientConnectionSettings = self.copy(requestHeaderSizeHint = newValue) + def withWebsocketRandomFactory(newValue: java.util.function.Supplier[Random]): ClientConnectionSettings = self.copy(websocketRandomFactory = () ⇒ newValue.get()) + def withSocketOptions(newValue: java.lang.Iterable[SocketOption]): ClientConnectionSettings = self.copy(socketOptions = newValue.asScala.toList) + def withParserSettings(newValue: ParserSettings): ClientConnectionSettings = self.copy(parserSettings = newValue.asScala) + +} + +object ClientConnectionSettings extends 
SettingsCompanion[ClientConnectionSettings] { + def create(config: Config): ClientConnectionSettings = ClientConnectionSettingsImpl(config) + def create(configOverrides: String): ClientConnectionSettings = ClientConnectionSettingsImpl(configOverrides) +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/ConnectionPoolSettings.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ConnectionPoolSettings.scala new file mode 100644 index 0000000000..5f14c49432 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ConnectionPoolSettings.scala @@ -0,0 +1,36 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.javadsl.settings + +import akka.http.impl.settings.ConnectionPoolSettingsImpl +import com.typesafe.config.Config + +import scala.concurrent.duration.Duration +import akka.http.impl.util.JavaMapping.Implicits._ + +/** + * Public API but not intended for subclassing + */ +abstract class ConnectionPoolSettings private[akka] () { self: ConnectionPoolSettingsImpl ⇒ + def getMaxConnections: Int + def getMaxRetries: Int + def getMaxOpenRequests: Int + def getPipeliningLimit: Int + def getIdleTimeout: Duration + def getConnectionSettings: ClientConnectionSettings + + // --- + + def withMaxConnections(n: Int): ConnectionPoolSettings = self.copy(maxConnections = n) + def withMaxRetries(n: Int): ConnectionPoolSettings = self.copy(maxRetries = n) + def withMaxOpenRequests(newValue: Int): ConnectionPoolSettings = self.copy(maxOpenRequests = newValue) + def withPipeliningLimit(newValue: Int): ConnectionPoolSettings = self.copy(pipeliningLimit = newValue) + def withIdleTimeout(newValue: Duration): ConnectionPoolSettings = self.copy(idleTimeout = newValue) + def withConnectionSettings(newValue: ClientConnectionSettings): ConnectionPoolSettings = self.copy(connectionSettings = newValue.asScala) +} + +object ConnectionPoolSettings extends SettingsCompanion[ConnectionPoolSettings] { + override def create(config: Config): ConnectionPoolSettings = ConnectionPoolSettingsImpl(config) + override def create(configOverrides: String): ConnectionPoolSettings = ConnectionPoolSettingsImpl(configOverrides) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/ParserSettings.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ParserSettings.scala new file mode 100644 index 0000000000..888dfdaadd --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ParserSettings.scala @@ -0,0 +1,81 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
+ */ +package akka.http.javadsl.settings + +import java.util.Optional + +import akka.http.impl.engine.parsing.BodyPartParser +import akka.http.impl.settings.ParserSettingsImpl +import java.{ util ⇒ ju } +import akka.http.impl.util.JavaMapping.Implicits._ +import scala.annotation.varargs +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters + +import akka.http.javadsl.model.{ HttpMethod, StatusCode, Uri } +import com.typesafe.config.Config + +/** + * Public API but not intended for subclassing + */ +abstract class ParserSettings private[akka] () extends BodyPartParser.Settings { self: ParserSettingsImpl ⇒ + def getMaxUriLength: Int + def getMaxMethodLength: Int + def getMaxResponseReasonLength: Int + def getMaxHeaderNameLength: Int + def getMaxHeaderValueLength: Int + def getMaxHeaderCount: Int + def getMaxContentLength: Long + def getMaxChunkExtLength: Int + def getMaxChunkSize: Int + def getUriParsingMode: Uri.ParsingMode + def getCookieParsingMode: ParserSettings.CookieParsingMode + def getIllegalHeaderWarnings: Boolean + def getErrorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity + def getHeaderValueCacheLimits: ju.Map[String, Int] + def getIncludeTlsSessionInfoHeader: Boolean + def headerValueCacheLimits: Map[String, Int] + def getCustomMethods: java.util.function.Function[String, Optional[HttpMethod]] + def getCustomStatusCodes: java.util.function.Function[Int, Optional[StatusCode]] + + // --- + + def withMaxUriLength(newValue: Int): ParserSettings = self.copy(maxUriLength = newValue) + def withMaxMethodLength(newValue: Int): ParserSettings = self.copy(maxMethodLength = newValue) + def withMaxResponseReasonLength(newValue: Int): ParserSettings = self.copy(maxResponseReasonLength = newValue) + def withMaxHeaderNameLength(newValue: Int): ParserSettings = self.copy(maxHeaderNameLength = newValue) + def withMaxHeaderValueLength(newValue: Int): ParserSettings = self.copy(maxHeaderValueLength = newValue) + def withMaxHeaderCount(newValue: Int): ParserSettings = self.copy(maxHeaderCount = newValue) + def withMaxContentLength(newValue: Long): ParserSettings = self.copy(maxContentLength = newValue) + def withMaxChunkExtLength(newValue: Int): ParserSettings = self.copy(maxChunkExtLength = newValue) + def withMaxChunkSize(newValue: Int): ParserSettings = self.copy(maxChunkSize = newValue) + def withUriParsingMode(newValue: Uri.ParsingMode): ParserSettings = self.copy(uriParsingMode = newValue.asScala) + def withCookieParsingMode(newValue: ParserSettings.CookieParsingMode): ParserSettings = self.copy(cookieParsingMode = newValue.asScala) + def withIllegalHeaderWarnings(newValue: Boolean): ParserSettings = self.copy(illegalHeaderWarnings = newValue) + def withErrorLoggingVerbosity(newValue: ParserSettings.ErrorLoggingVerbosity): ParserSettings = self.copy(errorLoggingVerbosity = newValue.asScala) + def withHeaderValueCacheLimits(newValue: ju.Map[String, Int]): ParserSettings = self.copy(headerValueCacheLimits = newValue.asScala.toMap) + def withIncludeTlsSessionInfoHeader(newValue: Boolean): ParserSettings = self.copy(includeTlsSessionInfoHeader = newValue) + + // special --- + + @varargs + def withCustomMethods(methods: HttpMethod*): ParserSettings = { + val map = methods.map(m ⇒ m.name -> m.asScala).toMap + self.copy(customMethods = map.get) + } + @varargs + def withCustomStatusCodes(codes: StatusCode*): ParserSettings = { + val map = codes.map(c ⇒ c.intValue -> c.asScala).toMap + self.copy(customStatusCodes = map.get) + } + +} + +object ParserSettings 
extends SettingsCompanion[ParserSettings] { + trait CookieParsingMode + trait ErrorLoggingVerbosity + + override def create(config: Config): ParserSettings = ParserSettingsImpl(config) + override def create(configOverrides: String): ParserSettings = ParserSettingsImpl(configOverrides) +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/RoutingSettings.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/RoutingSettings.scala new file mode 100644 index 0000000000..2850c42592 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/RoutingSettings.scala @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.javadsl.settings + +import akka.http.impl.settings.RoutingSettingsImpl +import com.typesafe.config.Config + +/** + * Public API but not intended for subclassing + */ +abstract class RoutingSettings private[akka] () { self: RoutingSettingsImpl ⇒ + def getVerboseErrorMessages: Boolean + def getFileGetConditional: Boolean + def getRenderVanityFooter: Boolean + def getRangeCountLimit: Int + def getRangeCoalescingThreshold: Long + def getDecodeMaxBytesPerChunk: Int + def getFileIODispatcher: String + + def withVerboseErrorMessages(verboseErrorMessages: Boolean): RoutingSettings = self.copy(verboseErrorMessages = verboseErrorMessages) + def withFileGetConditional(fileGetConditional: Boolean): RoutingSettings = self.copy(fileGetConditional = fileGetConditional) + def withRenderVanityFooter(renderVanityFooter: Boolean): RoutingSettings = self.copy(renderVanityFooter = renderVanityFooter) + def withRangeCountLimit(rangeCountLimit: Int): RoutingSettings = self.copy(rangeCountLimit = rangeCountLimit) + def withRangeCoalescingThreshold(rangeCoalescingThreshold: Long): RoutingSettings = self.copy(rangeCoalescingThreshold = rangeCoalescingThreshold) + def withDecodeMaxBytesPerChunk(decodeMaxBytesPerChunk: Int): RoutingSettings = self.copy(decodeMaxBytesPerChunk = decodeMaxBytesPerChunk) + def withFileIODispatcher(fileIODispatcher: String): RoutingSettings = self.copy(fileIODispatcher = fileIODispatcher) +} + +object RoutingSettings extends SettingsCompanion[RoutingSettings] { + override def create(config: Config): RoutingSettings = RoutingSettingsImpl(config) + override def create(configOverrides: String): RoutingSettings = RoutingSettingsImpl(configOverrides) +} diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/ServerSettings.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ServerSettings.scala new file mode 100644 index 0000000000..f835a2c2e0 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/ServerSettings.scala @@ -0,0 +1,70 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
+ */ +package akka.http.javadsl.settings + +import java.util.{ Optional, Random } + +import akka.http.impl.settings.ServerSettingsImpl +import akka.http.javadsl.model.headers.Host +import akka.http.javadsl.model.headers.Server +import akka.io.Inet.SocketOption +import akka.http.impl.util.JavaMapping.Implicits._ +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters._ + +import scala.concurrent.duration.{ Duration, FiniteDuration } + +/** + * Public API but not intended for subclassing + */ +abstract class ServerSettings { self: ServerSettingsImpl ⇒ + def getServerHeader: Optional[Server] + def getTimeouts: ServerSettings.Timeouts + def getMaxConnections: Int + def getPipeliningLimit: Int + def getRemoteAddressHeader: Boolean + def getRawRequestUriHeader: Boolean + def getTransparentHeadRequests: Boolean + def getVerboseErrorMessages: Boolean + def getResponseHeaderSizeHint: Int + def getBacklog: Int + def getSocketOptions: java.lang.Iterable[SocketOption] + def getDefaultHostHeader: Host + def getWebsocketRandomFactory: java.util.function.Supplier[Random] + def getParserSettings: ParserSettings + + // --- + + def withServerHeader(newValue: Optional[Server]): ServerSettings = self.copy(serverHeader = newValue.asScala.map(_.asScala)) + def withTimeouts(newValue: ServerSettings.Timeouts): ServerSettings = self.copy(timeouts = newValue.asScala) + def withMaxConnections(newValue: Int): ServerSettings = self.copy(maxConnections = newValue) + def withPipeliningLimit(newValue: Int): ServerSettings = self.copy(pipeliningLimit = newValue) + def withRemoteAddressHeader(newValue: Boolean): ServerSettings = self.copy(remoteAddressHeader = newValue) + def withRawRequestUriHeader(newValue: Boolean): ServerSettings = self.copy(rawRequestUriHeader = newValue) + def withTransparentHeadRequests(newValue: Boolean): ServerSettings = self.copy(transparentHeadRequests = newValue) + def withVerboseErrorMessages(newValue: Boolean): ServerSettings = self.copy(verboseErrorMessages = newValue) + def withResponseHeaderSizeHint(newValue: Int): ServerSettings = self.copy(responseHeaderSizeHint = newValue) + def withBacklog(newValue: Int): ServerSettings = self.copy(backlog = newValue) + def withSocketOptions(newValue: java.lang.Iterable[SocketOption]): ServerSettings = self.copy(socketOptions = newValue.asScala.toList) + def withDefaultHostHeader(newValue: Host): ServerSettings = self.copy(defaultHostHeader = newValue.asScala) + def withParserSettings(newValue: ParserSettings): ServerSettings = self.copy(parserSettings = newValue.asScala) + def withWebsocketRandomFactory(newValue: java.util.function.Supplier[Random]): ServerSettings = self.copy(websocketRandomFactory = () ⇒ newValue.get()) + +} + +object ServerSettings { + trait Timeouts { + def idleTimeout: Duration + def requestTimeout: Duration + def bindTimeout: FiniteDuration + + // --- + def withIdleTimeout(newValue: Duration): ServerSettings.Timeouts = self.copy(idleTimeout = newValue) + def withRequestTimeout(newValue: Duration): ServerSettings.Timeouts = self.copy(requestTimeout = newValue) + def withBindTimeout(newValue: FiniteDuration): ServerSettings.Timeouts = self.copy(bindTimeout = newValue) + + /** INTERNAL API */ + protected def self = this.asInstanceOf[ServerSettingsImpl.Timeouts] + } +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/javadsl/settings/SettingsCompanion.scala b/akka-http-core/src/main/scala/akka/http/javadsl/settings/SettingsCompanion.scala new file mode 100644 index 
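A sketch of how these javadsl settings wrappers are meant to be used: start from the ActorSystem's configuration via the `create(system)` convenience (defined on the `SettingsCompanion` trait added in the next file) and override individual values with the immutable `with*` copies; the concrete numbers are placeholders:

```
import akka.actor.ActorSystem
import akka.http.javadsl.settings.{ ClientConnectionSettings, ConnectionPoolSettings }

import scala.concurrent.duration._

object SettingsExample {
  val system = ActorSystem()

  // Start from the ActorSystem configuration, then tweak individual values.
  val connectionSettings: ClientConnectionSettings =
    ClientConnectionSettings.create(system)
      .withConnectingTimeout(3.seconds)

  val poolSettings: ConnectionPoolSettings =
    ConnectionPoolSettings.create(system)
      .withMaxConnections(8)
      .withMaxRetries(1)
      .withConnectionSettings(connectionSettings)
}
```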
0000000000..c9f7414d43 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/javadsl/settings/SettingsCompanion.scala @@ -0,0 +1,30 @@ +package akka.http.javadsl.settings + +import akka.actor.ActorSystem +import com.typesafe.config.Config + +trait SettingsCompanion[T] { + + /** + * Creates an instance of settings using the configuration provided by the given ActorSystem. + * + * Java API + */ + final def create(system: ActorSystem): T = create(system.settings.config) + + /** + * Creates an instance of settings using the given Config. + * + * Java API + */ + def create(config: Config): T + + /** + * Create an instance of settings using the given String of config overrides to override + * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in + * the class loader of this class). + * + * Java API + */ + def create(configOverrides: String): T +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/ConnectionContext.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/ConnectionContext.scala new file mode 100644 index 0000000000..95b21f1f88 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/ConnectionContext.scala @@ -0,0 +1,55 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.scaladsl + +import akka.stream.io.{ ClientAuth, NegotiateNewSession } + +import scala.collection.JavaConverters._ +import java.util.{ Optional, Collection ⇒ JCollection } +import javax.net.ssl._ + +import scala.collection.immutable +import scala.compat.java8.OptionConverters._ + +trait ConnectionContext extends akka.http.javadsl.ConnectionContext { + final def defaultPort = getDefaultPort +} + +object ConnectionContext { + //#https-context-creation + def https(sslContext: SSLContext, + enabledCipherSuites: Option[immutable.Seq[String]] = None, + enabledProtocols: Option[immutable.Seq[String]] = None, + clientAuth: Option[ClientAuth] = None, + sslParameters: Option[SSLParameters] = None) = { + new HttpsConnectionContext(sslContext, enabledCipherSuites, enabledProtocols, clientAuth, sslParameters) + } + //#https-context-creation + + def noEncryption() = HttpConnectionContext +} + +final class HttpsConnectionContext( + val sslContext: SSLContext, + val enabledCipherSuites: Option[immutable.Seq[String]] = None, + val enabledProtocols: Option[immutable.Seq[String]] = None, + val clientAuth: Option[ClientAuth] = None, + val sslParameters: Option[SSLParameters] = None) + extends akka.http.javadsl.HttpsConnectionContext with ConnectionContext { + + def firstSession = NegotiateNewSession(enabledCipherSuites, enabledProtocols, clientAuth, sslParameters) + + override def getSslContext = sslContext + override def getEnabledCipherSuites: Optional[JCollection[String]] = enabledCipherSuites.map(_.asJavaCollection).asJava + override def getEnabledProtocols: Optional[JCollection[String]] = enabledProtocols.map(_.asJavaCollection).asJava + override def getClientAuth: Optional[ClientAuth] = clientAuth.asJava + override def getSslParameters: Optional[SSLParameters] = sslParameters.asJava +} + +sealed class HttpConnectionContext extends akka.http.javadsl.HttpConnectionContext with ConnectionContext +final object HttpConnectionContext extends HttpConnectionContext { + /** Java API */ + def getInstance() = this +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala index 4a1edf67fb..265560b18a 100644 --- 
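A sketch of building an `HttpsConnectionContext` with the new `ConnectionContext.https` factory; the keystore name, password and algorithm names are placeholders following the usual JSSE setup:

```
import java.io.InputStream
import java.security.{ KeyStore, SecureRandom }
import javax.net.ssl.{ KeyManagerFactory, SSLContext, TrustManagerFactory }

import akka.http.scaladsl.{ ConnectionContext, HttpsConnectionContext }

object HttpsContextExample {
  def createHttpsContext(): HttpsConnectionContext = {
    val password = "changeit".toCharArray // placeholder password

    val keyStore = KeyStore.getInstance("PKCS12")
    val keyStoreStream: InputStream = getClass.getClassLoader.getResourceAsStream("server.p12")
    keyStore.load(keyStoreStream, password)

    val keyManagerFactory = KeyManagerFactory.getInstance("SunX509")
    keyManagerFactory.init(keyStore, password)

    val trustManagerFactory = TrustManagerFactory.getInstance("SunX509")
    trustManagerFactory.init(keyStore)

    val sslContext = SSLContext.getInstance("TLS")
    sslContext.init(keyManagerFactory.getKeyManagers, trustManagerFactory.getTrustManagers, new SecureRandom)

    // All parameters other than the SSLContext are optional and default to None.
    ConnectionContext.https(sslContext)
  }
}
```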
a/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala @@ -1,12 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl import java.net.InetSocketAddress import java.util.concurrent.ConcurrentHashMap -import java.util.{ Collection ⇒ JCollection } import javax.net.ssl._ import akka.actor._ @@ -15,22 +14,23 @@ import akka.http._ import akka.http.impl.engine.HttpConnectionTimeoutException import akka.http.impl.engine.client._ import akka.http.impl.engine.server._ -import akka.http.impl.engine.ws.WebsocketClientBlueprint -import akka.http.impl.util.{ Java6Compat, ReadTheDocumentationException, StreamUtils } +import akka.http.impl.engine.ws.WebSocketClientBlueprint +import akka.http.impl.settings.{ ConnectionPoolSetup, HostConnectionPoolSetup } +import akka.http.impl.util.StreamUtils import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.Host -import akka.http.scaladsl.model.ws.{ WebsocketUpgradeResponse, WebsocketRequest, Message } +import akka.http.scaladsl.model.ws.{ Message, WebSocketRequest, WebSocketUpgradeResponse } +import akka.http.scaladsl.settings.{ ServerSettings, ClientConnectionSettings, ConnectionPoolSettings } import akka.http.scaladsl.util.FastFuture -import akka.japi +import akka.NotUsed import akka.stream.Materializer import akka.stream.io._ import akka.stream.scaladsl._ import com.typesafe.config.Config -import com.typesafe.sslconfig.akka.util.AkkaLoggerFactory import com.typesafe.sslconfig.akka._ -import com.typesafe.sslconfig.ssl._ +import com.typesafe.sslconfig.akka.util.AkkaLoggerFactory +import com.typesafe.sslconfig.ssl.ConfigSSLContextBuilder -import scala.collection.immutable import scala.concurrent.{ ExecutionContext, Future, Promise, TimeoutException } import scala.util.Try import scala.util.control.NonFatal @@ -44,35 +44,40 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte // configured default HttpsContext for the client-side // SYNCHRONIZED ACCESS ONLY! - private[this] var _defaultClientHttpsContext: HttpsContext = _ + private[this] var _defaultClientHttpsConnectionContext: HttpsConnectionContext = _ + private[this] var _defaultServerConnectionContext: ConnectionContext = _ // ** SERVER ** // + private[this] final val DefaultPortForProtocol = -1 // any negative value + /** * Creates a [[Source]] of [[IncomingConnection]] instances which represents a prospective HTTP server binding * on the given `endpoint`. + * * If the given port is 0 the resulting source can be materialized several times. Each materialization will * then be assigned a new local port by the operating system, which can then be retrieved by the materialized * [[ServerBinding]]. + * * If the given port is non-zero subsequent materialization attempts of the produced source will immediately * fail, unless the first materialization has already been unbound. Unbinding can be triggered via the materialized * [[ServerBinding]]. * - * If an [[HttpsContext]] is given it will be used for setting up TLS encryption on the binding. + * If an [[ConnectionContext]] is given it will be used for setting up TLS encryption on the binding. * Otherwise the binding will be unencrypted. * * If no `port` is explicitly given (or the port value is negative) the protocol's default port will be used, * which is 80 for HTTP and 443 for HTTPS. 
* * To configure additional settings for a server started using this method, - * use the `akka.http.server` config section or pass in a [[ServerSettings]] explicitly. + * use the `akka.http.server` config section or pass in a [[akka.http.scaladsl.settings.ServerSettings]] explicitly. */ - def bind(interface: String, port: Int = -1, + def bind(interface: String, port: Int = DefaultPortForProtocol, + connectionContext: ConnectionContext = defaultServerHttpContext, settings: ServerSettings = ServerSettings(system), - httpsContext: Option[HttpsContext] = None, log: LoggingAdapter = system.log)(implicit fm: Materializer): Source[IncomingConnection, Future[ServerBinding]] = { - val effectivePort = if (port >= 0) port else if (httpsContext.isEmpty) 80 else 443 - val tlsStage = sslTlsStage(httpsContext, Server) + val effectivePort = if (port >= 0) port else connectionContext.defaultPort + val tlsStage = sslTlsStage(connectionContext, Server) val connections: Source[Tcp.IncomingConnection, Future[Tcp.ServerBinding]] = Tcp().bind(interface, effectivePort, settings.backlog, settings.socketOptions, halfClose = false, settings.timeouts.idleTimeout) connections.map { @@ -96,9 +101,9 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * use the `akka.http.server` config section or pass in a [[ServerSettings]] explicitly. */ def bindAndHandle(handler: Flow[HttpRequest, HttpResponse, Any], - interface: String, port: Int = -1, + interface: String, port: Int = DefaultPortForProtocol, + connectionContext: ConnectionContext = defaultServerHttpContext, settings: ServerSettings = ServerSettings(system), - httpsContext: Option[HttpsContext] = None, log: LoggingAdapter = system.log)(implicit fm: Materializer): Future[ServerBinding] = { def handleOneConnection(incomingConnection: IncomingConnection): Future[Unit] = try @@ -112,7 +117,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte throw e } - bind(interface, port, settings, httpsContext, log) + bind(interface, port, connectionContext, settings, log) .mapAsyncUnordered(settings.maxConnections) { connection ⇒ handleOneConnection(connection).recoverWith { // Ignore incoming errors from the connection as they will cancel the binding. @@ -137,11 +142,11 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * use the `akka.http.server` config section or pass in a [[ServerSettings]] explicitly. */ def bindAndHandleSync(handler: HttpRequest ⇒ HttpResponse, - interface: String, port: Int = -1, + interface: String, port: Int = DefaultPortForProtocol, + connectionContext: ConnectionContext = defaultServerHttpContext, settings: ServerSettings = ServerSettings(system), - httpsContext: Option[HttpsContext] = None, log: LoggingAdapter = system.log)(implicit fm: Materializer): Future[ServerBinding] = - bindAndHandle(Flow[HttpRequest].map(handler), interface, port, settings, httpsContext, log) + bindAndHandle(Flow[HttpRequest].map(handler), interface, port, connectionContext, settings, log) /** * Convenience method which starts a new HTTP server at the given endpoint and uses the given `handler` @@ -154,12 +159,12 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * use the `akka.http.server` config section or pass in a [[ServerSettings]] explicitly. 
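A quick sketch of the handler-based convenience variant under the new signature; the interface and port are arbitrary examples, and since no `connectionContext` is passed the default unencrypted context applies:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpEntity, HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import scala.concurrent.Future

implicit val system = ActorSystem("handler-example")
implicit val materializer = ActorMaterializer()

val handler: HttpRequest => HttpResponse =
  _ => HttpResponse(entity = HttpEntity("Hello from bindAndHandleSync"))

// plain HTTP: no connectionContext argument, so the default unencrypted context is used
val binding: Future[Http.ServerBinding] =
  Http().bindAndHandleSync(handler, interface = "localhost", port = 8080)
```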
*/ def bindAndHandleAsync(handler: HttpRequest ⇒ Future[HttpResponse], - interface: String, port: Int = -1, + interface: String, port: Int = DefaultPortForProtocol, + connectionContext: ConnectionContext = defaultServerHttpContext, settings: ServerSettings = ServerSettings(system), - httpsContext: Option[HttpsContext] = None, parallelism: Int = 1, log: LoggingAdapter = system.log)(implicit fm: Materializer): Future[ServerBinding] = - bindAndHandle(Flow[HttpRequest].mapAsync(parallelism)(handler), interface, port, settings, httpsContext, log) + bindAndHandle(Flow[HttpRequest].mapAsync(parallelism)(handler), interface, port, connectionContext, settings, log) type ServerLayer = Http.ServerLayer @@ -194,36 +199,36 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte localAddress: Option[InetSocketAddress] = None, settings: ClientConnectionSettings = ClientConnectionSettings(system), log: LoggingAdapter = system.log): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - _outgoingConnection(host, port, localAddress, settings, None, log) + _outgoingConnection(host, port, localAddress, settings, ConnectionContext.noEncryption(), log) /** * Same as [[outgoingConnection]] but for encrypted (HTTPS) connections. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used + * If an explicit [[HttpsConnectionContext]] is given then it rather than the configured default [[HttpsConnectionContext]] will be used * for encryption on the connection. * * To configure additional settings for requests made using this method, * use the `akka.http.client` config section or pass in a [[ClientConnectionSettings]] explicitly. */ - def outgoingConnectionTls(host: String, port: Int = 443, - localAddress: Option[InetSocketAddress] = None, - settings: ClientConnectionSettings = ClientConnectionSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = - _outgoingConnection(host, port, localAddress, settings, effectiveHttpsContext(httpsContext), log) + def outgoingConnectionHttps(host: String, port: Int = 443, + connectionContext: HttpsConnectionContext = defaultClientHttpsContext, + localAddress: Option[InetSocketAddress] = None, + settings: ClientConnectionSettings = ClientConnectionSettings(system), + log: LoggingAdapter = system.log): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = + _outgoingConnection(host, port, localAddress, settings, connectionContext, log) private def _outgoingConnection(host: String, port: Int, localAddress: Option[InetSocketAddress], - settings: ClientConnectionSettings, httpsContext: Option[HttpsContext], + settings: ClientConnectionSettings, connectionContext: ConnectionContext, log: LoggingAdapter): Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]] = { - val hostHeader = if (port == (if (httpsContext.isEmpty) 80 else 443)) Host(host) else Host(host, port) + val hostHeader = if (port == connectionContext.defaultPort) Host(host) else Host(host, port) val layer = clientLayer(hostHeader, settings, log) - layer.joinMat(_outgoingTlsConnectionLayer(host, port, localAddress, settings, httpsContext, log))(Keep.right) + layer.joinMat(_outgoingTlsConnectionLayer(host, port, localAddress, settings, connectionContext, log))(Keep.right) } private def _outgoingTlsConnectionLayer(host: String, port: Int, localAddress: Option[InetSocketAddress], - settings: ClientConnectionSettings, 
httpsContext: Option[HttpsContext], + settings: ClientConnectionSettings, connectionContext: ConnectionContext, log: LoggingAdapter): Flow[SslTlsOutbound, SslTlsInbound, Future[OutgoingConnection]] = { - val tlsStage = sslTlsStage(httpsContext, Client, Some(host -> port)) + val tlsStage = sslTlsStage(connectionContext, Client, Some(host -> port)) val transportFlow = Tcp().outgoingConnection(new InetSocketAddress(host, port), localAddress, settings.socketOptions, halfClose = true, settings.connectingTimeout, settings.idleTimeout) @@ -272,28 +277,30 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte def newHostConnectionPool[T](host: String, port: Int = 80, settings: ConnectionPoolSettings = ConnectionPoolSettings(system), log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { - val cps = ConnectionPoolSetup(settings, None, log) + val cps = ConnectionPoolSetup(settings, ConnectionContext.noEncryption(), log) newHostConnectionPool(HostConnectionPoolSetup(host, port, cps)) } /** * Same as [[newHostConnectionPool]] but for encrypted (HTTPS) connections. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used + * If an explicit [[ConnectionContext]] is given then it rather than the configured default [[ConnectionContext]] will be used * for encryption on the connections. * * To configure additional settings for the pool (and requests made using it), * use the `akka.http.host-connection-pool` config section or pass in a [[ConnectionPoolSettings]] explicitly. */ - def newHostConnectionPoolTls[T](host: String, port: Int = 443, - settings: ConnectionPoolSettings = ConnectionPoolSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { - val cps = ConnectionPoolSetup(settings, effectiveHttpsContext(httpsContext), log) + def newHostConnectionPoolHttps[T](host: String, port: Int = 443, + connectionContext: HttpsConnectionContext = defaultClientHttpsContext, + settings: ConnectionPoolSettings = ConnectionPoolSettings(system), + log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { + val cps = ConnectionPoolSetup(settings, connectionContext, log) newHostConnectionPool(HostConnectionPoolSetup(host, port, cps)) } /** + * INTERNAL API + * * Starts a new connection pool to the given host and configuration and returns a [[Flow]] which dispatches * the requests from all its materializations across this pool. * While the started host connection pool internally shuts itself down automatically after the configured idle @@ -307,7 +314,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. 
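A sketch of connection-level client usage with the renamed `outgoingConnectionHttps`, assuming an arbitrary `example.com` host and relying on the default client HTTPS context:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.concurrent.Future

implicit val system = ActorSystem("connection-example")
implicit val materializer = ActorMaterializer()

// each materialization opens one TLS connection to example.com:443
val connectionFlow = Http().outgoingConnectionHttps("example.com")

val response: Future[HttpResponse] =
  Source.single(HttpRequest(uri = "/"))
    .via(connectionFlow)
    .runWith(Sink.head)
```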
*/ - def newHostConnectionPool[T](setup: HostConnectionPoolSetup)( + private[akka] def newHostConnectionPool[T](setup: HostConnectionPoolSetup)( implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { val gatewayFuture = FastFuture.successful(new PoolGateway(setup, Promise())) gatewayClientFlow(setup, gatewayFuture) @@ -336,7 +343,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte def cachedHostConnectionPool[T](host: String, port: Int = 80, settings: ConnectionPoolSettings = ConnectionPoolSettings(system), log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { - val cps = ConnectionPoolSetup(settings, None, log) + val cps = ConnectionPoolSetup(settings, ConnectionContext.noEncryption(), log) val setup = HostConnectionPoolSetup(host, port, cps) cachedHostConnectionPool(setup) } @@ -344,17 +351,17 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte /** * Same as [[cachedHostConnectionPool]] but for encrypted (HTTPS) connections. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used + * If an explicit [[ConnectionContext]] is given then it rather than the configured default [[ConnectionContext]] will be used * for encryption on the connections. * * To configure additional settings for the pool (and requests made using it), * use the `akka.http.host-connection-pool` config section or pass in a [[ConnectionPoolSettings]] explicitly. */ - def cachedHostConnectionPoolTls[T](host: String, port: Int = 443, - settings: ConnectionPoolSettings = ConnectionPoolSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { - val cps = ConnectionPoolSetup(settings, effectiveHttpsContext(httpsContext), log) + def cachedHostConnectionPoolHttps[T](host: String, port: Int = 443, + connectionContext: HttpsConnectionContext = defaultClientHttpsContext, + settings: ConnectionPoolSettings = ConnectionPoolSettings(system), + log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = { + val cps = ConnectionPoolSetup(settings, connectionContext, log) val setup = HostConnectionPoolSetup(host, port, cps) cachedHostConnectionPool(setup) } @@ -376,7 +383,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * In order to allow for easy response-to-request association the flow takes in a custom, opaque context * object of type `T` from the application which is emitted together with the corresponding response. */ - def cachedHostConnectionPool[T](setup: HostConnectionPoolSetup)( + private def cachedHostConnectionPool[T](setup: HostConnectionPoolSetup)( implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), HostConnectionPool] = gatewayClientFlow(setup, cachedGateway(setup)) @@ -384,7 +391,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * Creates a new "super connection pool flow", which routes incoming requests to a (cached) host connection pool * depending on their respective effective URIs. Note that incoming requests must have an absolute URI. 
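A sketch of the renamed host-level pool API, `cachedHostConnectionPoolHttps`; the `example.com` host and the `Int` correlation value are arbitrary examples:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.concurrent.Future
import scala.util.Try

implicit val system = ActorSystem("pool-example")
implicit val materializer = ActorMaterializer()

// a pool of encrypted connections to example.com:443, using the default client HTTPS context
val poolFlow = Http().cachedHostConnectionPoolHttps[Int]("example.com")

// the Int is an opaque correlation value handed back together with the corresponding response
val result: Future[(Try[HttpResponse], Int)] =
  Source.single(HttpRequest(uri = "/") -> 42)
    .via(poolFlow)
    .runWith(Sink.head)
```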
* - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used + * If an explicit [[ConnectionContext]] is given then it rather than the configured default [[ConnectionContext]] will be used * for setting up HTTPS connection pools, if required. * * Since the underlying transport usually comprises more than a single connection the produced flow might generate @@ -397,80 +404,81 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte * To configure additional settings for the pool (and requests made using it), * use the `akka.http.host-connection-pool` config section or pass in a [[ConnectionPoolSettings]] explicitly. */ - def superPool[T](settings: ConnectionPoolSettings = ConnectionPoolSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), Unit] = - clientFlow[T](settings) { request ⇒ request -> cachedGateway(request, settings, httpsContext, log) } + def superPool[T](connectionContext: HttpsConnectionContext = defaultClientHttpsContext, + settings: ConnectionPoolSettings = ConnectionPoolSettings(system), + log: LoggingAdapter = system.log)(implicit fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), NotUsed] = + clientFlow[T](settings) { request ⇒ request -> cachedGateway(request, settings, connectionContext, log) } /** * Fires a single [[HttpRequest]] across the (cached) host connection pool for the request's * effective URI to produce a response future. * - * If an explicit [[HttpsContext]] is given then it rather than the configured default [[HttpsContext]] will be used - * for setting up the HTTPS connection pool, if required. + * If an explicit [[ConnectionContext]] is given then it rather than the configured default [[ConnectionContext]] will be used + * for setting up the HTTPS connection pool, if the request is targetted towards an `https` endpoint. * * Note that the request must have an absolute URI, otherwise the future will be completed with an error. */ def singleRequest(request: HttpRequest, + connectionContext: HttpsConnectionContext = defaultClientHttpsContext, settings: ConnectionPoolSettings = ConnectionPoolSettings(system), - httpsContext: Option[HttpsContext] = None, log: LoggingAdapter = system.log)(implicit fm: Materializer): Future[HttpResponse] = try { - val gatewayFuture = cachedGateway(request, settings, httpsContext, log) + val gatewayFuture = cachedGateway(request, settings, connectionContext, log) gatewayFuture.flatMap(_(request))(fm.executionContext) } catch { case e: IllegalUriException ⇒ FastFuture.failed(e) } /** - * Constructs a [[WebsocketClientLayer]] stage using the configured default [[ClientConnectionSettings]], + * Constructs a [[WebSocketClientLayer]] stage using the configured default [[ClientConnectionSettings]], * configured using the `akka.http.client` config section. * * The layer is not reusable and must only be materialized once. 
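A sketch of the request-level API with the reordered parameters; because the URI uses the `https` scheme, the (default) client HTTPS context is picked up automatically:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import scala.concurrent.Future

implicit val system = ActorSystem("single-request-example")
implicit val materializer = ActorMaterializer()

// the https scheme routes the request through an encrypted (cached) host connection pool
val response: Future[HttpResponse] =
  Http().singleRequest(HttpRequest(uri = "https://example.com/"))
```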
*/ - def websocketClientLayer(request: WebsocketRequest, + def webSocketClientLayer(request: WebSocketRequest, settings: ClientConnectionSettings = ClientConnectionSettings(system), - log: LoggingAdapter = system.log): Http.WebsocketClientLayer = - WebsocketClientBlueprint(request, settings, log) + log: LoggingAdapter = system.log): Http.WebSocketClientLayer = + WebSocketClientBlueprint(request, settings, log) /** - * Constructs a flow that once materialized establishes a Websocket connection to the given Uri. + * Constructs a flow that once materialized establishes a WebSocket connection to the given Uri. * * The layer is not reusable and must only be materialized once. */ - def websocketClientFlow(request: WebsocketRequest, + def webSocketClientFlow(request: WebSocketRequest, + connectionContext: ConnectionContext = defaultClientHttpsContext, localAddress: Option[InetSocketAddress] = None, settings: ClientConnectionSettings = ClientConnectionSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log): Flow[Message, Message, Future[WebsocketUpgradeResponse]] = { + log: LoggingAdapter = system.log): Flow[Message, Message, Future[WebSocketUpgradeResponse]] = { import request.uri - require(uri.isAbsolute, s"Websocket request URI must be absolute but was '$uri'") + require(uri.isAbsolute, s"WebSocket request URI must be absolute but was '$uri'") val ctx = uri.scheme match { - case "ws" ⇒ None - case "wss" ⇒ effectiveHttpsContext(httpsContext) - case scheme @ _ ⇒ - throw new IllegalArgumentException(s"Illegal URI scheme '$scheme' in '$uri' for Websocket request. " + - s"Websocket requests must use either 'ws' or 'wss'") + case "ws" ⇒ ConnectionContext.noEncryption() + case "wss" if connectionContext.isSecure ⇒ connectionContext + case "wss" ⇒ throw new IllegalArgumentException("Provided connectionContext is not secure, yet request to secure `wss` endpoint detected!") + case scheme ⇒ + throw new IllegalArgumentException(s"Illegal URI scheme '$scheme' in '$uri' for WebSocket request. " + + s"WebSocket requests must use either 'ws' or 'wss'") } val host = uri.authority.host.address val port = uri.effectivePort - websocketClientLayer(request, settings, log) + webSocketClientLayer(request, settings, log) .joinMat(_outgoingTlsConnectionLayer(host, port, localAddress, settings, ctx, log))(Keep.left) } /** - * Runs a single Websocket conversation given a Uri and a flow that represents the client side of the - * Websocket conversation. + * Runs a single WebSocket conversation given a Uri and a flow that represents the client side of the + * WebSocket conversation. 
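A sketch of the renamed WebSocket client API; the `wss` endpoint URL is a placeholder and the default client HTTPS context is assumed to be acceptable for it:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws.{ Message, TextMessage, WebSocketRequest }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink, Source }

implicit val system = ActorSystem("ws-example")
implicit val materializer = ActorMaterializer()

// print whatever the server sends, emit a single text message ourselves, then complete
val clientFlow = Flow.fromSinkAndSource(
  Sink.foreach[Message](println),
  Source.single(TextMessage("hello")))

// a `wss` URI requires a secure connectionContext; the default client HTTPS context is used here
val (upgradeResponse, _) =
  Http().singleWebSocketRequest(WebSocketRequest("wss://echo.example.org"), clientFlow)
```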
*/ - def singleWebsocketRequest[T](request: WebsocketRequest, + def singleWebSocketRequest[T](request: WebSocketRequest, clientFlow: Flow[Message, Message, T], + connectionContext: ConnectionContext = defaultClientHttpsContext, localAddress: Option[InetSocketAddress] = None, settings: ClientConnectionSettings = ClientConnectionSettings(system), - httpsContext: Option[HttpsContext] = None, - log: LoggingAdapter = system.log)(implicit mat: Materializer): (Future[WebsocketUpgradeResponse], T) = - websocketClientFlow(request, localAddress, settings, httpsContext, log) + log: LoggingAdapter = system.log)(implicit mat: Materializer): (Future[WebSocketUpgradeResponse], T) = + webSocketClientFlow(request, connectionContext, localAddress, settings, log) .joinMat(clientFlow)(Keep.both).run() /** @@ -491,35 +499,55 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte } /** - * Gets the current default client-side [[HttpsContext]]. + * Gets the current default server-side [[ConnectionContext]] – defaults to plain HTTP. */ - def defaultClientHttpsContext: HttpsContext = + def defaultServerHttpContext: ConnectionContext = synchronized { - _defaultClientHttpsContext match { + if (_defaultServerConnectionContext == null) + _defaultServerConnectionContext = ConnectionContext.noEncryption() + _defaultServerConnectionContext + } + + /** + * Sets the default server-side [[ConnectionContext]]. + * If it is an instance of [[HttpsConnectionContext]] then the server will be bound using HTTPS. + */ + def setDefaultClientHttpsContext(context: ConnectionContext): Unit = + synchronized { + _defaultServerConnectionContext = context + } + + /** + * Gets the current default client-side [[HttpsConnectionContext]]. + * Defaults used here can be configured using ssl-config or the context can be replaced using [[setDefaultClientHttpsContext]] + */ + def defaultClientHttpsContext: HttpsConnectionContext = + synchronized { + _defaultClientHttpsConnectionContext match { case null ⇒ val ctx = createDefaultClientHttpsContext() - _defaultClientHttpsContext = ctx + _defaultClientHttpsConnectionContext = ctx ctx case ctx ⇒ ctx } } /** - * Sets the default client-side [[HttpsContext]]. + * Sets the default client-side [[HttpsConnectionContext]]. 
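A sketch of replacing the default client-side context; the `SSLContext.getDefault` call is only a stand-in for a properly configured context:

```scala
import javax.net.ssl.SSLContext
import akka.actor.ActorSystem
import akka.http.scaladsl.{ ConnectionContext, Http }

implicit val system = ActorSystem("default-context-example")

// placeholder SSLContext; real code would configure key and trust managers explicitly
val customContext = ConnectionContext.https(SSLContext.getDefault)

// all client-side APIs that fall back to the default will use this context from now on
Http().setDefaultClientHttpsContext(customContext)
```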
*/ - def setDefaultClientHttpsContext(context: HttpsContext): Unit = + def setDefaultClientHttpsContext(context: HttpsConnectionContext): Unit = synchronized { - _defaultClientHttpsContext = context + _defaultClientHttpsConnectionContext = context } // every ActorSystem maintains its own connection pools private[this] val hostPoolCache = new ConcurrentHashMap[HostConnectionPoolSetup, Future[PoolGateway]] private def cachedGateway(request: HttpRequest, - settings: ConnectionPoolSettings, httpsContext: Option[HttpsContext], + settings: ConnectionPoolSettings, connectionContext: ConnectionContext, log: LoggingAdapter)(implicit fm: Materializer): Future[PoolGateway] = if (request.uri.scheme.nonEmpty && request.uri.authority.nonEmpty) { - val httpsCtx = if (request.uri.scheme.equalsIgnoreCase("https")) effectiveHttpsContext(httpsContext) else None + val httpsCtx = if (request.uri.scheme.equalsIgnoreCase("https")) connectionContext else ConnectionContext.noEncryption() val setup = ConnectionPoolSetup(settings, httpsCtx, log) val host = request.uri.authority.host.toString() val hcps = HostConnectionPoolSetup(host, request.uri.effectivePort, setup) @@ -529,6 +557,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte throw new IllegalUriException(ErrorInfo(msg)) } + /** INTERNAL API */ private[http] def cachedGateway(setup: HostConnectionPoolSetup)(implicit fm: Materializer): Future[PoolGateway] = { val gatewayPromise = Promise[PoolGateway]() hostPoolCache.putIfAbsent(setup, gatewayPromise.future) match { @@ -559,7 +588,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte .mapMaterializedValue(_ ⇒ HostConnectionPool(hcps)(gatewayFuture)) private def clientFlow[T](settings: ConnectionPoolSettings)(f: HttpRequest ⇒ (HttpRequest, Future[PoolGateway]))( - implicit system: ActorSystem, fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), Unit] = { + implicit system: ActorSystem, fm: Materializer): Flow[(HttpRequest, T), (Try[HttpResponse], T), NotUsed] = { // a connection pool can never have more than pipeliningLimit * maxConnections requests in flight at any point val parallelism = settings.pipeliningLimit * settings.maxConnections Flow[(HttpRequest, T)].mapAsyncUnordered(parallelism) { @@ -573,15 +602,11 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte } } - private def effectiveHttpsContext(ctx: Option[HttpsContext]): Option[HttpsContext] = - ctx orElse Some(defaultClientHttpsContext) - - private[http] def sslTlsStage(httpsContext: Option[HttpsContext], role: Role, hostInfo: Option[(String, Int)] = None) = - httpsContext match { - case Some(hctx) ⇒ - SslTls(hctx.sslContext, hctx.firstSession, role, hostInfo = hostInfo) - case None ⇒ - SslTlsPlacebo.forScala + /** Creates real or placebo SslTls stage based on if ConnectionContext is HTTPS or not. 
*/ + private[http] def sslTlsStage(connectionContext: ConnectionContext, role: Role, hostInfo: Option[(String, Int)] = None) = + connectionContext match { + case hctx: HttpsConnectionContext ⇒ SslTls(hctx.sslContext, hctx.firstSession, role, hostInfo = hostInfo) + case other ⇒ SslTlsPlacebo.forScala // if it's not HTTPS, we don't enable SSL/TLS } } @@ -600,7 +625,7 @@ object Http extends ExtensionId[HttpExt] with ExtensionIdProvider { * +------+ * }}} */ - type ServerLayer = BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, Unit] + type ServerLayer = BidiFlow[HttpResponse, SslTlsOutbound, SslTlsInbound, HttpRequest, NotUsed] //# //#client-layer @@ -616,11 +641,11 @@ object Http extends ExtensionId[HttpExt] with ExtensionIdProvider { * +------+ * }}} */ - type ClientLayer = BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, Unit] + type ClientLayer = BidiFlow[HttpRequest, SslTlsOutbound, SslTlsInbound, HttpResponse, NotUsed] //# /** - * The type of the client-side Websocket layer as a stand-alone BidiFlow + * The type of the client-side WebSocket layer as a stand-alone BidiFlow * that can be put atop the TCP layer to form an HTTP client. * * {{{ @@ -631,7 +656,7 @@ object Http extends ExtensionId[HttpExt] with ExtensionIdProvider { * +------+ * }}} */ - type WebsocketClientLayer = BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebsocketUpgradeResponse]] + type WebSocketClientLayer = BidiFlow[Message, SslTlsOutbound, SslTlsInbound, Message, Future[WebSocketUpgradeResponse]] /** * Represents a prospective HTTP server binding. @@ -656,7 +681,7 @@ object Http extends ExtensionId[HttpExt] with ExtensionIdProvider { final case class IncomingConnection( localAddress: InetSocketAddress, remoteAddress: InetSocketAddress, - flow: Flow[HttpResponse, HttpRequest, Unit]) { + flow: Flow[HttpResponse, HttpRequest, NotUsed]) { /** * Handles the connection with the given flow, which is materialized exactly once @@ -707,40 +732,18 @@ object Http extends ExtensionId[HttpExt] with ExtensionIdProvider { new HttpExt(system.settings.config getConfig "akka.http")(system) } -import scala.collection.JavaConverters._ - -//# https-context-impl -final case class HttpsContext(sslContext: SSLContext, - enabledCipherSuites: Option[immutable.Seq[String]] = None, - enabledProtocols: Option[immutable.Seq[String]] = None, - clientAuth: Option[ClientAuth] = None, - sslParameters: Option[SSLParameters] = None) - //# - extends akka.http.javadsl.HttpsContext { - def firstSession = NegotiateNewSession(enabledCipherSuites, enabledProtocols, clientAuth, sslParameters) - - /** Java API */ - override def getSslContext: SSLContext = sslContext - - /** Java API */ - override def getEnabledCipherSuites: japi.Option[JCollection[String]] = enabledCipherSuites.map(_.asJavaCollection) - - /** Java API */ - override def getEnabledProtocols: japi.Option[JCollection[String]] = enabledProtocols.map(_.asJavaCollection) - - /** Java API */ - override def getClientAuth: japi.Option[ClientAuth] = clientAuth - - /** Java API */ - override def getSslParameters: japi.Option[SSLParameters] = sslParameters -} - +/** + * TLS configuration for an HTTPS server binding or client connection. + * For the sslContext please refer to the com.typeasfe.ssl-config library. + * The remaining four parameters configure the initial session that will + * be negotiated, see [[akka.stream.io.NegotiateNewSession]] for details. 
+ */ trait DefaultSSLContextCreation { protected def system: ActorSystem protected def sslConfig: AkkaSSLConfig - protected def createDefaultClientHttpsContext(): HttpsContext = { + protected def createDefaultClientHttpsContext(): HttpsConnectionContext = { val config = sslConfig.config val log = Logging(system, getClass) @@ -769,15 +772,18 @@ trait DefaultSSLContextCreation { val cipherSuites = sslConfig.configureCipherSuites(defaultCiphers, config) defaultParams.setCipherSuites(cipherSuites) - // hostname! - if (!Java6Compat.trySetEndpointIdentificationAlgorithm(defaultParams, "https")) { - log.info("Unable to use JDK built-in hostname verification, please consider upgrading your Java runtime to " + - "a more up to date version (JDK7+). Using Typesafe ssl-config hostname verification.") - // enabling the JDK7+ solution did not work, however this is fine since we do handle hostname - // verification directly in SslTlsCipherActor manually by applying an ssl-config provider verifier + // auth! + import com.typesafe.sslconfig.ssl.{ ClientAuth ⇒ SslClientAuth } + val clientAuth = config.sslParametersConfig.clientAuth match { + case SslClientAuth.Default ⇒ None + case SslClientAuth.Want ⇒ Some(ClientAuth.Want) + case SslClientAuth.Need ⇒ Some(ClientAuth.Need) + case SslClientAuth.None ⇒ Some(ClientAuth.None) } + // hostname! + defaultParams.setEndpointIdentificationAlgorithm("https") - HttpsContext(sslContext, sslParameters = Some(defaultParams)) + new HttpsConnectionContext(sslContext, Some(cipherSuites.toList), Some(defaultProtocols.toList), clientAuth, Some(defaultParams)) } -} \ No newline at end of file +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/TimeoutAccess.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/TimeoutAccess.scala new file mode 100644 index 0000000000..5ca09604ca --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/TimeoutAccess.scala @@ -0,0 +1,42 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.scaladsl + +import scala.concurrent.duration.Duration +import akka.http.scaladsl.model.{ HttpResponse, HttpRequest } + +/** + * Enables programmatic access to the server-side request timeout logic. + */ +trait TimeoutAccess extends akka.http.javadsl.TimeoutAccess { + + /** + * Tries to set a new timeout. + * The timeout period is measured as of the point in time that the end of the request has been received, + * which may be in the past or in the future! + * Use `Duration.Inf` to completely disable request timeout checking for this request. + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! + */ + def updateTimeout(timeout: Duration): Unit + + /** + * Tries to set a new timeout handler, which produces the timeout response for a + * given request. Note that the handler must produce the response synchronously and shouldn't block! + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! + */ + def updateHandler(handler: HttpRequest ⇒ HttpResponse): Unit + + /** + * Tries to set a new timeout and handler at the same time. + * + * Due to the inherent raciness it is not guaranteed that the update will be applied before + * the previously set timeout has expired! 
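A sketch of how the new `TimeoutAccess` trait is intended to be used; how a concrete instance reaches application code is outside this file, so `timeoutAccess` is simply assumed to be provided by the server infrastructure for the request in flight:

```scala
import scala.concurrent.duration._
import akka.http.scaladsl.TimeoutAccess
import akka.http.scaladsl.model.{ HttpEntity, HttpRequest, HttpResponse, StatusCodes }

// `timeoutAccess` is assumed to be handed to us by the server for the request in flight
def extendTimeout(timeoutAccess: TimeoutAccess): Unit =
  timeoutAccess.update(
    30.seconds, // measured from the point at which the end of the request was received
    (_: HttpRequest) => HttpResponse(StatusCodes.ServiceUnavailable, entity = HttpEntity("still working")))
```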
+ */ + def update(timeout: Duration, handler: HttpRequest ⇒ HttpResponse): Unit +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentRange.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentRange.scala index 0ac6094b29..f094d366db 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentRange.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentRange.scala @@ -1,21 +1,23 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model +import java.util.{ OptionalLong, Optional } import java.{ lang ⇒ jl } import akka.http.impl.util.{ Rendering, ValueRenderable } import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ +import scala.compat.java8.OptionConverters._ sealed trait ContentRange extends jm.ContentRange with ValueRenderable { // default implementations to override def isSatisfiable: Boolean = false def isOther: Boolean = false - def getSatisfiableFirst: akka.japi.Option[jl.Long] = akka.japi.Option.none - def getSatisfiableLast: akka.japi.Option[jl.Long] = akka.japi.Option.none - def getOtherValue: akka.japi.Option[String] = akka.japi.Option.none + def getSatisfiableFirst: OptionalLong = OptionalLong.empty() + def getSatisfiableLast: OptionalLong = OptionalLong.empty() + def getOtherValue: Optional[String] = Optional.empty() } sealed trait ByteContentRange extends ContentRange { @@ -24,7 +26,7 @@ sealed trait ByteContentRange extends ContentRange { /** Java API */ def isByteContentRange: Boolean = true /** Java API */ - def getInstanceLength: akka.japi.Option[jl.Long] = instanceLength.asJava + def getInstanceLength: OptionalLong = instanceLength.asPrimitive } // http://tools.ietf.org/html/rfc7233#section-4.2 @@ -48,9 +50,9 @@ object ContentRange { /** Java API */ override def isSatisfiable: Boolean = true /** Java API */ - override def getSatisfiableFirst: akka.japi.Option[jl.Long] = akka.japi.Option.some(first) + override def getSatisfiableFirst: OptionalLong = OptionalLong.of(first) /** Java API */ - override def getSatisfiableLast: akka.japi.Option[jl.Long] = akka.japi.Option.some(last) + override def getSatisfiableLast: OptionalLong = OptionalLong.of(last) } /** @@ -70,8 +72,8 @@ object ContentRange { /** Java API */ def isByteContentRange = false /** Java API */ - def getInstanceLength: akka.japi.Option[jl.Long] = akka.japi.Option.none + def getInstanceLength: OptionalLong = OptionalLong.empty() /** Java API */ - override def getOtherValue: akka.japi.Option[String] = akka.japi.Option.some(value) + override def getOtherValue: Optional[String] = Optional.of(value) } } \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentType.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentType.scala index 752694cd55..6f331a8c9d 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentType.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ContentType.scala @@ -1,12 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model import language.implicitConversions import akka.http.impl.util._ -import akka.japi.{ Option ⇒ JOption } +import java.util.Optional import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ @@ -49,7 +49,7 @@ sealed trait ContentType extends jm.ContentType with ValueRenderable { private[http] def render[R <: Rendering](r: R): r.type = r ~~ mediaType /** Java API */ - def getCharsetOption: JOption[jm.HttpCharset] = charsetOption.asJava + def getCharsetOption: Optional[jm.HttpCharset] = charsetOption.asJava } object ContentType { diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/DateTime.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/DateTime.scala index 20ddeaf579..c0b9cb9dd2 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/DateTime.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/DateTime.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ErrorInfo.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ErrorInfo.scala index aef5eb4917..ec11f152eb 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ErrorInfo.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ErrorInfo.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/FormData.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/FormData.scala index a42e6ab24d..aacb54a6b3 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/FormData.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/FormData.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpCharset.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpCharset.scala index 92bc1eda1f..c6fbcad6db 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpCharset.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpCharset.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala index dc89ff58b2..0a47f901ed 100755 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpEntity.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model +import java.util.OptionalLong + import language.implicitConversions import java.io.File import java.lang.{ Iterable ⇒ JIterable, Long ⇒ JLong } @@ -15,16 +17,24 @@ import akka.util.ByteString import akka.stream.scaladsl._ import akka.stream.stage._ import akka.stream._ -import akka.{ japi, stream } +import akka.{ NotUsed, japi, stream } import akka.http.scaladsl.model.ContentType.{ NonBinary, Binary } import akka.http.scaladsl.util.FastFuture import akka.http.javadsl.{ model ⇒ jm } +import akka.http.impl.util.StreamUtils import akka.http.impl.util.JavaMapping.Implicits._ +import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage + /** * Models the entity (aka "body" or "content) of an HTTP message. */ sealed trait HttpEntity extends jm.HttpEntity { + import language.implicitConversions + private implicit def completionStageCovariant[T, U >: T](in: CompletionStage[T]): CompletionStage[U] = in.asInstanceOf[CompletionStage[U]] + /** * Determines whether this entity is known to be empty. */ @@ -82,8 +92,7 @@ sealed trait HttpEntity extends jm.HttpEntity { stream.javadsl.Source.fromGraph(dataBytes.asInstanceOf[Source[ByteString, AnyRef]]) /** Java API */ - override def getContentLengthOption: japi.Option[JLong] = - japi.Option.fromScalaOption(contentLengthOption.asInstanceOf[Option[JLong]]) // Scala autoboxing + override def getContentLengthOption: OptionalLong = contentLengthOption.asPrimitive // default implementations, should be overridden override def isCloseDelimited: Boolean = false @@ -92,8 +101,8 @@ sealed trait HttpEntity extends jm.HttpEntity { override def isChunked: Boolean = false /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.HttpEntity.Strict] = - toStrict(timeoutMillis.millis)(materializer) + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.HttpEntity.Strict] = + toStrict(timeoutMillis.millis)(materializer).toJava } /* An entity that can be used for body parts */ @@ -204,7 +213,7 @@ object HttpEntity { override def isKnownEmpty: Boolean = data.isEmpty - override def dataBytes: Source[ByteString, Unit] = Source(data :: Nil) + override def dataBytes: Source[ByteString, NotUsed] = Source(data :: Nil) override def toStrict(timeout: FiniteDuration)(implicit fm: Materializer) = FastFuture.successful(this) @@ -500,4 +509,24 @@ object HttpEntity { private object SizeLimit { val Disabled = -1 // any negative value will do } + + /** + * INTERNAL API + */ + private[http] def captureTermination[T <: HttpEntity](entity: T): (T, Future[Unit]) = + entity match { + case x: HttpEntity.Strict ⇒ x.asInstanceOf[T] -> FastFuture.successful(()) + case x: HttpEntity.Default ⇒ + val (newData, whenCompleted) = StreamUtils.captureTermination(x.data) + x.copy(data = newData).asInstanceOf[T] -> whenCompleted + case x: HttpEntity.Chunked ⇒ + val (newChunks, whenCompleted) = StreamUtils.captureTermination(x.chunks) + x.copy(chunks = newChunks).asInstanceOf[T] -> whenCompleted + case x: HttpEntity.CloseDelimited ⇒ + val (newData, whenCompleted) = StreamUtils.captureTermination(x.data) + x.copy(data = newData).asInstanceOf[T] -> whenCompleted + case x: HttpEntity.IndefiniteLength ⇒ + val (newData, whenCompleted) = StreamUtils.captureTermination(x.data) + x.copy(data = newData).asInstanceOf[T] -> whenCompleted + } } diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala 
b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala index d33e52594f..0a8bfe38ad 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala index 9a94586ddd..b6a86a87bd 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model import java.lang.{ Iterable ⇒ JIterable } +import java.util.Optional import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ Future, ExecutionContext } @@ -18,6 +19,8 @@ import akka.http.javadsl.{ model ⇒ jm } import akka.http.scaladsl.util.FastFuture._ import headers._ +import scala.compat.java8.OptionConverters._ + /** * Common base class of HttpRequest and HttpResponse. */ @@ -110,11 +113,11 @@ sealed trait HttpMessage extends jm.HttpMessage { /** Java API */ def getHeaders: JIterable[jm.HttpHeader] = (headers: immutable.Seq[jm.HttpHeader]).asJava /** Java API */ - def getHeader[T <: jm.HttpHeader](headerClass: Class[T]): akka.japi.Option[T] = header(ClassTag(headerClass)) + def getHeader[T <: jm.HttpHeader](headerClass: Class[T]): Optional[T] = header(ClassTag(headerClass)).asJava /** Java API */ - def getHeader(headerName: String): akka.japi.Option[jm.HttpHeader] = { + def getHeader(headerName: String): Optional[jm.HttpHeader] = { val lowerCased = headerName.toRootLowerCase - headers.find(_.is(lowerCased)) + Util.convertOption(headers.find(_.is(lowerCased))) // Upcast because of invariance } /** Java API */ def addHeaders(headers: JIterable[jm.HttpHeader]): Self = mapHeaders(_ ++ headers.asScala.asInstanceOf[Iterable[HttpHeader]]) diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMethod.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMethod.scala index 786de0c1cd..d1e00b2968 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMethod.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMethod.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model @@ -33,8 +33,6 @@ final case class HttpMethod private[http] (override val value: String, isSafe: Boolean, isIdempotent: Boolean, requestEntityAcceptance: RequestEntityAcceptance) extends jm.HttpMethod with SingletonValueRenderable { - def name = value - override def isEntityAccepted: Boolean = requestEntityAcceptance.isEntityAccepted override def toString: String = s"HttpMethod($value)" } diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpProtocol.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpProtocol.scala index 748a794632..1451a3e090 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpProtocol.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpProtocol.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaRange.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaRange.scala index a3dade03e6..e58a7a1933 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaRange.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaRange.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaType.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaType.scala index 6c5cbccbe4..7728f5e5ce 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaType.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/MediaType.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala index e050999a88..78416f5dda 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala @@ -1,17 +1,18 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model import java.io.File +import java.util.Optional +import akka.http.impl.util.Util import scala.collection.immutable.VectorBuilder import scala.concurrent.duration.FiniteDuration import scala.concurrent.Future import scala.collection.immutable import scala.collection.JavaConverters._ import scala.util.{ Failure, Success, Try } -import akka.japi.{ Option ⇒ JOption } import akka.event.{ NoLogging, LoggingAdapter } import akka.stream.impl.ConstantFun import akka.stream.Materializer @@ -22,6 +23,9 @@ import akka.http.scaladsl.model.headers._ import akka.http.impl.engine.rendering.BodyPartRenderer import akka.http.javadsl.{ model ⇒ jm } import FastFuture._ +import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage /** * The model of multipart content for media-types `multipart/\*` (general multipart content), @@ -70,8 +74,8 @@ sealed trait Multipart extends jm.Multipart { JSource.fromGraph(parts.asInstanceOf[Source[Multipart.BodyPart, AnyRef]]) /** Java API */ - def toStrict(timeoutMillis: Long, materializer: Materializer): Future[_ <: jm.Multipart.Strict] = - toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer) + def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[_ <: jm.Multipart.Strict] = + toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer).toJava /** Java API */ def toEntity(charset: jm.HttpCharset, boundary: String): jm.RequestEntity = @@ -157,18 +161,17 @@ object Multipart { def getHeaders: java.lang.Iterable[jm.HttpHeader] = (headers: immutable.Seq[jm.HttpHeader]).asJava /** Java API */ - def getContentDispositionHeader: JOption[jm.headers.ContentDisposition] = - JOption.fromScalaOption(contentDispositionHeader) + def getContentDispositionHeader: Optional[jm.headers.ContentDisposition] = Util.convertOption(contentDispositionHeader) /** Java API */ def getDispositionParams: java.util.Map[String, String] = dispositionParams.asJava /** Java API 
*/ - def getDispositionType: JOption[jm.headers.ContentDispositionType] = JOption.fromScalaOption(dispositionType) + def getDispositionType: Optional[jm.headers.ContentDispositionType] = Util.convertOption(dispositionType) /** Java API */ - def toStrict(timeoutMillis: Long, materializer: Materializer): Future[_ <: jm.Multipart.BodyPart.Strict] = - toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer) + def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[_ <: jm.Multipart.BodyPart.Strict] = + toStrict(FiniteDuration(timeoutMillis, concurrent.duration.MILLISECONDS))(materializer).toJava } object BodyPart { @@ -208,8 +211,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <: jm.Multipart.General.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.General.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.General.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.Strict]].toJava } object General { def apply(mediaType: MediaType.Multipart, parts: BodyPart.Strict*): Strict = Strict(mediaType, parts.toVector) @@ -254,8 +257,8 @@ object Multipart { def toByteRangesBodyPart: Try[Multipart.ByteRanges.BodyPart] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.General.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.General.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.General.BodyPart.Strict]].toJava private[BodyPart] def tryCreateFormDataBodyPart[T](f: (String, Map[String, String], immutable.Seq[HttpHeader]) ⇒ T): Try[T] = { val params = dispositionParams @@ -319,8 +322,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <: jm.Multipart.FormData.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.FormData.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.FormData.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.Strict]].toJava } object FormData { def apply(parts: Multipart.FormData.BodyPart.Strict*): Multipart.FormData.Strict = Strict(parts.toVector) @@ -409,11 +412,11 @@ object Multipart { (additionalHeaders: immutable.Seq[jm.HttpHeader]).asJava /** Java API */ - def getFilename: JOption[String] = JOption.fromScalaOption(filename) + def getFilename: Optional[String] = filename.asJava /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.FormData.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.FormData.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.FormData.BodyPart.Strict]].toJava } object 
BodyPart { def apply(_name: String, _entity: BodyPartEntity, @@ -467,8 +470,8 @@ object Multipart { super.getParts.asInstanceOf[JSource[_ <: jm.Multipart.ByteRanges.BodyPart, AnyRef]] /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.ByteRanges.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.ByteRanges.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.Strict]].toJava } object ByteRanges { def apply(parts: Multipart.ByteRanges.BodyPart.Strict*): Strict = Strict(parts.toVector) @@ -542,8 +545,8 @@ object Multipart { def getContentRangeHeader: jm.headers.ContentRange = contentRangeHeader /** Java API */ - override def toStrict(timeoutMillis: Long, materializer: Materializer): Future[jm.Multipart.ByteRanges.BodyPart.Strict] = - super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.BodyPart.Strict]] + override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.Multipart.ByteRanges.BodyPart.Strict] = + super.toStrict(timeoutMillis, materializer).asInstanceOf[Future[jm.Multipart.ByteRanges.BodyPart.Strict]].toJava } object BodyPart { def apply(_contentRange: ContentRange, _entity: BodyPartEntity, _rangeUnit: RangeUnit = RangeUnits.Bytes, diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/RemoteAddress.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/RemoteAddress.scala index 2d5762bcc7..0fa2605dc2 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/RemoteAddress.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/RemoteAddress.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model import java.net.{ InetSocketAddress, UnknownHostException, InetAddress } +import java.util.Optional import akka.http.impl.util._ import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ @@ -15,7 +16,7 @@ sealed abstract class RemoteAddress extends jm.RemoteAddress with ValueRenderabl def isUnknown: Boolean /** Java API */ - def getAddress: akka.japi.Option[InetAddress] = toOption.asJava + def getAddress: Optional[InetAddress] = toOption.asJava /** Java API */ def getPort: Int = toIP.flatMap(_.port).getOrElse(0) diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/StatusCode.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/StatusCode.scala index e3bf031ff8..e525e6cec0 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/StatusCode.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/StatusCode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/TransferEncoding.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/TransferEncoding.scala index 21a306e231..e8250164ce 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/TransferEncoding.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/TransferEncoding.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Uri.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Uri.scala index 9ee95ea011..a1cfdd67da 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/Uri.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/Uri.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model @@ -43,11 +43,13 @@ sealed abstract case class Uri(scheme: String, authority: Authority, path: Path, def queryString(charset: Charset = UTF8): Option[String] = rawQueryString.map(s ⇒ decode(s, charset)) /** + * INTERNAL API + * * The effective port of this Uri given the currently set authority and scheme values. * If the authority has an explicitly set port (i.e. a non-zero port value) then this port * is the effective port. Otherwise the default port for the current scheme is returned. */ - def effectivePort: Int = if (authority.port != 0) authority.port else defaultPorts(scheme) + private[akka] def effectivePort: Int = if (authority.port != 0) authority.port else defaultPorts(scheme) /** * Returns a copy of this Uri with the given components. @@ -592,13 +594,12 @@ object Uri { } } - val defaultPorts: Map[String, Int] = + private val defaultPorts: Map[String, Int] = Map("ftp" -> 21, "ssh" -> 22, "telnet" -> 23, "smtp" -> 25, "domain" -> 53, "tftp" -> 69, "http" -> 80, "ws" -> 80, "pop3" -> 110, "nntp" -> 119, "imap" -> 143, "snmp" -> 161, "ldap" -> 389, "https" -> 443, "wss" -> 443, "imaps" -> 993, "nfs" -> 2049).withDefaultValue(-1) - sealed trait ParsingMode - + sealed trait ParsingMode extends akka.http.javadsl.model.Uri.ParsingMode object ParsingMode { case object Strict extends ParsingMode case object Relaxed extends ParsingMode diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/WithQValue.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/WithQValue.scala index 45b11a7883..9e14e8d323 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/WithQValue.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/WithQValue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ByteRange.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ByteRange.scala index 365ebd2df9..5d607b11b5 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ByteRange.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ByteRange.scala @@ -1,23 +1,23 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model.headers -import java.{ lang ⇒ jl } -import akka.japi.{ Option ⇒ JOption } +import java.util.OptionalLong + import akka.http.impl.util.{ Rendering, ValueRenderable } import akka.http.javadsl.{ model ⇒ jm } sealed abstract class ByteRange extends jm.headers.ByteRange with ValueRenderable { /** Java API */ - def getSliceFirst: JOption[jl.Long] = JOption.none + def getSliceFirst: OptionalLong = OptionalLong.empty /** Java API */ - def getSliceLast: JOption[jl.Long] = JOption.none + def getSliceLast: OptionalLong = OptionalLong.empty /** Java API */ - def getOffset: JOption[jl.Long] = JOption.none + def getOffset: OptionalLong = OptionalLong.empty /** Java API */ - def getSuffixLength: JOption[jl.Long] = JOption.none + def getSuffixLength: OptionalLong = OptionalLong.empty /** Java API */ def isSlice: Boolean = false @@ -41,9 +41,9 @@ object ByteRange { /** Java API */ override def isSlice: Boolean = true /** Java API */ - override def getSliceFirst: JOption[jl.Long] = JOption.some(first) + override def getSliceFirst: OptionalLong = OptionalLong.of(first) /** Java API */ - override def getSliceLast: JOption[jl.Long] = JOption.some(last) + override def getSliceLast: OptionalLong = OptionalLong.of(last) } final case class FromOffset(offset: Long) extends ByteRange { @@ -53,7 +53,7 @@ object ByteRange { /** Java API */ override def isFromOffset: Boolean = true /** Java API */ - override def getOffset: JOption[jl.Long] = JOption.some(offset) + override def getOffset: OptionalLong = OptionalLong.of(offset) } final case class Suffix(length: Long) extends ByteRange { @@ -63,6 +63,6 @@ object ByteRange { /** Java API */ override def isSuffix: Boolean = true /** Java API */ - override def getSuffixLength: JOption[jl.Long] = JOption.some(length) + override def getSuffixLength: OptionalLong = OptionalLong.of(length) } } \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/CacheDirective.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/CacheDirective.scala index 29f73b1b1c..0977a5f8a8 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/CacheDirective.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/CacheDirective.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ContentDispositionType.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ContentDispositionType.scala index ce31a29755..63f0504876 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ContentDispositionType.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ContentDispositionType.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/EntityTag.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/EntityTag.scala index 16079604f7..cf0a1a42c1 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/EntityTag.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/EntityTag.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpChallenge.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpChallenge.scala index e918b472c1..cccd5d3b8e 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpChallenge.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpChallenge.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCookie.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCookie.scala index c319aff900..368acc5b52 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCookie.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCookie.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers @@ -7,11 +7,12 @@ package akka.http.scaladsl.model.headers import akka.http.impl.model.parser.CharacterClasses import akka.http.javadsl.model.headers import akka.parboiled2.CharPredicate -import akka.japi.{ Option ⇒ JOption } +import java.util.{ Optional, OptionalLong } import akka.http.scaladsl.model.DateTime import akka.http.impl.util._ import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ +import scala.compat.java8.OptionConverters._ // see http://tools.ietf.org/html/rfc6265 // sealed abstract to prevent generation of default apply method in companion @@ -59,7 +60,7 @@ final case class HttpCookie( httpOnly: Boolean = false, extension: Option[String] = None) extends jm.headers.HttpCookie with ToStringRenderable { - /** Returns the name/value pair for this cookie, to be used in [[Cookiie]] headers. */ + /** Returns the name/value pair for this cookie, to be used in [[Cookie]] headers. */ def pair: HttpCookiePair = HttpCookiePair(name, value) // TODO: suppress running these requires for cookies created from our header parser @@ -84,15 +85,15 @@ final case class HttpCookie( } /** Java API */ - def getExtension: JOption[String] = extension.asJava + def getExtension: Optional[String] = extension.asJava /** Java API */ - def getPath: JOption[String] = path.asJava + def getPath: Optional[String] = path.asJava /** Java API */ - def getDomain: JOption[String] = domain.asJava + def getDomain: Optional[String] = domain.asJava /** Java API */ - def getMaxAge: JOption[java.lang.Long] = maxAge.asJava + def getMaxAge: OptionalLong = maxAge.asPrimitive /** Java API */ - def getExpires: JOption[jm.DateTime] = expires.asJava + def getExpires: Optional[jm.DateTime] = expires.map(_.asJava).asJava /** Java API */ def withExpires(dateTime: jm.DateTime): headers.HttpCookie = copy(expires = Some(dateTime.asScala)) /** Java API */ diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCredentials.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCredentials.scala index 28b1e6e0ef..5e9146f1a1 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCredentials.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpCredentials.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
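// Illustrative sketch: the HttpCookie Java accessors above now expose java.util.Optional /
// OptionalLong, converted from Scala Options via scala-java8-compat's OptionConverters
// (the same `asJava` / `asPrimitive` calls used in the hunk). The values below are examples only.
import java.util.{ Optional, OptionalLong }
import scala.compat.java8.OptionConverters._

object OptionBridgeSketch {
  val path: Option[String] = Some("/")
  val javaPath: Optional[String] = path.asJava      // Optional.of("/")
  val maxAge: Option[Long] = None
  val javaMaxAge: OptionalLong = maxAge.asPrimitive // OptionalLong.empty
}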
*/ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpEncoding.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpEncoding.scala index 2296e92069..ded463dba1 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpEncoding.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpEncoding.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpOrigin.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpOrigin.scala index 0f1c9cbe8a..eb943c7faa 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpOrigin.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/HttpOrigin.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LanguageRange.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LanguageRange.scala index 5826b52158..0e856be181 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LanguageRange.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LanguageRange.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LinkValue.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LinkValue.scala index 48650db9fd..d55233da95 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LinkValue.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/LinkValue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ProductVersion.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ProductVersion.scala index e1b6becd01..8da29c0ee3 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ProductVersion.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/ProductVersion.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/RangeUnit.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/RangeUnit.scala index c7297be281..aebf50bfc8 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/RangeUnit.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/RangeUnit.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/UpgradeProtocol.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/UpgradeProtocol.scala index b9c3dbb73c..aa9eebcd86 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/UpgradeProtocol.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/UpgradeProtocol.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebsocketExtension.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebSocketExtension.scala similarity index 80% rename from akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebsocketExtension.scala rename to akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebSocketExtension.scala index f11f0bbaba..d14f61ad8f 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebsocketExtension.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/WebSocketExtension.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers @@ -10,7 +10,7 @@ import akka.http.impl.util.{ Rendering, ValueRenderable } /** * A websocket extension as defined in http://tools.ietf.org/html/rfc6455#section-4.3 */ -final case class WebsocketExtension(name: String, params: immutable.Map[String, String] = Map.empty) extends ValueRenderable { +final case class WebSocketExtension(name: String, params: immutable.Map[String, String] = Map.empty) extends ValueRenderable { def render[R <: Rendering](r: R): r.type = { r ~~ name if (params.nonEmpty) diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/headers.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/headers.scala index 7df3a213c6..c737083f35 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/headers.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/headers/headers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model.headers @@ -9,17 +9,12 @@ import java.net.InetSocketAddress import java.security.MessageDigest import java.util import javax.net.ssl.SSLSession - -import akka.event.Logging -import akka.stream.io.ScalaSessionAPI - import scala.reflect.ClassTag import scala.util.{ Failure, Success, Try } import scala.annotation.tailrec import scala.collection.immutable - import akka.parboiled2.util.Base64 - +import akka.stream.io.ScalaSessionAPI import akka.http.impl.util._ import akka.http.javadsl.{ model ⇒ jm } import akka.http.scaladsl.model._ @@ -42,6 +37,8 @@ sealed abstract class ModeledCompanion[T: ClassTag] extends Renderable { } sealed trait ModeledHeader extends HttpHeader with Serializable { + def renderInRequests: Boolean = false // default implementation + def renderInResponses: Boolean = false // default implementation def name: String = companion.name def value: String = renderValue(new StringRendering).get def lowercaseName: String = companion.lowercaseName @@ -50,6 +47,11 @@ sealed trait ModeledHeader extends HttpHeader with Serializable { protected def companion: ModeledCompanion[_] } +private[headers] sealed trait RequestHeader extends ModeledHeader { override def renderInRequests = true } +private[headers] sealed trait ResponseHeader extends ModeledHeader { override def renderInResponses = true } +private[headers] sealed trait RequestResponseHeader extends RequestHeader with ResponseHeader +private[headers] sealed trait SyntheticHeader extends ModeledHeader + /** * Superclass for user-defined custom headers defined by implementing `name` and `value`. * @@ -58,9 +60,6 @@ sealed trait ModeledHeader extends HttpHeader with Serializable { * as they allow the custom header to be matched from [[RawHeader]] and vice-versa. 
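// Sketch of what the new render-direction flags above enable: headers can be filtered by
// whether they belong in requests or responses. The helper itself is an illustrative
// assumption, not the actual rendering code in akka-http-core.
import akka.http.scaladsl.model.HttpHeader
import akka.http.scaladsl.model.headers.ModeledHeader

object RenderDirectionSketch {
  // e.g. Accept (a RequestHeader) passes, Age (a ResponseHeader) is dropped
  def requestRenderable(headers: List[HttpHeader]): List[HttpHeader] =
    headers.filter {
      case m: ModeledHeader ⇒ m.renderInRequests
      case _                ⇒ true // raw and custom headers are handled separately
    }
}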
*/ abstract class CustomHeader extends jm.headers.CustomHeader { - /** Override to return true if this header shouldn't be rendered */ - def suppressRendering: Boolean = false - def lowercaseName: String = name.toRootLowerCase final def render[R <: Rendering](r: R): r.type = r ~~ name ~~ ':' ~~ ' ' ~~ value } @@ -99,17 +98,243 @@ abstract class ModeledCustomHeader[H <: ModeledCustomHeader[H]] extends CustomHe def companion: ModeledCustomHeaderCompanion[H] final override def name = companion.name - final override def lowercaseName: String = name.toRootLowerCase + final override def lowercaseName = name.toRootLowerCase } import akka.http.impl.util.JavaMapping.Implicits._ +// http://tools.ietf.org/html/rfc7231#section-5.3.2 +object Accept extends ModeledCompanion[Accept] { + def apply(mediaRanges: MediaRange*): Accept = apply(immutable.Seq(mediaRanges: _*)) + implicit val mediaRangesRenderer = Renderer.defaultSeqRenderer[MediaRange] // cache +} +final case class Accept(mediaRanges: immutable.Seq[MediaRange]) extends jm.headers.Accept with RequestHeader { + import Accept.mediaRangesRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ mediaRanges + protected def companion = Accept + def acceptsAll = mediaRanges.exists(mr ⇒ mr.isWildcard && mr.qValue > 0f) + + /** Java API */ + def getMediaRanges: Iterable[jm.MediaRange] = mediaRanges.asJava +} + +// http://tools.ietf.org/html/rfc7231#section-5.3.3 +object `Accept-Charset` extends ModeledCompanion[`Accept-Charset`] { + def apply(first: HttpCharsetRange, more: HttpCharsetRange*): `Accept-Charset` = apply(immutable.Seq(first +: more: _*)) + implicit val charsetRangesRenderer = Renderer.defaultSeqRenderer[HttpCharsetRange] // cache +} +final case class `Accept-Charset`(charsetRanges: immutable.Seq[HttpCharsetRange]) extends jm.headers.AcceptCharset + with RequestHeader { + require(charsetRanges.nonEmpty, "charsetRanges must not be empty") + import `Accept-Charset`.charsetRangesRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ charsetRanges + protected def companion = `Accept-Charset` + + /** Java API */ + def getCharsetRanges: Iterable[jm.HttpCharsetRange] = charsetRanges.asJava +} + +// http://tools.ietf.org/html/rfc7231#section-5.3.4 +object `Accept-Encoding` extends ModeledCompanion[`Accept-Encoding`] { + def apply(encodings: HttpEncodingRange*): `Accept-Encoding` = apply(immutable.Seq(encodings: _*)) + implicit val encodingsRenderer = Renderer.defaultSeqRenderer[HttpEncodingRange] // cache +} +final case class `Accept-Encoding`(encodings: immutable.Seq[HttpEncodingRange]) extends jm.headers.AcceptEncoding + with RequestHeader { + import `Accept-Encoding`.encodingsRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ encodings + protected def companion = `Accept-Encoding` + + /** Java API */ + def getEncodings: Iterable[jm.headers.HttpEncodingRange] = encodings.asJava +} + +// http://tools.ietf.org/html/rfc7231#section-5.3.5 +object `Accept-Language` extends ModeledCompanion[`Accept-Language`] { + def apply(first: LanguageRange, more: LanguageRange*): `Accept-Language` = apply(immutable.Seq(first +: more: _*)) + implicit val languagesRenderer = Renderer.defaultSeqRenderer[LanguageRange] // cache +} +final case class `Accept-Language`(languages: immutable.Seq[LanguageRange]) extends jm.headers.AcceptLanguage + with RequestHeader { + require(languages.nonEmpty, "languages must not be empty") + import `Accept-Language`.languagesRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ languages + protected 
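// Illustrative user-defined header following the ModeledCustomHeader pattern above.
// The companion's `parse` signature is assumed (it is not visible in this hunk), and
// ApiTokenHeader / "X-Api-Token" are made-up example names.
import scala.util.Try
import akka.http.scaladsl.model.headers.{ ModeledCustomHeader, ModeledCustomHeaderCompanion }

final class ApiTokenHeader(token: String) extends ModeledCustomHeader[ApiTokenHeader] {
  override val companion = ApiTokenHeader
  override def value: String = token
}
object ApiTokenHeader extends ModeledCustomHeaderCompanion[ApiTokenHeader] {
  override val name = "X-Api-Token"
  override def parse(value: String) = Try(new ApiTokenHeader(value))
}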
def companion = `Accept-Language` + + /** Java API */ + def getLanguages: Iterable[jm.headers.LanguageRange] = languages.asJava +} + +// http://tools.ietf.org/html/rfc7233#section-2.3 +object `Accept-Ranges` extends ModeledCompanion[`Accept-Ranges`] { + def apply(rangeUnits: RangeUnit*): `Accept-Ranges` = apply(immutable.Seq(rangeUnits: _*)) + implicit val rangeUnitsRenderer = Renderer.defaultSeqRenderer[RangeUnit] // cache +} +final case class `Accept-Ranges`(rangeUnits: immutable.Seq[RangeUnit]) extends jm.headers.AcceptRanges + with RequestHeader { + import `Accept-Ranges`.rangeUnitsRenderer + def renderValue[R <: Rendering](r: R): r.type = if (rangeUnits.isEmpty) r ~~ "none" else r ~~ rangeUnits + protected def companion = `Accept-Ranges` + + /** Java API */ + def getRangeUnits: Iterable[jm.headers.RangeUnit] = rangeUnits.asJava +} + +// http://www.w3.org/TR/cors/#access-control-allow-credentials-response-header +object `Access-Control-Allow-Credentials` extends ModeledCompanion[`Access-Control-Allow-Credentials`] +final case class `Access-Control-Allow-Credentials`(allow: Boolean) + extends jm.headers.AccessControlAllowCredentials with ResponseHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ allow.toString + protected def companion = `Access-Control-Allow-Credentials` +} + +// http://www.w3.org/TR/cors/#access-control-allow-headers-response-header +object `Access-Control-Allow-Headers` extends ModeledCompanion[`Access-Control-Allow-Headers`] { + def apply(headers: String*): `Access-Control-Allow-Headers` = apply(immutable.Seq(headers: _*)) + implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache +} +final case class `Access-Control-Allow-Headers`(headers: immutable.Seq[String]) + extends jm.headers.AccessControlAllowHeaders with ResponseHeader { + import `Access-Control-Allow-Headers`.headersRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ headers + protected def companion = `Access-Control-Allow-Headers` + + /** Java API */ + def getHeaders: Iterable[String] = headers.asJava +} + +// http://www.w3.org/TR/cors/#access-control-allow-methods-response-header +object `Access-Control-Allow-Methods` extends ModeledCompanion[`Access-Control-Allow-Methods`] { + def apply(methods: HttpMethod*): `Access-Control-Allow-Methods` = apply(immutable.Seq(methods: _*)) + implicit val methodsRenderer = Renderer.defaultSeqRenderer[HttpMethod] // cache +} +final case class `Access-Control-Allow-Methods`(methods: immutable.Seq[HttpMethod]) + extends jm.headers.AccessControlAllowMethods with ResponseHeader { + import `Access-Control-Allow-Methods`.methodsRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ methods + protected def companion = `Access-Control-Allow-Methods` + + /** Java API */ + def getMethods: Iterable[jm.HttpMethod] = methods.asJava +} + +// http://www.w3.org/TR/cors/#access-control-allow-origin-response-header +object `Access-Control-Allow-Origin` extends ModeledCompanion[`Access-Control-Allow-Origin`] { + val `*` = forRange(HttpOriginRange.`*`) + val `null` = forRange(HttpOriginRange()) + def apply(origin: HttpOrigin) = forRange(HttpOriginRange(origin)) + + /** + * Creates an `Access-Control-Allow-Origin` header for the given origin range. + * + * CAUTION: Even though allowed by the spec (http://www.w3.org/TR/cors/#access-control-allow-origin-response-header) + * `Access-Control-Allow-Origin` headers with more than a single origin appear to be largely unsupported in the field. 
+ * Make sure to thoroughly test such usages with all expected clients! + */ + def forRange(range: HttpOriginRange) = new `Access-Control-Allow-Origin`(range) +} +final case class `Access-Control-Allow-Origin` private (range: HttpOriginRange) + extends jm.headers.AccessControlAllowOrigin with ResponseHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ range + protected def companion = `Access-Control-Allow-Origin` +} + +// http://www.w3.org/TR/cors/#access-control-expose-headers-response-header +object `Access-Control-Expose-Headers` extends ModeledCompanion[`Access-Control-Expose-Headers`] { + def apply(headers: String*): `Access-Control-Expose-Headers` = apply(immutable.Seq(headers: _*)) + implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache +} +final case class `Access-Control-Expose-Headers`(headers: immutable.Seq[String]) + extends jm.headers.AccessControlExposeHeaders with ResponseHeader { + import `Access-Control-Expose-Headers`.headersRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ headers + protected def companion = `Access-Control-Expose-Headers` + + /** Java API */ + def getHeaders: Iterable[String] = headers.asJava +} + +// http://www.w3.org/TR/cors/#access-control-max-age-response-header +object `Access-Control-Max-Age` extends ModeledCompanion[`Access-Control-Max-Age`] +final case class `Access-Control-Max-Age`(deltaSeconds: Long) extends jm.headers.AccessControlMaxAge + with ResponseHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ deltaSeconds + protected def companion = `Access-Control-Max-Age` +} + +// http://www.w3.org/TR/cors/#access-control-request-headers-request-header +object `Access-Control-Request-Headers` extends ModeledCompanion[`Access-Control-Request-Headers`] { + def apply(headers: String*): `Access-Control-Request-Headers` = apply(immutable.Seq(headers: _*)) + implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache +} +final case class `Access-Control-Request-Headers`(headers: immutable.Seq[String]) + extends jm.headers.AccessControlRequestHeaders with RequestHeader { + import `Access-Control-Request-Headers`.headersRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ headers + protected def companion = `Access-Control-Request-Headers` + + /** Java API */ + def getHeaders: Iterable[String] = headers.asJava +} + +// http://www.w3.org/TR/cors/#access-control-request-method-request-header +object `Access-Control-Request-Method` extends ModeledCompanion[`Access-Control-Request-Method`] +final case class `Access-Control-Request-Method`(method: HttpMethod) extends jm.headers.AccessControlRequestMethod + with RequestHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ method + protected def companion = `Access-Control-Request-Method` +} + +// http://tools.ietf.org/html/rfc7234#section-5.1 +object Age extends ModeledCompanion[Age] +final case class Age(deltaSeconds: Long) extends jm.headers.Age with ResponseHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ deltaSeconds + protected def companion = Age +} + +// http://tools.ietf.org/html/rfc7231#section-7.4.1 +object Allow extends ModeledCompanion[Allow] { + def apply(methods: HttpMethod*): Allow = apply(immutable.Seq(methods: _*)) + implicit val methodsRenderer = Renderer.defaultSeqRenderer[HttpMethod] // cache +} +final case class Allow(methods: immutable.Seq[HttpMethod]) extends jm.headers.Allow with ResponseHeader { + import Allow.methodsRenderer + def renderValue[R <: Rendering](r: R): r.type = 
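// Illustrative usage of the Access-Control-Allow-Origin constructors defined above.
// The single-origin form is the widely supported one (see the CAUTION note); the
// string-parsing HttpOrigin(...) constructor and the example URL are assumptions.
import akka.http.scaladsl.model.headers.{ HttpOrigin, HttpOriginRange, `Access-Control-Allow-Origin` }

object CorsHeaderSketch {
  val anyOrigin    = `Access-Control-Allow-Origin`.`*`                                // Access-Control-Allow-Origin: *
  val singleOrigin = `Access-Control-Allow-Origin`(HttpOrigin("https://example.com")) // one explicit origin
  val rangeOrigin  = `Access-Control-Allow-Origin`.forRange(HttpOriginRange(HttpOrigin("https://example.com")))
}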
r ~~ methods + protected def companion = Allow + + /** Java API */ + def getMethods: Iterable[jm.HttpMethod] = methods.asJava +} + +// http://tools.ietf.org/html/rfc7235#section-4.2 +object Authorization extends ModeledCompanion[Authorization] +final case class Authorization(credentials: HttpCredentials) extends jm.headers.Authorization with RequestHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ credentials + protected def companion = Authorization +} + +// http://tools.ietf.org/html/rfc7234#section-5.2 +object `Cache-Control` extends ModeledCompanion[`Cache-Control`] { + def apply(first: CacheDirective, more: CacheDirective*): `Cache-Control` = apply(immutable.Seq(first +: more: _*)) + implicit val directivesRenderer = Renderer.defaultSeqRenderer[CacheDirective] // cache +} +final case class `Cache-Control`(directives: immutable.Seq[CacheDirective]) extends jm.headers.CacheControl + with RequestResponseHeader { + require(directives.nonEmpty, "directives must not be empty") + import `Cache-Control`.directivesRenderer + def renderValue[R <: Rendering](r: R): r.type = r ~~ directives + protected def companion = `Cache-Control` + + /** Java API */ + def getDirectives: Iterable[jm.headers.CacheDirective] = directives.asJava +} + // http://tools.ietf.org/html/rfc7230#section-6.1 object Connection extends ModeledCompanion[Connection] { def apply(first: String, more: String*): Connection = apply(immutable.Seq(first +: more: _*)) implicit val tokensRenderer = Renderer.defaultSeqRenderer[String] // cache } -final case class Connection(tokens: immutable.Seq[String]) extends ModeledHeader { +final case class Connection(tokens: immutable.Seq[String]) extends RequestResponseHeader { require(tokens.nonEmpty, "tokens must not be empty") import Connection.tokensRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ tokens @@ -134,277 +359,20 @@ object `Content-Length` extends ModeledCompanion[`Content-Length`] * Instances of this class will only be created transiently during header parsing and will never appear * in HttpMessage.header. To access the Content-Length, see subclasses of HttpEntity. 
*/ -final case class `Content-Length` private[http] (length: Long) extends ModeledHeader { +final case class `Content-Length` private[http] (length: Long) extends RequestResponseHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ length protected def companion = `Content-Length` } -// http://tools.ietf.org/html/rfc7231#section-5.1.1 -object Expect extends ModeledCompanion[Expect] { - val `100-continue` = new Expect() {} -} -sealed abstract case class Expect private () extends ModeledHeader { - final def renderValue[R <: Rendering](r: R): r.type = r ~~ "100-continue" - protected def companion = Expect -} - -// http://tools.ietf.org/html/rfc7230#section-5.4 -object Host extends ModeledCompanion[Host] { - def apply(authority: Uri.Authority): Host = apply(authority.host, authority.port) - def apply(address: InetSocketAddress): Host = apply(address.getHostStringJava6Compatible, address.getPort) - def apply(host: String): Host = apply(host, 0) - def apply(host: String, port: Int): Host = apply(Uri.Host(host), port) - val empty = Host("") -} -final case class Host(host: Uri.Host, port: Int = 0) extends jm.headers.Host with ModeledHeader { - import UriRendering.HostRenderer - require((port >> 16) == 0, "Illegal port: " + port) - def isEmpty = host.isEmpty - def renderValue[R <: Rendering](r: R): r.type = if (port > 0) r ~~ host ~~ ':' ~~ port else r ~~ host - protected def companion = Host - def equalsIgnoreCase(other: Host): Boolean = host.equalsIgnoreCase(other.host) && port == other.port -} - -// http://tools.ietf.org/html/rfc7233#section-3.2 -object `If-Range` extends ModeledCompanion[`If-Range`] { - def apply(tag: EntityTag): `If-Range` = apply(Left(tag)) - def apply(timestamp: DateTime): `If-Range` = apply(Right(timestamp)) -} -final case class `If-Range`(entityTagOrDateTime: Either[EntityTag, DateTime]) extends ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = - entityTagOrDateTime match { - case Left(tag) ⇒ r ~~ tag - case Right(dateTime) ⇒ dateTime.renderRfc1123DateTimeString(r) - } - protected def companion = `If-Range` -} - -final case class RawHeader(name: String, value: String) extends jm.headers.RawHeader { - val lowercaseName = name.toRootLowerCase - def render[R <: Rendering](r: R): r.type = r ~~ name ~~ ':' ~~ ' ' ~~ value -} -object RawHeader { - def unapply[H <: HttpHeader](customHeader: H): Option[(String, String)] = - Some(customHeader.name -> customHeader.value) -} - -// http://tools.ietf.org/html/rfc7231#section-5.3.2 -object Accept extends ModeledCompanion[Accept] { - def apply(mediaRanges: MediaRange*): Accept = apply(immutable.Seq(mediaRanges: _*)) - implicit val mediaRangesRenderer = Renderer.defaultSeqRenderer[MediaRange] // cache -} -final case class Accept(mediaRanges: immutable.Seq[MediaRange]) extends jm.headers.Accept with ModeledHeader { - import Accept.mediaRangesRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ mediaRanges - protected def companion = Accept - def acceptsAll = mediaRanges.exists(mr ⇒ mr.isWildcard && mr.qValue > 0f) - - /** Java API */ - def getMediaRanges: Iterable[jm.MediaRange] = mediaRanges.asJava -} - -// http://tools.ietf.org/html/rfc7231#section-5.3.3 -object `Accept-Charset` extends ModeledCompanion[`Accept-Charset`] { - def apply(first: HttpCharsetRange, more: HttpCharsetRange*): `Accept-Charset` = apply(immutable.Seq(first +: more: _*)) - implicit val charsetRangesRenderer = Renderer.defaultSeqRenderer[HttpCharsetRange] // cache -} -final case class `Accept-Charset`(charsetRanges: 
immutable.Seq[HttpCharsetRange]) extends jm.headers.AcceptCharset with ModeledHeader { - require(charsetRanges.nonEmpty, "charsetRanges must not be empty") - import `Accept-Charset`.charsetRangesRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ charsetRanges - protected def companion = `Accept-Charset` - - /** Java API */ - def getCharsetRanges: Iterable[jm.HttpCharsetRange] = charsetRanges.asJava -} - -// http://tools.ietf.org/html/rfc7231#section-5.3.4 -object `Accept-Encoding` extends ModeledCompanion[`Accept-Encoding`] { - def apply(encodings: HttpEncodingRange*): `Accept-Encoding` = apply(immutable.Seq(encodings: _*)) - implicit val encodingsRenderer = Renderer.defaultSeqRenderer[HttpEncodingRange] // cache -} -final case class `Accept-Encoding`(encodings: immutable.Seq[HttpEncodingRange]) extends jm.headers.AcceptEncoding with ModeledHeader { - import `Accept-Encoding`.encodingsRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ encodings - protected def companion = `Accept-Encoding` - - /** Java API */ - def getEncodings: Iterable[jm.headers.HttpEncodingRange] = encodings.asJava -} - -// http://tools.ietf.org/html/rfc7231#section-5.3.5 -object `Accept-Language` extends ModeledCompanion[`Accept-Language`] { - def apply(first: LanguageRange, more: LanguageRange*): `Accept-Language` = apply(immutable.Seq(first +: more: _*)) - implicit val languagesRenderer = Renderer.defaultSeqRenderer[LanguageRange] // cache -} -final case class `Accept-Language`(languages: immutable.Seq[LanguageRange]) extends jm.headers.AcceptLanguage with ModeledHeader { - require(languages.nonEmpty, "languages must not be empty") - import `Accept-Language`.languagesRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ languages - protected def companion = `Accept-Language` - - /** Java API */ - def getLanguages: Iterable[jm.headers.LanguageRange] = languages.asJava -} - -// http://tools.ietf.org/html/rfc7233#section-2.3 -object `Accept-Ranges` extends ModeledCompanion[`Accept-Ranges`] { - def apply(rangeUnits: RangeUnit*): `Accept-Ranges` = apply(immutable.Seq(rangeUnits: _*)) - implicit val rangeUnitsRenderer = Renderer.defaultSeqRenderer[RangeUnit] // cache -} -final case class `Accept-Ranges`(rangeUnits: immutable.Seq[RangeUnit]) extends jm.headers.AcceptRanges with ModeledHeader { - import `Accept-Ranges`.rangeUnitsRenderer - def renderValue[R <: Rendering](r: R): r.type = if (rangeUnits.isEmpty) r ~~ "none" else r ~~ rangeUnits - protected def companion = `Accept-Ranges` - - /** Java API */ - def getRangeUnits: Iterable[jm.headers.RangeUnit] = rangeUnits.asJava -} - -// http://www.w3.org/TR/cors/#access-control-allow-credentials-response-header -object `Access-Control-Allow-Credentials` extends ModeledCompanion[`Access-Control-Allow-Credentials`] -final case class `Access-Control-Allow-Credentials`(allow: Boolean) extends jm.headers.AccessControlAllowCredentials with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ allow.toString - protected def companion = `Access-Control-Allow-Credentials` -} - -// http://www.w3.org/TR/cors/#access-control-allow-headers-response-header -object `Access-Control-Allow-Headers` extends ModeledCompanion[`Access-Control-Allow-Headers`] { - def apply(headers: String*): `Access-Control-Allow-Headers` = apply(immutable.Seq(headers: _*)) - implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache -} -final case class `Access-Control-Allow-Headers`(headers: immutable.Seq[String]) extends 
jm.headers.AccessControlAllowHeaders with ModeledHeader { - import `Access-Control-Allow-Headers`.headersRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ headers - protected def companion = `Access-Control-Allow-Headers` - - /** Java API */ - def getHeaders: Iterable[String] = headers.asJava -} - -// http://www.w3.org/TR/cors/#access-control-allow-methods-response-header -object `Access-Control-Allow-Methods` extends ModeledCompanion[`Access-Control-Allow-Methods`] { - def apply(methods: HttpMethod*): `Access-Control-Allow-Methods` = apply(immutable.Seq(methods: _*)) - implicit val methodsRenderer = Renderer.defaultSeqRenderer[HttpMethod] // cache -} -final case class `Access-Control-Allow-Methods`(methods: immutable.Seq[HttpMethod]) extends jm.headers.AccessControlAllowMethods with ModeledHeader { - import `Access-Control-Allow-Methods`.methodsRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ methods - protected def companion = `Access-Control-Allow-Methods` - - /** Java API */ - def getMethods: Iterable[jm.HttpMethod] = methods.asJava -} - -// http://www.w3.org/TR/cors/#access-control-allow-origin-response-header -object `Access-Control-Allow-Origin` extends ModeledCompanion[`Access-Control-Allow-Origin`] { - val `*` = forRange(HttpOriginRange.`*`) - val `null` = forRange(HttpOriginRange()) - def apply(origin: HttpOrigin) = forRange(HttpOriginRange(origin)) - - /** - * Creates an `Access-Control-Allow-Origin` header for the given origin range. - * - * CAUTION: Even though allowed by the spec (http://www.w3.org/TR/cors/#access-control-allow-origin-response-header) - * `Access-Control-Allow-Origin` headers with more than a single origin appear to be largely unsupported in the field. - * Make sure to thoroughly test such usages with all expected clients! 
- */ - def forRange(range: HttpOriginRange) = new `Access-Control-Allow-Origin`(range) -} -final case class `Access-Control-Allow-Origin` private (range: HttpOriginRange) extends jm.headers.AccessControlAllowOrigin with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ range - protected def companion = `Access-Control-Allow-Origin` -} - -// http://www.w3.org/TR/cors/#access-control-expose-headers-response-header -object `Access-Control-Expose-Headers` extends ModeledCompanion[`Access-Control-Expose-Headers`] { - def apply(headers: String*): `Access-Control-Expose-Headers` = apply(immutable.Seq(headers: _*)) - implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache -} -final case class `Access-Control-Expose-Headers`(headers: immutable.Seq[String]) extends jm.headers.AccessControlExposeHeaders with ModeledHeader { - import `Access-Control-Expose-Headers`.headersRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ headers - protected def companion = `Access-Control-Expose-Headers` - - /** Java API */ - def getHeaders: Iterable[String] = headers.asJava -} - -// http://www.w3.org/TR/cors/#access-control-max-age-response-header -object `Access-Control-Max-Age` extends ModeledCompanion[`Access-Control-Max-Age`] -final case class `Access-Control-Max-Age`(deltaSeconds: Long) extends jm.headers.AccessControlMaxAge with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ deltaSeconds - protected def companion = `Access-Control-Max-Age` -} - -// http://www.w3.org/TR/cors/#access-control-request-headers-request-header -object `Access-Control-Request-Headers` extends ModeledCompanion[`Access-Control-Request-Headers`] { - def apply(headers: String*): `Access-Control-Request-Headers` = apply(immutable.Seq(headers: _*)) - implicit val headersRenderer = Renderer.defaultSeqRenderer[String] // cache -} -final case class `Access-Control-Request-Headers`(headers: immutable.Seq[String]) extends jm.headers.AccessControlRequestHeaders with ModeledHeader { - import `Access-Control-Request-Headers`.headersRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ headers - protected def companion = `Access-Control-Request-Headers` - - /** Java API */ - def getHeaders: Iterable[String] = headers.asJava -} - -// http://www.w3.org/TR/cors/#access-control-request-method-request-header -object `Access-Control-Request-Method` extends ModeledCompanion[`Access-Control-Request-Method`] -final case class `Access-Control-Request-Method`(method: HttpMethod) extends jm.headers.AccessControlRequestMethod with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ method - protected def companion = `Access-Control-Request-Method` -} - -// http://tools.ietf.org/html/rfc7234#section-5.1 -object Age extends ModeledCompanion[Age] -final case class Age(deltaSeconds: Long) extends jm.headers.Age with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ deltaSeconds - protected def companion = Age -} - -// http://tools.ietf.org/html/rfc7231#section-7.4.1 -object Allow extends ModeledCompanion[Allow] { - def apply(methods: HttpMethod*): Allow = apply(immutable.Seq(methods: _*)) - implicit val methodsRenderer = Renderer.defaultSeqRenderer[HttpMethod] // cache -} -final case class Allow(methods: immutable.Seq[HttpMethod]) extends jm.headers.Allow with ModeledHeader { - import Allow.methodsRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ methods - protected def companion = Allow - - /** Java API */ - def getMethods: 
Iterable[jm.HttpMethod] = methods.asJava -} - -// http://tools.ietf.org/html/rfc7235#section-4.2 -object Authorization extends ModeledCompanion[Authorization] -final case class Authorization(credentials: HttpCredentials) extends jm.headers.Authorization with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = r ~~ credentials - protected def companion = Authorization -} - -// http://tools.ietf.org/html/rfc7234#section-5.2 -object `Cache-Control` extends ModeledCompanion[`Cache-Control`] { - def apply(first: CacheDirective, more: CacheDirective*): `Cache-Control` = apply(immutable.Seq(first +: more: _*)) - implicit val directivesRenderer = Renderer.defaultSeqRenderer[CacheDirective] // cache -} -final case class `Cache-Control`(directives: immutable.Seq[CacheDirective]) extends jm.headers.CacheControl with ModeledHeader { - require(directives.nonEmpty, "directives must not be empty") - import `Cache-Control`.directivesRenderer - def renderValue[R <: Rendering](r: R): r.type = r ~~ directives - protected def companion = `Cache-Control` - - /** Java API */ - def getDirectives: Iterable[jm.headers.CacheDirective] = directives.asJava -} - // http://tools.ietf.org/html/rfc6266 object `Content-Disposition` extends ModeledCompanion[`Content-Disposition`] -final case class `Content-Disposition`(dispositionType: ContentDispositionType, params: Map[String, String] = Map.empty) extends jm.headers.ContentDisposition with ModeledHeader { - def renderValue[R <: Rendering](r: R): r.type = { r ~~ dispositionType; params foreach { case (k, v) ⇒ r ~~ "; " ~~ k ~~ '=' ~~# v }; r } +final case class `Content-Disposition`(dispositionType: ContentDispositionType, params: Map[String, String] = Map.empty) + extends jm.headers.ContentDisposition with RequestResponseHeader { + def renderValue[R <: Rendering](r: R): r.type = { + r ~~ dispositionType + params foreach { case (k, v) ⇒ r ~~ "; " ~~ k ~~ '=' ~~# v } + r + } protected def companion = `Content-Disposition` /** Java API */ @@ -416,7 +384,8 @@ object `Content-Encoding` extends ModeledCompanion[`Content-Encoding`] { def apply(first: HttpEncoding, more: HttpEncoding*): `Content-Encoding` = apply(immutable.Seq(first +: more: _*)) implicit val encodingsRenderer = Renderer.defaultSeqRenderer[HttpEncoding] // cache } -final case class `Content-Encoding`(encodings: immutable.Seq[HttpEncoding]) extends jm.headers.ContentEncoding with ModeledHeader { +final case class `Content-Encoding`(encodings: immutable.Seq[HttpEncoding]) extends jm.headers.ContentEncoding + with RequestResponseHeader { require(encodings.nonEmpty, "encodings must not be empty") import `Content-Encoding`.encodingsRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ encodings @@ -430,7 +399,8 @@ final case class `Content-Encoding`(encodings: immutable.Seq[HttpEncoding]) exte object `Content-Range` extends ModeledCompanion[`Content-Range`] { def apply(byteContentRange: ByteContentRange): `Content-Range` = apply(RangeUnits.Bytes, byteContentRange) } -final case class `Content-Range`(rangeUnit: RangeUnit, contentRange: ContentRange) extends jm.headers.ContentRange with ModeledHeader { +final case class `Content-Range`(rangeUnit: RangeUnit, contentRange: ContentRange) extends jm.headers.ContentRange + with RequestResponseHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ rangeUnit ~~ ' ' ~~ contentRange protected def companion = `Content-Range` } @@ -441,7 +411,8 @@ object `Content-Type` extends ModeledCompanion[`Content-Type`] * Instances of this class will only be 
created transiently during header parsing and will never appear * in HttpMessage.header. To access the Content-Type, see subclasses of HttpEntity. */ -final case class `Content-Type` private[http] (contentType: ContentType) extends jm.headers.ContentType with ModeledHeader { +final case class `Content-Type` private[http] (contentType: ContentType) extends jm.headers.ContentType + with RequestResponseHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ contentType protected def companion = `Content-Type` } @@ -453,7 +424,7 @@ object Cookie extends ModeledCompanion[Cookie] { def apply(values: (String, String)*): Cookie = apply(values.map(HttpCookiePair(_)).toList) implicit val cookiePairsRenderer = Renderer.seqRenderer[HttpCookiePair](separator = "; ") // cache } -final case class Cookie(cookies: immutable.Seq[HttpCookiePair]) extends jm.headers.Cookie with ModeledHeader { +final case class Cookie(cookies: immutable.Seq[HttpCookiePair]) extends jm.headers.Cookie with RequestHeader { require(cookies.nonEmpty, "cookies must not be empty") import Cookie.cookiePairsRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ cookies @@ -465,42 +436,80 @@ final case class Cookie(cookies: immutable.Seq[HttpCookiePair]) extends jm.heade // http://tools.ietf.org/html/rfc7231#section-7.1.1.2 object Date extends ModeledCompanion[Date] -final case class Date(date: DateTime) extends jm.headers.Date with ModeledHeader { +final case class Date(date: DateTime) extends jm.headers.Date with RequestResponseHeader { def renderValue[R <: Rendering](r: R): r.type = date.renderRfc1123DateTimeString(r) protected def companion = Date } +/** + * INTERNAL API + */ +private[headers] object EmptyCompanion extends ModeledCompanion[EmptyHeader.type] +/** + * INTERNAL API + */ +private[http] object EmptyHeader extends SyntheticHeader { + def renderValue[R <: Rendering](r: R): r.type = r + protected def companion: ModeledCompanion[EmptyHeader.type] = EmptyCompanion +} + // http://tools.ietf.org/html/rfc7232#section-2.3 object ETag extends ModeledCompanion[ETag] { def apply(tag: String, weak: Boolean = false): ETag = ETag(EntityTag(tag, weak)) } -final case class ETag(etag: EntityTag) extends jm.headers.ETag with ModeledHeader { +final case class ETag(etag: EntityTag) extends jm.headers.ETag with ResponseHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ etag protected def companion = ETag } +// http://tools.ietf.org/html/rfc7231#section-5.1.1 +object Expect extends ModeledCompanion[Expect] { + val `100-continue` = new Expect() {} +} +sealed abstract case class Expect private () extends RequestHeader { + final def renderValue[R <: Rendering](r: R): r.type = r ~~ "100-continue" + protected def companion = Expect +} + // http://tools.ietf.org/html/rfc7234#section-5.3 object Expires extends ModeledCompanion[Expires] -final case class Expires(date: DateTime) extends jm.headers.Expires with ModeledHeader { +final case class Expires(date: DateTime) extends jm.headers.Expires with ResponseHeader { def renderValue[R <: Rendering](r: R): r.type = date.renderRfc1123DateTimeString(r) protected def companion = Expires } +// http://tools.ietf.org/html/rfc7230#section-5.4 +object Host extends ModeledCompanion[Host] { + def apply(authority: Uri.Authority): Host = apply(authority.host, authority.port) + def apply(address: InetSocketAddress): Host = apply(address.getHostString, address.getPort) + def apply(host: String): Host = apply(host, 0) + def apply(host: String, port: Int): Host = apply(Uri.Host(host), port) + val 
empty = Host("") +} +final case class Host(host: Uri.Host, port: Int = 0) extends jm.headers.Host with RequestHeader { + import UriRendering.HostRenderer + require((port >> 16) == 0, "Illegal port: " + port) + def isEmpty = host.isEmpty + def renderValue[R <: Rendering](r: R): r.type = if (port > 0) r ~~ host ~~ ':' ~~ port else r ~~ host + protected def companion = Host + def equalsIgnoreCase(other: Host): Boolean = host.equalsIgnoreCase(other.host) && port == other.port +} + // http://tools.ietf.org/html/rfc7232#section-3.1 object `If-Match` extends ModeledCompanion[`If-Match`] { val `*` = `If-Match`(EntityTagRange.`*`) def apply(first: EntityTag, more: EntityTag*): `If-Match` = `If-Match`(EntityTagRange(first +: more: _*)) } -final case class `If-Match`(m: EntityTagRange) extends jm.headers.IfMatch with ModeledHeader { +final case class `If-Match`(m: EntityTagRange) extends jm.headers.IfMatch with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ m protected def companion = `If-Match` } // http://tools.ietf.org/html/rfc7232#section-3.3 object `If-Modified-Since` extends ModeledCompanion[`If-Modified-Since`] -final case class `If-Modified-Since`(date: DateTime) extends jm.headers.IfModifiedSince with ModeledHeader { +final case class `If-Modified-Since`(date: DateTime) extends jm.headers.IfModifiedSince with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = date.renderRfc1123DateTimeString(r) protected def companion = `If-Modified-Since` } @@ -511,21 +520,35 @@ object `If-None-Match` extends ModeledCompanion[`If-None-Match`] { def apply(first: EntityTag, more: EntityTag*): `If-None-Match` = `If-None-Match`(EntityTagRange(first +: more: _*)) } -final case class `If-None-Match`(m: EntityTagRange) extends jm.headers.IfNoneMatch with ModeledHeader { +final case class `If-None-Match`(m: EntityTagRange) extends jm.headers.IfNoneMatch with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ m protected def companion = `If-None-Match` } +// http://tools.ietf.org/html/rfc7233#section-3.2 +object `If-Range` extends ModeledCompanion[`If-Range`] { + def apply(tag: EntityTag): `If-Range` = apply(Left(tag)) + def apply(timestamp: DateTime): `If-Range` = apply(Right(timestamp)) +} +final case class `If-Range`(entityTagOrDateTime: Either[EntityTag, DateTime]) extends RequestHeader { + def renderValue[R <: Rendering](r: R): r.type = + entityTagOrDateTime match { + case Left(tag) ⇒ r ~~ tag + case Right(dateTime) ⇒ dateTime.renderRfc1123DateTimeString(r) + } + protected def companion = `If-Range` +} + // http://tools.ietf.org/html/rfc7232#section-3.4 object `If-Unmodified-Since` extends ModeledCompanion[`If-Unmodified-Since`] -final case class `If-Unmodified-Since`(date: DateTime) extends jm.headers.IfUnmodifiedSince with ModeledHeader { +final case class `If-Unmodified-Since`(date: DateTime) extends jm.headers.IfUnmodifiedSince with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = date.renderRfc1123DateTimeString(r) protected def companion = `If-Unmodified-Since` } // http://tools.ietf.org/html/rfc7232#section-2.2 object `Last-Modified` extends ModeledCompanion[`Last-Modified`] -final case class `Last-Modified`(date: DateTime) extends jm.headers.LastModified with ModeledHeader { +final case class `Last-Modified`(date: DateTime) extends jm.headers.LastModified with ResponseHeader { def renderValue[R <: Rendering](r: R): r.type = date.renderRfc1123DateTimeString(r) protected def companion = `Last-Modified` } @@ -536,7 +559,7 @@ object Link 
extends ModeledCompanion[Link] { def apply(values: LinkValue*): Link = apply(immutable.Seq(values: _*)) implicit val valuesRenderer = Renderer.defaultSeqRenderer[LinkValue] // cache } -final case class Link(values: immutable.Seq[LinkValue]) extends jm.headers.Link with ModeledHeader { +final case class Link(values: immutable.Seq[LinkValue]) extends jm.headers.Link with RequestResponseHeader { import Link.valuesRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ values protected def companion = Link @@ -547,7 +570,7 @@ final case class Link(values: immutable.Seq[LinkValue]) extends jm.headers.Link // http://tools.ietf.org/html/rfc7231#section-7.1.2 object Location extends ModeledCompanion[Location] -final case class Location(uri: Uri) extends jm.headers.Location with ModeledHeader { +final case class Location(uri: Uri) extends jm.headers.Location with ResponseHeader { def renderValue[R <: Rendering](r: R): r.type = { import UriRendering.UriRenderer; r ~~ uri } protected def companion = Location @@ -559,7 +582,7 @@ final case class Location(uri: Uri) extends jm.headers.Location with ModeledHead object Origin extends ModeledCompanion[Origin] { def apply(origins: HttpOrigin*): Origin = apply(immutable.Seq(origins: _*)) } -final case class Origin(origins: immutable.Seq[HttpOrigin]) extends jm.headers.Origin with ModeledHeader { +final case class Origin(origins: immutable.Seq[HttpOrigin]) extends jm.headers.Origin with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = if (origins.isEmpty) r ~~ "null" else r ~~ origins protected def companion = Origin @@ -572,7 +595,8 @@ object `Proxy-Authenticate` extends ModeledCompanion[`Proxy-Authenticate`] { def apply(first: HttpChallenge, more: HttpChallenge*): `Proxy-Authenticate` = apply(immutable.Seq(first +: more: _*)) implicit val challengesRenderer = Renderer.defaultSeqRenderer[HttpChallenge] // cache } -final case class `Proxy-Authenticate`(challenges: immutable.Seq[HttpChallenge]) extends jm.headers.ProxyAuthenticate with ModeledHeader { +final case class `Proxy-Authenticate`(challenges: immutable.Seq[HttpChallenge]) extends jm.headers.ProxyAuthenticate + with ResponseHeader { require(challenges.nonEmpty, "challenges must not be empty") import `Proxy-Authenticate`.challengesRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ challenges @@ -584,7 +608,8 @@ final case class `Proxy-Authenticate`(challenges: immutable.Seq[HttpChallenge]) // http://tools.ietf.org/html/rfc7235#section-4.4 object `Proxy-Authorization` extends ModeledCompanion[`Proxy-Authorization`] -final case class `Proxy-Authorization`(credentials: HttpCredentials) extends jm.headers.ProxyAuthorization with ModeledHeader { +final case class `Proxy-Authorization`(credentials: HttpCredentials) extends jm.headers.ProxyAuthorization + with RequestHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ credentials protected def companion = `Proxy-Authorization` } @@ -595,7 +620,8 @@ object Range extends ModeledCompanion[Range] { def apply(ranges: immutable.Seq[ByteRange]): Range = Range(RangeUnits.Bytes, ranges) implicit val rangesRenderer = Renderer.defaultSeqRenderer[ByteRange] // cache } -final case class Range(rangeUnit: RangeUnit, ranges: immutable.Seq[ByteRange]) extends jm.headers.Range with ModeledHeader { +final case class Range(rangeUnit: RangeUnit, ranges: immutable.Seq[ByteRange]) extends jm.headers.Range + with RequestHeader { require(ranges.nonEmpty, "ranges must not be empty") import Range.rangesRenderer def renderValue[R <: Rendering](r: 
R): r.type = r ~~ rangeUnit ~~ '=' ~~ ranges @@ -605,22 +631,33 @@ final case class Range(rangeUnit: RangeUnit, ranges: immutable.Seq[ByteRange]) e def getRanges: Iterable[jm.headers.ByteRange] = ranges.asJava } +final case class RawHeader(name: String, value: String) extends jm.headers.RawHeader { + def renderInRequests = true + def renderInResponses = true + val lowercaseName = name.toRootLowerCase + def render[R <: Rendering](r: R): r.type = r ~~ name ~~ ':' ~~ ' ' ~~ value +} +object RawHeader { + def unapply[H <: HttpHeader](customHeader: H): Option[(String, String)] = + Some(customHeader.name -> customHeader.value) +} + object `Raw-Request-URI` extends ModeledCompanion[`Raw-Request-URI`] -final case class `Raw-Request-URI`(uri: String) extends jm.headers.RawRequestURI with ModeledHeader { +final case class `Raw-Request-URI`(uri: String) extends jm.headers.RawRequestURI with SyntheticHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ uri protected def companion = `Raw-Request-URI` } object `Remote-Address` extends ModeledCompanion[`Remote-Address`] -final case class `Remote-Address`(address: RemoteAddress) extends jm.headers.RemoteAddress with ModeledHeader { +final case class `Remote-Address`(address: RemoteAddress) extends jm.headers.RemoteAddress with SyntheticHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ address protected def companion = `Remote-Address` } // http://tools.ietf.org/html/rfc7231#section-5.5.2 object Referer extends ModeledCompanion[Referer] -final case class Referer(uri: Uri) extends jm.headers.Referer with ModeledHeader { - require(uri.fragment == None, "Referer header URI must not contain a fragment") +final case class Referer(uri: Uri) extends jm.headers.Referer with RequestHeader { + require(uri.fragment.isEmpty, "Referer header URI must not contain a fragment") require(uri.authority.userinfo.isEmpty, "Referer header URI must not contain a userinfo component") def renderValue[R <: Rendering](r: R): r.type = { import UriRendering.UriRenderer; r ~~ uri } @@ -650,7 +687,7 @@ private[http] object `Sec-WebSocket-Accept` extends ModeledCompanion[`Sec-WebSoc /** * INTERNAL API */ -private[http] final case class `Sec-WebSocket-Accept`(key: String) extends ModeledHeader { +private[http] final case class `Sec-WebSocket-Accept`(key: String) extends ResponseHeader { protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ key protected def companion = `Sec-WebSocket-Accept` @@ -661,16 +698,16 @@ private[http] final case class `Sec-WebSocket-Accept`(key: String) extends Model */ // http://tools.ietf.org/html/rfc6455#section-4.3 private[http] object `Sec-WebSocket-Extensions` extends ModeledCompanion[`Sec-WebSocket-Extensions`] { - implicit val extensionsRenderer = Renderer.defaultSeqRenderer[WebsocketExtension] + implicit val extensionsRenderer = Renderer.defaultSeqRenderer[WebSocketExtension] } /** * INTERNAL API */ -private[http] final case class `Sec-WebSocket-Extensions`(extensions: immutable.Seq[WebsocketExtension]) extends ModeledHeader { +private[http] final case class `Sec-WebSocket-Extensions`(extensions: immutable.Seq[WebSocketExtension]) + extends ResponseHeader { require(extensions.nonEmpty, "Sec-WebSocket-Extensions.extensions must not be empty") import `Sec-WebSocket-Extensions`.extensionsRenderer protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ extensions - protected def companion = `Sec-WebSocket-Extensions` } @@ -687,13 +724,13 @@ private[http] object `Sec-WebSocket-Key` extends 
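// Small illustrative check of the RawHeader change above: raw headers now report that they
// are rendered in both requests and responses. The header name and value are made-up examples.
import akka.http.scaladsl.model.headers.RawHeader

object RawHeaderSketch {
  val traceId = RawHeader("X-Trace-Id", "abc-123")
  val renderedEverywhere: Boolean = traceId.renderInRequests && traceId.renderInResponses // true
}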
ModeledCompanion[`Sec-WebSocket /** * INTERNAL API */ -private[http] final case class `Sec-WebSocket-Key`(key: String) extends ModeledHeader { +private[http] final case class `Sec-WebSocket-Key`(key: String) extends RequestHeader { protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ key protected def companion = `Sec-WebSocket-Key` /** - * Checks if the key value is valid according to the Websocket specification, i.e. + * Checks if the key value is valid according to the WebSocket specification, i.e. * if the String is a Base64 representation of 16 bytes. */ def isValid: Boolean = Try(Base64.rfc2045().decode(key)).toOption.exists(_.length == 16) @@ -709,11 +746,11 @@ private[http] object `Sec-WebSocket-Protocol` extends ModeledCompanion[`Sec-WebS /** * INTERNAL API */ -private[http] final case class `Sec-WebSocket-Protocol`(protocols: immutable.Seq[String]) extends ModeledHeader { +private[http] final case class `Sec-WebSocket-Protocol`(protocols: immutable.Seq[String]) + extends RequestResponseHeader { require(protocols.nonEmpty, "Sec-WebSocket-Protocol.protocols must not be empty") import `Sec-WebSocket-Protocol`.protocolsRenderer protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ protocols - protected def companion = `Sec-WebSocket-Protocol` } @@ -727,14 +764,13 @@ private[http] object `Sec-WebSocket-Version` extends ModeledCompanion[`Sec-WebSo /** * INTERNAL API */ -private[http] final case class `Sec-WebSocket-Version`(versions: immutable.Seq[Int]) extends ModeledHeader { +private[http] final case class `Sec-WebSocket-Version`(versions: immutable.Seq[Int]) + extends RequestResponseHeader { require(versions.nonEmpty, "Sec-WebSocket-Version.versions must not be empty") require(versions.forall(v ⇒ v >= 0 && v <= 255), s"Sec-WebSocket-Version.versions must be in the range 0 <= version <= 255 but were $versions") import `Sec-WebSocket-Version`.versionsRenderer protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ versions - - def hasVersion(versionNumber: Int): Boolean = versions.exists(_ == versionNumber) - + def hasVersion(versionNumber: Int): Boolean = versions contains versionNumber protected def companion = `Sec-WebSocket-Version` } @@ -744,7 +780,7 @@ object Server extends ModeledCompanion[Server] { def apply(first: ProductVersion, more: ProductVersion*): Server = apply(immutable.Seq(first +: more: _*)) implicit val productsRenderer = Renderer.seqRenderer[ProductVersion](separator = " ") // cache } -final case class Server(products: immutable.Seq[ProductVersion]) extends jm.headers.Server with ModeledHeader { +final case class Server(products: immutable.Seq[ProductVersion]) extends jm.headers.Server with ResponseHeader { require(products.nonEmpty, "products must not be empty") import Server.productsRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ products @@ -756,11 +792,18 @@ final case class Server(products: immutable.Seq[ProductVersion]) extends jm.head // https://tools.ietf.org/html/rfc6265 object `Set-Cookie` extends ModeledCompanion[`Set-Cookie`] -final case class `Set-Cookie`(cookie: HttpCookie) extends jm.headers.SetCookie with ModeledHeader { +final case class `Set-Cookie`(cookie: HttpCookie) extends jm.headers.SetCookie with ResponseHeader { def renderValue[R <: Rendering](r: R): r.type = r ~~ cookie protected def companion = `Set-Cookie` } +object `Timeout-Access` extends ModeledCompanion[`Timeout-Access`] +final case class `Timeout-Access`(timeoutAccess: akka.http.scaladsl.TimeoutAccess) + extends 
jm.headers.TimeoutAccess with SyntheticHeader { + def renderValue[R <: Rendering](r: R): r.type = r ~~ timeoutAccess.toString + protected def companion = `Timeout-Access` +} + /** * Model for the synthetic `Tls-Session-Info` header which carries the SSLSession of the connection * the message carrying this header was received with. @@ -771,17 +814,14 @@ final case class `Set-Cookie`(cookie: HttpCookie) extends jm.headers.SetCookie w * akka.http.[client|server].parsing.tls-session-info-header = on * ``` */ -final case class `Tls-Session-Info`(session: SSLSession) extends jm.headers.TlsSessionInfo with ScalaSessionAPI { - override def suppressRendering: Boolean = true - override def toString = s"SSL-Session-Info($session)" - def name(): String = "SSL-Session-Info" - def value(): String = "" +object `Tls-Session-Info` extends ModeledCompanion[`Tls-Session-Info`] +final case class `Tls-Session-Info`(session: SSLSession) extends jm.headers.TlsSessionInfo with SyntheticHeader + with ScalaSessionAPI { + def renderValue[R <: Rendering](r: R): r.type = r ~~ session.toString + protected def companion = `Tls-Session-Info` /** Java API */ - def getSession(): SSLSession = session - - def lowercaseName: String = name.toRootLowerCase - def render[R <: Rendering](r: R): r.type = r ~~ name ~~ ':' ~~ ' ' ~~ value + def getSession: SSLSession = session } // http://tools.ietf.org/html/rfc7230#section-3.3.1 @@ -789,7 +829,8 @@ object `Transfer-Encoding` extends ModeledCompanion[`Transfer-Encoding`] { def apply(first: TransferEncoding, more: TransferEncoding*): `Transfer-Encoding` = apply(immutable.Seq(first +: more: _*)) implicit val encodingsRenderer = Renderer.defaultSeqRenderer[TransferEncoding] // cache } -final case class `Transfer-Encoding`(encodings: immutable.Seq[TransferEncoding]) extends jm.headers.TransferEncoding with ModeledHeader { +final case class `Transfer-Encoding`(encodings: immutable.Seq[TransferEncoding]) extends jm.headers.TransferEncoding + with RequestResponseHeader { require(encodings.nonEmpty, "encodings must not be empty") import `Transfer-Encoding`.encodingsRenderer def isChunked: Boolean = encodings.last == TransferEncodings.chunked @@ -813,13 +854,13 @@ final case class `Transfer-Encoding`(encodings: immutable.Seq[TransferEncoding]) object Upgrade extends ModeledCompanion[Upgrade] { implicit val protocolsRenderer = Renderer.defaultSeqRenderer[UpgradeProtocol] } -final case class Upgrade(protocols: immutable.Seq[UpgradeProtocol]) extends ModeledHeader { +final case class Upgrade(protocols: immutable.Seq[UpgradeProtocol]) extends RequestResponseHeader { import Upgrade.protocolsRenderer protected[http] def renderValue[R <: Rendering](r: R): r.type = r ~~ protocols protected def companion = Upgrade - def hasWebsocket: Boolean = protocols.exists(_.name equalsIgnoreCase "websocket") + def hasWebSocket: Boolean = protocols.exists(_.name equalsIgnoreCase "websocket") } // http://tools.ietf.org/html/rfc7231#section-5.5.3 @@ -828,7 +869,7 @@ object `User-Agent` extends ModeledCompanion[`User-Agent`] { def apply(first: ProductVersion, more: ProductVersion*): `User-Agent` = apply(immutable.Seq(first +: more: _*)) implicit val productsRenderer = Renderer.seqRenderer[ProductVersion](separator = " ") // cache } -final case class `User-Agent`(products: immutable.Seq[ProductVersion]) extends jm.headers.UserAgent with ModeledHeader { +final case class `User-Agent`(products: immutable.Seq[ProductVersion]) extends jm.headers.UserAgent with RequestHeader { require(products.nonEmpty, "products must not 
be empty") import `User-Agent`.productsRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ products @@ -843,7 +884,8 @@ object `WWW-Authenticate` extends ModeledCompanion[`WWW-Authenticate`] { def apply(first: HttpChallenge, more: HttpChallenge*): `WWW-Authenticate` = apply(immutable.Seq(first +: more: _*)) implicit val challengesRenderer = Renderer.defaultSeqRenderer[HttpChallenge] // cache } -final case class `WWW-Authenticate`(challenges: immutable.Seq[HttpChallenge]) extends jm.headers.WWWAuthenticate with ModeledHeader { +final case class `WWW-Authenticate`(challenges: immutable.Seq[HttpChallenge]) extends jm.headers.WWWAuthenticate + with ResponseHeader { require(challenges.nonEmpty, "challenges must not be empty") import `WWW-Authenticate`.challengesRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ challenges @@ -859,7 +901,8 @@ object `X-Forwarded-For` extends ModeledCompanion[`X-Forwarded-For`] { def apply(first: RemoteAddress, more: RemoteAddress*): `X-Forwarded-For` = apply(immutable.Seq(first +: more: _*)) implicit val addressesRenderer = Renderer.defaultSeqRenderer[RemoteAddress] // cache } -final case class `X-Forwarded-For`(addresses: immutable.Seq[RemoteAddress]) extends jm.headers.XForwardedFor with ModeledHeader { +final case class `X-Forwarded-For`(addresses: immutable.Seq[RemoteAddress]) extends jm.headers.XForwardedFor + with RequestHeader { require(addresses.nonEmpty, "addresses must not be empty") import `X-Forwarded-For`.addressesRenderer def renderValue[R <: Rendering](r: R): r.type = r ~~ addresses @@ -868,4 +911,3 @@ final case class `X-Forwarded-For`(addresses: immutable.Seq[RemoteAddress]) exte /** Java API */ def getAddresses: Iterable[jm.RemoteAddress] = addresses.asJava } - diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/package.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/package.scala index d78f8d99c9..37f7b3462b 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/package.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/package.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/Message.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/Message.scala index 4a38aa64bd..a8b5622316 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/Message.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/Message.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.ws @@ -9,7 +9,7 @@ import akka.util.ByteString //#message-model /** - * The ADT for Websocket messages. A message can either be a binary or a text message. + * The ADT for WebSocket messages. A message can either be a binary or a text message. */ sealed trait Message diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/PeerClosedConnectionException.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/PeerClosedConnectionException.scala index 8f768252f5..1328e74863 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/PeerClosedConnectionException.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/PeerClosedConnectionException.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model.ws @@ -7,7 +7,7 @@ package akka.http.scaladsl.model.ws import akka.http.javadsl /** - * A PeerClosedConnectionException will be reported to the Websocket handler if the peer has closed the connection. + * A PeerClosedConnectionException will be reported to the WebSocket handler if the peer has closed the connection. * `closeCode` and `closeReason` contain close messages as reported by the peer. */ class PeerClosedConnectionException(val closeCode: Int, val closeReason: String) diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebsocket.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebSocket.scala similarity index 53% rename from akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebsocket.scala rename to akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebSocket.scala index 5508161834..e41be5e988 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebsocket.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/UpgradeToWebSocket.scala @@ -1,25 +1,23 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.ws import java.lang.Iterable -import akka.http.impl.util.JavaMapping - import scala.collection.immutable -import akka.stream -import akka.stream.javadsl -import akka.stream.scaladsl.{ Sink, Source, Flow } +import akka.NotUsed +import akka.stream._ +import akka.http.impl.util.JavaMapping import akka.http.javadsl.{ model ⇒ jm } import akka.http.scaladsl.model.HttpResponse /** - * A custom header that will be added to an Websocket upgrade HttpRequest that - * enables a request handler to upgrade this connection to a Websocket connection and - * registers a Websocket handler. + * A custom header that will be added to an WebSocket upgrade HttpRequest that + * enables a request handler to upgrade this connection to a WebSocket connection and + * registers a WebSocket handler. */ -trait UpgradeToWebsocket extends jm.ws.UpgradeToWebsocket { +trait UpgradeToWebSocket extends jm.ws.UpgradeToWebSocket { /** * A sequence of protocols the client accepts. * @@ -28,31 +26,31 @@ trait UpgradeToWebsocket extends jm.ws.UpgradeToWebsocket { def requestedProtocols: immutable.Seq[String] /** - * The high-level interface to create a Websocket server based on "messages". + * The high-level interface to create a WebSocket server based on "messages". * * Returns a response to return in a request handler that will signal the - * low-level HTTP implementation to upgrade the connection to Websocket and - * use the supplied handler to handle incoming Websocket messages. + * low-level HTTP implementation to upgrade the connection to WebSocket and + * use the supplied handler to handle incoming WebSocket messages. * * Optionally, a subprotocol out of the ones requested by the client can be chosen. */ - def handleMessages(handlerFlow: Flow[Message, Message, Any], + def handleMessages(handlerFlow: Graph[FlowShape[Message, Message], Any], subprotocol: Option[String] = None): HttpResponse /** - * The high-level interface to create a Websocket server based on "messages". + * The high-level interface to create a WebSocket server based on "messages". 
* * Returns a response to return in a request handler that will signal the - * low-level HTTP implementation to upgrade the connection to Websocket and + * low-level HTTP implementation to upgrade the connection to WebSocket and * use the supplied inSink to consume messages received from the client and * the supplied outSource to produce message to sent to the client. * * Optionally, a subprotocol out of the ones requested by the client can be chosen. */ - def handleMessagesWithSinkSource(inSink: Sink[Message, Any], - outSource: Source[Message, Any], + def handleMessagesWithSinkSource(inSink: Graph[SinkShape[Message], Any], + outSource: Graph[SourceShape[Message], Any], subprotocol: Option[String] = None): HttpResponse = - handleMessages(Flow.fromSinkAndSource(inSink, outSource), subprotocol) + handleMessages(scaladsl.Flow.fromSinkAndSource(inSink, outSource), subprotocol) import scala.collection.JavaConverters._ @@ -64,29 +62,29 @@ trait UpgradeToWebsocket extends jm.ws.UpgradeToWebsocket { /** * Java API */ - def handleMessagesWith(handlerFlow: stream.javadsl.Flow[jm.ws.Message, jm.ws.Message, _]): HttpResponse = + def handleMessagesWith(handlerFlow: Graph[FlowShape[jm.ws.Message, jm.ws.Message], _ <: Any]): HttpResponse = handleMessages(JavaMapping.toScala(handlerFlow)) /** * Java API */ - def handleMessagesWith(handlerFlow: stream.javadsl.Flow[jm.ws.Message, jm.ws.Message, _], subprotocol: String): HttpResponse = + def handleMessagesWith(handlerFlow: Graph[FlowShape[jm.ws.Message, jm.ws.Message], _ <: Any], subprotocol: String): HttpResponse = handleMessages(JavaMapping.toScala(handlerFlow), subprotocol = Some(subprotocol)) /** * Java API */ - def handleMessagesWith(inSink: stream.javadsl.Sink[jm.ws.Message, _], outSource: javadsl.Source[jm.ws.Message, _]): HttpResponse = + def handleMessagesWith(inSink: Graph[SinkShape[jm.ws.Message], _ <: Any], outSource: Graph[SourceShape[jm.ws.Message], _ <: Any]): HttpResponse = handleMessages(createScalaFlow(inSink, outSource)) /** * Java API */ - def handleMessagesWith(inSink: stream.javadsl.Sink[jm.ws.Message, _], - outSource: javadsl.Source[jm.ws.Message, _], + def handleMessagesWith(inSink: Graph[SinkShape[jm.ws.Message], _ <: Any], + outSource: Graph[SourceShape[jm.ws.Message], _ <: Any], subprotocol: String): HttpResponse = handleMessages(createScalaFlow(inSink, outSource), subprotocol = Some(subprotocol)) - private[this] def createScalaFlow(inSink: stream.javadsl.Sink[jm.ws.Message, _], outSource: stream.javadsl.Source[jm.ws.Message, _]): Flow[Message, Message, Any] = - JavaMapping.toScala(Flow.fromSinkAndSourceMat(inSink.asScala, outSource.asScala)((_, _) ⇒ ()).asJava) + private[this] def createScalaFlow(inSink: Graph[SinkShape[jm.ws.Message], _ <: Any], outSource: Graph[SourceShape[jm.ws.Message], _ <: Any]): Graph[FlowShape[Message, Message], NotUsed] = + JavaMapping.toScala(scaladsl.Flow.fromSinkAndSourceMat(inSink, outSource)(scaladsl.Keep.none): Graph[FlowShape[jm.ws.Message, jm.ws.Message], NotUsed]) } diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketRequest.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketRequest.scala new file mode 100644 index 0000000000..051bdc0991 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketRequest.scala @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. 
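For orientation, this is roughly how the renamed `UpgradeToWebSocket` header and the now `Graph`-typed `handleMessages` are used on the server side. The port and the echo flow are placeholders, and the sketch assumes `request.header[UpgradeToWebSocket]` extracts the synthetic header added by the server layer:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, StatusCodes }
import akka.http.scaladsl.model.ws.{ Message, TextMessage, UpgradeToWebSocket }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Flow

object WebSocketUpgradeSketch extends App {
  implicit val system = ActorSystem("ws-upgrade-sketch")
  implicit val materializer = ActorMaterializer()

  // Any Graph[FlowShape[Message, Message], Any] will do; a plain Flow is the common case.
  val echo = Flow[Message].collect { case tm: TextMessage => tm }

  def handler(request: HttpRequest): HttpResponse =
    request.header[UpgradeToWebSocket] match {
      case Some(upgrade) => upgrade.handleMessages(echo) // switches the connection to WebSocket
      case None          => HttpResponse(StatusCodes.BadRequest, entity = "Not a WebSocket upgrade request")
    }

  Http().bindAndHandleSync(handler, "localhost", 8080)
}
```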
+ */ + +package akka.http.scaladsl.model.ws + +import scala.language.implicitConversions + +import scala.collection.immutable + +import akka.http.scaladsl.model.{ HttpHeader, Uri } + +/** + * Represents a WebSocket request. + * @param uri The target URI to connect to. + * @param extraHeaders Extra headers to add to the WebSocket request. + * @param subprotocol A WebSocket subprotocol if required. + */ +final case class WebSocketRequest( + uri: Uri, + extraHeaders: immutable.Seq[HttpHeader] = Nil, + subprotocol: Option[String] = None) +object WebSocketRequest { + implicit def fromTargetUri(uri: Uri): WebSocketRequest = WebSocketRequest(uri) + implicit def fromTargetUriString(uriString: String): WebSocketRequest = WebSocketRequest(uriString) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketUpgradeResponse.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketUpgradeResponse.scala similarity index 64% rename from akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketUpgradeResponse.scala rename to akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketUpgradeResponse.scala index 595fbc2298..ce2ca74243 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketUpgradeResponse.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebSocketUpgradeResponse.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.ws @@ -9,8 +9,8 @@ import akka.http.scaladsl.model.HttpResponse /** * Represents the response to a websocket upgrade request. Can either be [[ValidUpgrade]] or [[InvalidUpgradeResponse]]. */ -sealed trait WebsocketUpgradeResponse { +sealed trait WebSocketUpgradeResponse { def response: HttpResponse } -final case class ValidUpgrade(response: HttpResponse, chosenSubprotocol: Option[String]) extends WebsocketUpgradeResponse -final case class InvalidUpgradeResponse(response: HttpResponse, cause: String) extends WebsocketUpgradeResponse \ No newline at end of file +final case class ValidUpgrade(response: HttpResponse, chosenSubprotocol: Option[String]) extends WebSocketUpgradeResponse +final case class InvalidUpgradeResponse(response: HttpResponse, cause: String) extends WebSocketUpgradeResponse \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketRequest.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketRequest.scala deleted file mode 100644 index e4dd04a02b..0000000000 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/model/ws/WebsocketRequest.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.scaladsl.model.ws - -import scala.language.implicitConversions - -import scala.collection.immutable - -import akka.http.scaladsl.model.{ HttpHeader, Uri } - -/** - * Represents a Websocket request. - * @param uri The target URI to connect to. - * @param extraHeaders Extra headers to add to the Websocket request. - * @param subprotocol A Websocket subprotocol if required. 
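The implicit conversions on the re-created `WebSocketRequest` companion let a plain URI string stand in for the full case class, and the result of a client handshake is the `WebSocketUpgradeResponse` ADT from the renamed file above. A sketch of a client call, assuming the scaladsl `Http().singleWebSocketRequest` counterpart of the Java API exercised in `WSEchoTestClientApp` further down (the echo endpoint is the one that test uses):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Keep, Sink, Source }
import scala.concurrent.Future

object WebSocketClientSketch extends App {
  implicit val system = ActorSystem("ws-client-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // `fromTargetUriString` turns the bare string into a WebSocketRequest.
  val request: WebSocketRequest = "ws://echo.websocket.org"

  // Send one message, keep the first reply as the materialized value.
  val clientFlow: Flow[Message, Message, Future[Message]] =
    Flow.fromSinkAndSourceMat(Sink.head[Message], Source.single(TextMessage("ping")))(Keep.left)

  val (upgradeResponse, firstReply) = Http().singleWebSocketRequest(request, clientFlow)

  upgradeResponse.foreach {
    case ValidUpgrade(response, subprotocol)     => println(s"Connected: ${response.status}, subprotocol = $subprotocol")
    case InvalidUpgradeResponse(response, cause) => println(s"Upgrade rejected (${response.status}): $cause")
  }
}
```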
- */ -final case class WebsocketRequest( - uri: Uri, - extraHeaders: immutable.Seq[HttpHeader] = Nil, - subprotocol: Option[String] = None) -object WebsocketRequest { - implicit def fromTargetUri(uri: Uri): WebsocketRequest = WebsocketRequest(uri) - implicit def fromTargetUriString(uriString: String): WebsocketRequest = WebsocketRequest(uriString) -} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ClientConnectionSettings.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ClientConnectionSettings.scala new file mode 100644 index 0000000000..38eafe73a1 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ClientConnectionSettings.scala @@ -0,0 +1,64 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.scaladsl.settings + +import java.lang.Iterable +import java.util.{ Optional, Random } +import java.util.function.Supplier + +import akka.actor.ActorSystem +import akka.http.impl.settings.ClientConnectionSettingsImpl +import akka.http.javadsl.model.headers.UserAgent +import akka.http.javadsl.{ settings ⇒ js } +import akka.http.scaladsl.model.headers.`User-Agent` +import akka.io.Inet.SocketOption +import com.typesafe.config.Config + +import scala.collection.immutable +import scala.compat.java8.OptionConverters +import scala.concurrent.duration.{ FiniteDuration, Duration } +import scala.collection.JavaConverters._ + +/** + * Public API but not intended for subclassing + */ +abstract class ClientConnectionSettings private[akka] () extends akka.http.javadsl.settings.ClientConnectionSettings { self: ClientConnectionSettingsImpl ⇒ + def userAgentHeader: Option[`User-Agent`] + def connectingTimeout: FiniteDuration + def idleTimeout: Duration + def requestHeaderSizeHint: Int + def websocketRandomFactory: () ⇒ Random + def socketOptions: immutable.Seq[SocketOption] + def parserSettings: ParserSettings + + /* JAVA APIs */ + + final override def getConnectingTimeout: FiniteDuration = connectingTimeout + final override def getParserSettings: js.ParserSettings = parserSettings + final override def getIdleTimeout: Duration = idleTimeout + final override def getSocketOptions: Iterable[SocketOption] = socketOptions.asJava + final override def getUserAgentHeader: Optional[UserAgent] = OptionConverters.toJava(userAgentHeader) + final override def getRequestHeaderSizeHint: Int = requestHeaderSizeHint + final override def getWebsocketRandomFactory: Supplier[Random] = new Supplier[Random] { + override def get(): Random = websocketRandomFactory() + } + + // --- + + // overrides for more specific return type + override def withConnectingTimeout(newValue: FiniteDuration): ClientConnectionSettings = self.copy(connectingTimeout = newValue) + override def withIdleTimeout(newValue: Duration): ClientConnectionSettings = self.copy(idleTimeout = newValue) + override def withRequestHeaderSizeHint(newValue: Int): ClientConnectionSettings = self.copy(requestHeaderSizeHint = newValue) + + // overloads for idiomatic Scala use + def withWebsocketRandomFactory(newValue: () ⇒ Random): ClientConnectionSettings = self.copy(websocketRandomFactory = newValue) + def withUserAgentHeader(newValue: Option[`User-Agent`]): ClientConnectionSettings = self.copy(userAgentHeader = newValue) + def withSocketOptions(newValue: immutable.Seq[SocketOption]): ClientConnectionSettings = self.copy(socketOptions = newValue) + def withParserSettings(newValue: ParserSettings): ClientConnectionSettings = self.copy(parserSettings = newValue) +} + +object 
ClientConnectionSettings extends SettingsCompanion[ClientConnectionSettings] { + override def apply(config: Config): ClientConnectionSettings = ClientConnectionSettingsImpl(config) + override def apply(configOverrides: String): ClientConnectionSettings = ClientConnectionSettingsImpl(configOverrides) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ConnectionPoolSettings.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ConnectionPoolSettings.scala new file mode 100644 index 0000000000..dc58f8eebc --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ConnectionPoolSettings.scala @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.http.scaladsl.settings + +import akka.http.impl.settings.ConnectionPoolSettingsImpl +import akka.http.javadsl.{ settings ⇒ js } +import com.typesafe.config.Config + +import scala.concurrent.duration.Duration + +/** + * Public API but not intended for subclassing + */ +abstract class ConnectionPoolSettings extends js.ConnectionPoolSettings { self: ConnectionPoolSettingsImpl ⇒ + def maxConnections: Int + def maxRetries: Int + def maxOpenRequests: Int + def pipeliningLimit: Int + def idleTimeout: Duration + def connectionSettings: ClientConnectionSettings + + /* JAVA APIs */ + + final override def getConnectionSettings: js.ClientConnectionSettings = connectionSettings + final override def getPipeliningLimit: Int = pipeliningLimit + final override def getIdleTimeout: Duration = idleTimeout + final override def getMaxConnections: Int = maxConnections + final override def getMaxOpenRequests: Int = maxOpenRequests + final override def getMaxRetries: Int = maxRetries + + // --- + + // overrides for more precise return type + override def withMaxConnections(n: Int): ConnectionPoolSettings = self.copy(maxConnections = n) + override def withMaxRetries(n: Int): ConnectionPoolSettings = self.copy(maxRetries = n) + override def withMaxOpenRequests(newValue: Int): ConnectionPoolSettings = self.copy(maxOpenRequests = newValue) + override def withPipeliningLimit(newValue: Int): ConnectionPoolSettings = self.copy(pipeliningLimit = newValue) + override def withIdleTimeout(newValue: Duration): ConnectionPoolSettings = self.copy(idleTimeout = newValue) + + // overloads for idiomatic Scala use + def withConnectionSettings(newValue: ClientConnectionSettings): ConnectionPoolSettings = self.copy(connectionSettings = newValue) +} + +object ConnectionPoolSettings extends SettingsCompanion[ConnectionPoolSettings] { + override def apply(config: Config) = ConnectionPoolSettingsImpl(config) + override def apply(configOverrides: String) = ConnectionPoolSettingsImpl(configOverrides) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ParserSettings.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ParserSettings.scala new file mode 100644 index 0000000000..8c04d18b3a --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ParserSettings.scala @@ -0,0 +1,127 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
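Since every `withX` above returns an updated immutable copy, the new `akka.http.scaladsl.settings` classes compose fluently. A sketch of typical client-side tuning (the concrete values are arbitrary):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.model.headers.{ ProductVersion, `User-Agent` }
import akka.http.scaladsl.settings.{ ClientConnectionSettings, ConnectionPoolSettings }
import scala.concurrent.duration._

object ClientSettingsSketch extends App {
  implicit val system = ActorSystem("settings-sketch")

  // Per-connection settings, read from the system config and then refined.
  val connectionSettings: ClientConnectionSettings =
    ClientConnectionSettings(system)
      .withConnectingTimeout(3.seconds)
      .withIdleTimeout(60.seconds)
      .withUserAgentHeader(Some(`User-Agent`(ProductVersion("sketch-client", "0.1"))))

  // Pool-level settings wrap the connection settings.
  val poolSettings: ConnectionPoolSettings =
    ConnectionPoolSettings(system)
      .withMaxConnections(8)
      .withPipeliningLimit(2)
      .withConnectionSettings(connectionSettings)
}
```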
+ */ +package akka.http.scaladsl.settings + +import java.util +import java.util.Optional +import java.util.function.Function + +import akka.http.impl.settings.ParserSettingsImpl +import akka.http.impl.util._ +import akka.http.scaladsl.model.HttpMethod +import akka.http.scaladsl.model.StatusCode +import akka.http.scaladsl.model.{ HttpMethod, StatusCode, Uri } +import akka.http.scaladsl.{ settings ⇒ js } +import com.typesafe.config.Config + +import scala.annotation.varargs +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters + +/** + * Public API but not intended for subclassing + */ +abstract class ParserSettings private[akka] () extends akka.http.javadsl.settings.ParserSettings { self: ParserSettingsImpl ⇒ + def maxUriLength: Int + def maxMethodLength: Int + def maxResponseReasonLength: Int + def maxHeaderNameLength: Int + def maxHeaderValueLength: Int + def maxHeaderCount: Int + def maxContentLength: Long + def maxChunkExtLength: Int + def maxChunkSize: Int + def uriParsingMode: Uri.ParsingMode + def cookieParsingMode: ParserSettings.CookieParsingMode + def illegalHeaderWarnings: Boolean + def errorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity + def headerValueCacheLimits: Map[String, Int] + def includeTlsSessionInfoHeader: Boolean + def customMethods: String ⇒ Option[HttpMethod] + def customStatusCodes: Int ⇒ Option[StatusCode] + + /* Java APIs */ + override def getCookieParsingMode: js.ParserSettings.CookieParsingMode = cookieParsingMode + override def getHeaderValueCacheLimits: util.Map[String, Int] = headerValueCacheLimits.asJava + override def getMaxChunkExtLength = maxChunkExtLength + override def getUriParsingMode: akka.http.javadsl.model.Uri.ParsingMode = uriParsingMode + override def getMaxHeaderCount = maxHeaderCount + override def getMaxContentLength = maxContentLength + override def getMaxHeaderValueLength = maxHeaderValueLength + override def getIncludeTlsSessionInfoHeader = includeTlsSessionInfoHeader + override def getIllegalHeaderWarnings = illegalHeaderWarnings + override def getMaxHeaderNameLength = maxHeaderNameLength + override def getMaxChunkSize = maxChunkSize + override def getMaxResponseReasonLength = maxResponseReasonLength + override def getMaxUriLength = maxUriLength + override def getMaxMethodLength = maxMethodLength + override def getErrorLoggingVerbosity: js.ParserSettings.ErrorLoggingVerbosity = errorLoggingVerbosity + + override def getCustomMethods = new Function[String, Optional[akka.http.javadsl.model.HttpMethod]] { + override def apply(t: String) = OptionConverters.toJava(customMethods(t)) + } + override def getCustomStatusCodes = new Function[Int, Optional[akka.http.javadsl.model.StatusCode]] { + override def apply(t: Int) = OptionConverters.toJava(customStatusCodes(t)) + } + + // --- + + // override for more specific return type + override def withMaxUriLength(newValue: Int): ParserSettings = self.copy(maxUriLength = newValue) + override def withMaxMethodLength(newValue: Int): ParserSettings = self.copy(maxMethodLength = newValue) + override def withMaxResponseReasonLength(newValue: Int): ParserSettings = self.copy(maxResponseReasonLength = newValue) + override def withMaxHeaderNameLength(newValue: Int): ParserSettings = self.copy(maxHeaderNameLength = newValue) + override def withMaxHeaderValueLength(newValue: Int): ParserSettings = self.copy(maxHeaderValueLength = newValue) + override def withMaxHeaderCount(newValue: Int): ParserSettings = self.copy(maxHeaderCount = newValue) + override def 
withMaxContentLength(newValue: Long): ParserSettings = self.copy(maxContentLength = newValue) + override def withMaxChunkExtLength(newValue: Int): ParserSettings = self.copy(maxChunkExtLength = newValue) + override def withMaxChunkSize(newValue: Int): ParserSettings = self.copy(maxChunkSize = newValue) + override def withIllegalHeaderWarnings(newValue: Boolean): ParserSettings = self.copy(illegalHeaderWarnings = newValue) + override def withIncludeTlsSessionInfoHeader(newValue: Boolean): ParserSettings = self.copy(includeTlsSessionInfoHeader = newValue) + + // overloads for idiomatic Scala use + def withUriParsingMode(newValue: Uri.ParsingMode): ParserSettings = self.copy(uriParsingMode = newValue) + def withCookieParsingMode(newValue: ParserSettings.CookieParsingMode): ParserSettings = self.copy(cookieParsingMode = newValue) + def withErrorLoggingVerbosity(newValue: ParserSettings.ErrorLoggingVerbosity): ParserSettings = self.copy(errorLoggingVerbosity = newValue) + def withHeaderValueCacheLimits(newValue: Map[String, Int]): ParserSettings = self.copy(headerValueCacheLimits = newValue) + def withCustomMethods(methods: HttpMethod*): ParserSettings = { + val map = methods.map(m ⇒ m.name -> m).toMap + self.copy(customMethods = map.get) + } + def withCustomStatusCodes(codes: StatusCode*): ParserSettings = { + val map = codes.map(c ⇒ c.intValue -> c).toMap + self.copy(customStatusCodes = map.get) + } +} + +object ParserSettings extends SettingsCompanion[ParserSettings] { + trait CookieParsingMode extends akka.http.javadsl.settings.ParserSettings.CookieParsingMode + object CookieParsingMode { + case object RFC6265 extends CookieParsingMode + case object Raw extends CookieParsingMode + + def apply(mode: String): CookieParsingMode = mode.toRootLowerCase match { + case "rfc6265" ⇒ RFC6265 + case "raw" ⇒ Raw + } + } + + trait ErrorLoggingVerbosity extends akka.http.javadsl.settings.ParserSettings.ErrorLoggingVerbosity + object ErrorLoggingVerbosity { + case object Off extends ErrorLoggingVerbosity + case object Simple extends ErrorLoggingVerbosity + case object Full extends ErrorLoggingVerbosity + + def apply(string: String): ErrorLoggingVerbosity = + string.toRootLowerCase match { + case "off" ⇒ Off + case "simple" ⇒ Simple + case "full" ⇒ Full + case x ⇒ throw new IllegalArgumentException(s"[$x] is not a legal `error-logging-verbosity` setting") + } + } + + override def apply(config: Config): ParserSettings = ParserSettingsImpl(config) + override def apply(configOverrides: String): ParserSettings = ParserSettingsImpl(configOverrides) +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/RoutingSettings.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/RoutingSettings.scala new file mode 100644 index 0000000000..87c2461e6c --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/RoutingSettings.scala @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2016 Typesafe Inc. 
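The `customMethods` and `customStatusCodes` hooks above are plain lookup functions, so registering non-standard methods or codes is just a matter of building the settings with them. A sketch, assuming the `HttpMethod.custom` and `StatusCodes.custom` factories; the `FETCH` method and the 777 code are made up for illustration:

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ HttpMethod, StatusCodes }
import akka.http.scaladsl.model.RequestEntityAcceptance.Expected
import akka.http.scaladsl.settings.ParserSettings

object ParserSettingsSketch extends App {
  implicit val system = ActorSystem("parser-settings-sketch")

  // Hypothetical method and status code, only to exercise the registration hooks.
  val FETCH = HttpMethod.custom("FETCH", safe = false, idempotent = true, requestEntityAcceptance = Expected)
  val LeetCode = StatusCodes.custom(777, "LeetCode", "Leet has arrived", true, false) // (code, reason, defaultMessage, isSuccess, allowsEntity)

  val parserSettings: ParserSettings =
    ParserSettings(system)
      .withCustomMethods(FETCH)              // backs the `String => Option[HttpMethod]` lookup
      .withCustomStatusCodes(LeetCode)       // same idea for non-standard status codes
      .withIncludeTlsSessionInfoHeader(true) // emit the synthetic Tls-Session-Info header
      .withCookieParsingMode(ParserSettings.CookieParsingMode.Raw)
}
```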
+ */ +package akka.http.scaladsl.settings + +import akka.http.impl.settings.RoutingSettingsImpl +import com.typesafe.config.Config + +/** + * Public API but not intended for subclassing + */ +abstract class RoutingSettings private[akka] () extends akka.http.javadsl.settings.RoutingSettings { self: RoutingSettingsImpl ⇒ + def verboseErrorMessages: Boolean + def fileGetConditional: Boolean + def renderVanityFooter: Boolean + def rangeCountLimit: Int + def rangeCoalescingThreshold: Long + def decodeMaxBytesPerChunk: Int + def fileIODispatcher: String + + /* Java APIs */ + def getVerboseErrorMessages: Boolean = verboseErrorMessages + def getFileGetConditional: Boolean = fileGetConditional + def getRenderVanityFooter: Boolean = renderVanityFooter + def getRangeCountLimit: Int = rangeCountLimit + def getRangeCoalescingThreshold: Long = rangeCoalescingThreshold + def getDecodeMaxBytesPerChunk: Int = decodeMaxBytesPerChunk + def getFileIODispatcher: String = fileIODispatcher + + override def withVerboseErrorMessages(verboseErrorMessages: Boolean): RoutingSettings = self.copy(verboseErrorMessages = verboseErrorMessages) + override def withFileGetConditional(fileGetConditional: Boolean): RoutingSettings = self.copy(fileGetConditional = fileGetConditional) + override def withRenderVanityFooter(renderVanityFooter: Boolean): RoutingSettings = self.copy(renderVanityFooter = renderVanityFooter) + override def withRangeCountLimit(rangeCountLimit: Int): RoutingSettings = self.copy(rangeCountLimit = rangeCountLimit) + override def withRangeCoalescingThreshold(rangeCoalescingThreshold: Long): RoutingSettings = self.copy(rangeCoalescingThreshold = rangeCoalescingThreshold) + override def withDecodeMaxBytesPerChunk(decodeMaxBytesPerChunk: Int): RoutingSettings = self.copy(decodeMaxBytesPerChunk = decodeMaxBytesPerChunk) + override def withFileIODispatcher(fileIODispatcher: String): RoutingSettings = self.copy(fileIODispatcher = fileIODispatcher) + +} + +object RoutingSettings extends SettingsCompanion[RoutingSettings] { + override def apply(config: Config): RoutingSettings = RoutingSettingsImpl(config) + override def apply(configOverrides: String): RoutingSettings = RoutingSettingsImpl(configOverrides) +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ServerSettings.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ServerSettings.scala new file mode 100644 index 0000000000..ffe0125221 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/ServerSettings.scala @@ -0,0 +1,98 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
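`RoutingSettings` follows the same copy-on-write pattern. A small sketch (values arbitrary, comments paraphrase the corresponding settings):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.settings.RoutingSettings

object RoutingSettingsSketch extends App {
  implicit val system = ActorSystem("routing-settings-sketch")

  val routingSettings: RoutingSettings =
    RoutingSettings(system)
      .withVerboseErrorMessages(true) // surface more failure detail to the client
      .withRangeCountLimit(16)        // cap the number of ranges served per Range header
}
```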
+ */ +package akka.http.scaladsl.settings + +import java.util.Random +import java.util.function.Supplier + +import akka.http.impl.settings.ServerSettingsImpl +import akka.http.impl.util.JavaMapping.Implicits._ +import akka.http.javadsl.{ model ⇒ jm, settings ⇒ js } +import akka.http.scaladsl.model.headers.Host +import akka.http.scaladsl.model.headers.Server +import akka.io.Inet.SocketOption +import com.typesafe.config.Config + +import scala.collection.JavaConverters._ +import scala.collection.immutable +import scala.compat.java8.OptionConverters +import scala.concurrent.duration.{ FiniteDuration, Duration } +import scala.language.implicitConversions + +/** + * Public API but not intended for subclassing + */ +abstract class ServerSettings private[akka] () extends akka.http.javadsl.settings.ServerSettings { self: ServerSettingsImpl ⇒ + def serverHeader: Option[Server] + def timeouts: ServerSettings.Timeouts + def maxConnections: Int + def pipeliningLimit: Int + def remoteAddressHeader: Boolean + def rawRequestUriHeader: Boolean + def transparentHeadRequests: Boolean + def verboseErrorMessages: Boolean + def responseHeaderSizeHint: Int + def backlog: Int + def socketOptions: immutable.Seq[SocketOption] + def defaultHostHeader: Host + def websocketRandomFactory: () ⇒ Random + def parserSettings: ParserSettings + + /* Java APIs */ + + override def getBacklog = backlog + override def getDefaultHostHeader = defaultHostHeader.asJava + override def getPipeliningLimit = pipeliningLimit + override def getParserSettings: js.ParserSettings = parserSettings + override def getMaxConnections = maxConnections + override def getTransparentHeadRequests = transparentHeadRequests + override def getResponseHeaderSizeHint = responseHeaderSizeHint + override def getVerboseErrorMessages = verboseErrorMessages + override def getSocketOptions = socketOptions.asJava + override def getServerHeader = OptionConverters.toJava(serverHeader.map(_.asJava)) + override def getTimeouts = timeouts + override def getRawRequestUriHeader = rawRequestUriHeader + override def getRemoteAddressHeader = remoteAddressHeader + override def getWebsocketRandomFactory = new Supplier[Random] { + override def get(): Random = websocketRandomFactory() + } + + // --- + + // override for more specific return type + override def withMaxConnections(newValue: Int): ServerSettings = self.copy(maxConnections = newValue) + override def withPipeliningLimit(newValue: Int): ServerSettings = self.copy(pipeliningLimit = newValue) + override def withRemoteAddressHeader(newValue: Boolean): ServerSettings = self.copy(remoteAddressHeader = newValue) + override def withRawRequestUriHeader(newValue: Boolean): ServerSettings = self.copy(rawRequestUriHeader = newValue) + override def withTransparentHeadRequests(newValue: Boolean): ServerSettings = self.copy(transparentHeadRequests = newValue) + override def withVerboseErrorMessages(newValue: Boolean): ServerSettings = self.copy(verboseErrorMessages = newValue) + override def withResponseHeaderSizeHint(newValue: Int): ServerSettings = self.copy(responseHeaderSizeHint = newValue) + override def withBacklog(newValue: Int): ServerSettings = self.copy(backlog = newValue) + override def withSocketOptions(newValue: java.lang.Iterable[SocketOption]): ServerSettings = self.copy(socketOptions = newValue.asScala.toList) + override def withWebsocketRandomFactory(newValue: java.util.function.Supplier[Random]): ServerSettings = self.copy(websocketRandomFactory = () ⇒ newValue.get()) + + // overloads for Scala idiomatic use + 
def withTimeouts(newValue: ServerSettings.Timeouts): ServerSettings = self.copy(timeouts = newValue) + def withServerHeader(newValue: Option[Server]): ServerSettings = self.copy(serverHeader = newValue) + def withDefaultHostHeader(newValue: Host): ServerSettings = self.copy(defaultHostHeader = newValue) + def withParserSettings(newValue: ParserSettings): ServerSettings = self.copy(parserSettings = newValue) + def withWebsocketRandomFactory(newValue: () ⇒ Random): ServerSettings = self.copy(websocketRandomFactory = newValue) + def withSocketOptions(newValue: immutable.Seq[SocketOption]): ServerSettings = self.copy(socketOptions = newValue) + +} + +object ServerSettings extends SettingsCompanion[ServerSettings] { + trait Timeouts extends akka.http.javadsl.settings.ServerSettings.Timeouts { + // --- + // override for more specific return types + override def withIdleTimeout(newValue: Duration): ServerSettings.Timeouts = self.copy(idleTimeout = newValue) + override def withRequestTimeout(newValue: Duration): ServerSettings.Timeouts = self.copy(requestTimeout = newValue) + override def withBindTimeout(newValue: FiniteDuration): ServerSettings.Timeouts = self.copy(bindTimeout = newValue) + } + + implicit def timeoutsShortcut(s: ServerSettings): Timeouts = s.timeouts + + override def apply(config: Config): ServerSettings = ServerSettingsImpl(config) + override def apply(configOverrides: String): ServerSettings = ServerSettingsImpl(configOverrides) +} \ No newline at end of file diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/settings/SettingsCompanion.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/SettingsCompanion.scala new file mode 100644 index 0000000000..e1b0ee6b38 --- /dev/null +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/settings/SettingsCompanion.scala @@ -0,0 +1,27 @@ +package akka.http.scaladsl.settings + +import akka.actor.{ ActorRefFactory, ActorSystem } +import com.typesafe.config.Config +import akka.http.impl.util._ + +/** INTERNAL API */ +private[akka] trait SettingsCompanion[T] { + + /** + * Creates an instance of settings using the configuration provided by the given ActorSystem. + */ + final def apply(system: ActorSystem): T = apply(system.settings.config) + implicit def default(implicit system: ActorRefFactory): T = apply(actorSystem) + + /** + * Creates an instance of settings using the given Config. + */ + def apply(config: Config): T + + /** + * Create an instance of settings using the given String of config overrides to override + * settings set in the class loader of this class (i.e. by application.conf or reference.conf files in + * the class loader of this class). + */ + def apply(configOverrides: String): T +} diff --git a/akka-http-core/src/main/scala/akka/http/scaladsl/util/FastFuture.scala b/akka-http-core/src/main/scala/akka/http/scaladsl/util/FastFuture.scala index ebee3e3af4..3949a902a5 100644 --- a/akka-http-core/src/main/scala/akka/http/scaladsl/util/FastFuture.scala +++ b/akka-http-core/src/main/scala/akka/http/scaladsl/util/FastFuture.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
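Putting the `ServerSettings` surface and the shared `SettingsCompanion` together: settings can be created from the `ActorSystem`'s config, from an explicit `Config`, or from a string of overrides layered over the defaults. A sketch (the header product name is illustrative, and the override form assumes fully qualified keys as in `application.conf`):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.model.headers.{ ProductVersion, Server }
import akka.http.scaladsl.settings.ServerSettings

object ServerSettingsSketch extends App {
  implicit val system = ActorSystem("server-settings-sketch")

  // From the ActorSystem's configuration, refined through the immutable copies.
  val tuned: ServerSettings =
    ServerSettings(system)
      .withMaxConnections(2048)
      .withBacklog(200)
      .withServerHeader(Some(Server(ProductVersion("sketch-server", "0.1"))))

  // From a string of config overrides on top of the reference configuration.
  val fromOverrides: ServerSettings =
    ServerSettings("akka.http.server.remote-address-header = on")
}
```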
*/ package akka.http.scaladsl.util diff --git a/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java b/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java index dc28db6798..8253bec02e 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/WSEchoTestClientApp.java @@ -1,14 +1,15 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl; +import akka.NotUsed; import akka.actor.ActorSystem; import akka.dispatch.Futures; import akka.http.javadsl.model.ws.Message; import akka.http.javadsl.model.ws.TextMessage; -import akka.http.javadsl.model.ws.WebsocketRequest; +import akka.http.javadsl.model.ws.WebSocketRequest; import akka.japi.function.Function; import akka.stream.ActorMaterializer; import akka.stream.Materializer; @@ -16,16 +17,17 @@ import akka.stream.javadsl.Flow; import akka.stream.javadsl.Keep; import akka.stream.javadsl.Sink; import akka.stream.javadsl.Source; -import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; public class WSEchoTestClientApp { private static final Function messageStringifier = new Function() { + private static final long serialVersionUID = 1L; @Override public String apply(Message msg) throws Exception { if (msg.isText() && msg.asTextMessage().isStrict()) @@ -49,35 +51,35 @@ public class WSEchoTestClientApp { system.dispatcher(), ignoredMessage); - Source echoSource = + Source echoSource = Source.from(Arrays.asList( TextMessage.create("abc"), TextMessage.create("def"), TextMessage.create("ghi") )).concat(Source.fromFuture(delayedCompletion).drop(1)); - Sink>> echoSink = + Sink>> echoSink = Flow.of(Message.class) .map(messageStringifier) .grouped(1000) - .toMat(Sink.>head(), Keep.>>right()); + .toMat(Sink.>head(), Keep.right()); - Flow>> echoClient = - Flow.fromSinkAndSourceMat(echoSink, echoSource, Keep.>, BoxedUnit>left()); + Flow>> echoClient = + Flow.fromSinkAndSourceMat(echoSink, echoSource, Keep.left()); - Future> result = - Http.get(system).singleWebsocketRequest( - WebsocketRequest.create("ws://echo.websocket.org"), + CompletionStage> result = + Http.get(system).singleWebSocketRequest( + WebSocketRequest.create("ws://echo.websocket.org"), echoClient, materializer ).second(); - List messages = Await.result(result, FiniteDuration.apply(10, "second")); + List messages = result.toCompletableFuture().get(10, TimeUnit.SECONDS); System.out.println("Collected " + messages.size() + " messages:"); for (String msg: messages) System.out.println(msg); } finally { - system.shutdown(); + system.terminate(); } } } diff --git a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaApiTestCases.java b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaApiTestCases.java index f33b9d0675..562693be94 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaApiTestCases.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaApiTestCases.java @@ -1,70 +1,100 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model; import akka.http.impl.util.Util; -import akka.http.javadsl.model.headers.Authorization; -import akka.http.javadsl.model.headers.HttpCredentials; +import akka.http.javadsl.model.headers.*; import akka.japi.Pair; public class JavaApiTestCases { - /** Builds a request for use on the client side */ - public static HttpRequest buildRequest() { + /** + * Builds a request for use on the client side + */ + public static HttpRequest buildRequest() { + return + HttpRequest.create() + .withMethod(HttpMethods.POST) + .withUri("/send"); + } + + /** + * A simple handler for an Http server + */ + public static HttpResponse handleRequest(HttpRequest request) { + if (request.method() == HttpMethods.GET) { + Uri uri = request.getUri(); + if (uri.path().equals("/hello")) { + String name = uri.query().get("name").orElse("Mister X"); + return - HttpRequest.create() - .withMethod(HttpMethods.POST) - .withUri("/send"); - } - - /** A simple handler for an Http server */ - public static HttpResponse handleRequest(HttpRequest request) { - if (request.method() == HttpMethods.GET) { - Uri uri = request.getUri(); - if (uri.path().equals("/hello")) { - String name = Util.getOrElse(uri.query().get("name"), "Mister X"); - - return - HttpResponse.create() - .withEntity("Hello " + name + "!"); - } else - return - HttpResponse.create() - .withStatus(404) - .withEntity("Not found"); - } else - return - HttpResponse.create() + HttpResponse.create() + .withEntity("Hello " + name + "!"); + } else { + return + HttpResponse.create() + .withStatus(404) + .withEntity("Not found"); + } + } else { + return + HttpResponse.create() .withStatus(StatusCodes.METHOD_NOT_ALLOWED) .withEntity("Unsupported method"); } + } - /** Adds authentication to an existing request */ - public static HttpRequest addAuthentication(HttpRequest request) { - // unused here but just to show the shortcut - request.addHeader(Authorization.basic("username", "password")); + /** + * Adds authentication to an existing request + */ + public static HttpRequest addAuthentication(HttpRequest request) { + // unused here but just to show the shortcut + request.addHeader(Authorization.basic("username", "password")); - return request - .addHeader(Authorization.create(HttpCredentials.createBasicHttpCredentials("username", "password"))); + return request + .addHeader(Authorization.create(HttpCredentials.createBasicHttpCredentials("username", "password"))); - } + } - /** Removes cookies from an existing request */ - public static HttpRequest removeCookies(HttpRequest request) { - return request.removeHeader("Cookie"); - } + /** + * Removes cookies from an existing request + */ + public static HttpRequest removeCookies(HttpRequest request) { + return request.removeHeader("Cookie"); + } - /** Build a uri to send a form */ - public static Uri createUriForOrder(String orderId, String price, String amount) { - return Uri.create("/order").query( - Query.create( - Pair.create("orderId", orderId), - Pair.create("price", price), - Pair.create("amount", amount))); - } + /** + * Build a uri to send a form + */ + public static Uri createUriForOrder(String orderId, String price, String amount) { + return Uri.create("/order").query( + Query.create( + Pair.create("orderId", orderId), + Pair.create("price", price), + Pair.create("amount", amount))); + } - public static Query addSessionId(Query query) { - return query.withParam("session", "abcdefghijkl"); - } + public static Query addSessionId(Query query) { + return query.withParam("session", 
"abcdefghijkl"); + } + + public static Object accessScalaDefinedJavadslContentTypeAndMediaType(ContentType type) { + Object anything = null; + + akka.http.javadsl.model.MediaType mediaType = type.mediaType(); + + // just for the sake of explicitly touching the interfaces + if (mediaType.binary()) anything = (akka.http.javadsl.model.MediaType.Binary) mediaType; + anything = (akka.http.javadsl.model.MediaType.Multipart) mediaType; + anything = (akka.http.javadsl.model.MediaType.WithOpenCharset) mediaType; + anything = (akka.http.javadsl.model.MediaType.WithFixedCharset) mediaType; + + if (type.binary()) anything = (akka.http.javadsl.model.ContentType.Binary) type; + anything = (akka.http.javadsl.model.ContentType.NonBinary) type; + anything = (akka.http.javadsl.model.ContentType.WithCharset) type; + anything = (akka.http.javadsl.model.ContentType.WithFixedCharset) type; + + return anything; + } } diff --git a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java index 01856c14e8..7f3ce29d7c 100644 --- a/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java +++ b/akka-http-core/src/test/java/akka/http/javadsl/model/JavaTestServer.java @@ -1,16 +1,17 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model; +import akka.NotUsed; import akka.actor.ActorSystem; import akka.http.javadsl.Http; import akka.http.javadsl.ServerBinding; import akka.japi.Function; import akka.http.javadsl.model.ws.Message; import akka.http.javadsl.model.ws.TextMessage; -import akka.http.javadsl.model.ws.Websocket; +import akka.http.javadsl.model.ws.WebSocket; import akka.japi.JavaPartialFunction; import akka.stream.ActorMaterializer; import akka.stream.Materializer; @@ -19,10 +20,10 @@ import akka.stream.javadsl.Source; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import java.io.BufferedReader; import java.io.InputStreamReader; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public class JavaTestServer { @@ -32,34 +33,34 @@ public class JavaTestServer { try { final Materializer materializer = ActorMaterializer.create(system); - Future serverBindingFuture = + CompletionStage serverBindingFuture = Http.get(system).bindAndHandleSync( new Function() { public HttpResponse apply(HttpRequest request) throws Exception { System.out.println("Handling request to " + request.getUri()); if (request.getUri().path().equals("/")) - return Websocket.handleWebsocketRequestWith(request, echoMessages()); + return WebSocket.handleWebSocketRequestWith(request, echoMessages()); else if (request.getUri().path().equals("/greeter")) - return Websocket.handleWebsocketRequestWith(request, greeter()); + return WebSocket.handleWebSocketRequestWith(request, greeter()); else return JavaApiTestCases.handleRequest(request); } }, "localhost", 8080, materializer); - Await.result(serverBindingFuture, new FiniteDuration(1, TimeUnit.SECONDS)); // will throw if binding fails + serverBindingFuture.toCompletableFuture().get(1, TimeUnit.SECONDS); // will throw if binding fails System.out.println("Press ENTER to stop."); new BufferedReader(new InputStreamReader(System.in)).readLine(); } finally { - system.shutdown(); + system.terminate(); } } - public static Flow echoMessages() { + public static Flow echoMessages() { return Flow.create(); // the 
identity operation } - public static Flow greeter() { + public static Flow greeter() { return Flow.create() .collect(new JavaPartialFunction() { diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/ClientCancellationSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/ClientCancellationSpec.scala index 49a498602a..e003ede18c 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/ClientCancellationSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/ClientCancellationSpec.scala @@ -2,7 +2,7 @@ package akka.http.impl.engine.client import javax.net.ssl.SSLContext -import akka.http.scaladsl.{ HttpsContext, Http } +import akka.http.scaladsl.{ ConnectionContext, Http } import akka.http.scaladsl.model.{ HttpHeader, HttpResponse, HttpRequest } import akka.stream.ActorMaterializer import akka.stream.scaladsl.{ Flow, Sink, Source } @@ -28,7 +28,7 @@ class ClientCancellationSpec extends AkkaSpec(""" { req ⇒ HttpResponse() }, // TLS client does full-close, no need for the connection:close header addressTls.getHostName, addressTls.getPort, - httpsContext = Some(HttpsContext(SSLContext.getDefault)))(noncheckedMaterializer) + connectionContext = ConnectionContext.https(SSLContext.getDefault))(noncheckedMaterializer) def testCase(connection: Flow[HttpRequest, HttpResponse, Any]): Unit = Utils.assertAllStagesStopped { val requests = TestPublisher.probe[HttpRequest]() @@ -57,7 +57,7 @@ class ClientCancellationSpec extends AkkaSpec(""" "support cancellation in simple outgoing connection with TLS" in { pending testCase( - Http().outgoingConnectionTls(addressTls.getHostName, addressTls.getPort)) + Http().outgoingConnectionHttps(addressTls.getHostName, addressTls.getPort)) } "support cancellation in pooled outgoing connection with TLS" in { @@ -65,7 +65,7 @@ class ClientCancellationSpec extends AkkaSpec(""" testCase( Flow[HttpRequest] .map((_, ())) - .via(Http().cachedHostConnectionPoolTls(addressTls.getHostName, addressTls.getPort)(noncheckedMaterializer)) + .via(Http().cachedHostConnectionPoolHttps(addressTls.getHostName, addressTls.getPort)(noncheckedMaterializer)) .map(_._1.get)) } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala index ff2a9b3961..b41590b149 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
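The `HttpsContext` to `ConnectionContext` migration visible in `ClientCancellationSpec` above applies symmetrically to servers and clients. A sketch using the JDK default `SSLContext` exactly as the spec does (a real deployment would supply a keystore-backed context, and host/port values are placeholders):

```scala
import javax.net.ssl.SSLContext
import akka.actor.ActorSystem
import akka.http.scaladsl.{ ConnectionContext, Http }
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }

object HttpsSketch extends App {
  implicit val system = ActorSystem("https-sketch")
  implicit val materializer = ActorMaterializer()

  // Server side: bind with an HTTPS connection context.
  Http().bindAndHandleSync(
    { _ => HttpResponse(entity = "hello over TLS") },
    "localhost", 8443,
    connectionContext = ConnectionContext.https(SSLContext.getDefault))

  // Client side: the renamed outgoingConnectionHttps variant of the connection API.
  val connectionFlow = Http().outgoingConnectionHttps("akka.io", 443)
  val response = Source.single(HttpRequest(uri = "/")).via(connectionFlow).runWith(Sink.head)
}
```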
*/ package akka.http.impl.engine.client @@ -8,6 +8,8 @@ import java.net.InetSocketAddress import java.nio.ByteBuffer import java.nio.channels.{ SocketChannel, ServerSocketChannel } import java.util.concurrent.atomic.AtomicInteger +import akka.http.impl.settings.ConnectionPoolSettingsImpl + import scala.concurrent.Await import scala.concurrent.duration._ import scala.util.control.NonFatal @@ -15,7 +17,7 @@ import scala.util.{ Failure, Success, Try } import akka.util.ByteString import akka.http.scaladsl.{ TestUtils, Http } import akka.http.impl.util.{ SingletonException, StreamUtils } -import akka.http.{ ClientConnectionSettings, ConnectionPoolSettings, ServerSettings } +import akka.http.scaladsl.settings.{ ClientConnectionSettings, ConnectionPoolSettings, ServerSettings } import akka.stream.io.{ SessionBytes, SendBytes, SslTlsOutbound } import akka.stream.{ BidiShape, ActorMaterializer } import akka.stream.testkit.{ TestPublisher, TestSubscriber, AkkaSpec } @@ -128,7 +130,7 @@ class ConnectionPoolSpec extends AkkaSpec(""" } "be able to handle 500 pipelined requests against the test server" in new TestSetup { - val settings = ConnectionPoolSettings(system).copy(maxConnections = 4, pipeliningLimit = 2) + val settings = ConnectionPoolSettings(system).withMaxConnections(4).withPipeliningLimit(2) val poolFlow = Http().cachedHostConnectionPool[Int](serverHostName, serverPort, settings = settings) val N = 500 @@ -229,7 +231,7 @@ class ConnectionPoolSpec extends AkkaSpec(""" } "The single-request client infrastructure" should { - class LocalTestSetup extends TestSetup(ServerSettings(system).copy(rawRequestUriHeader = true), autoAccept = true) + class LocalTestSetup extends TestSetup(ServerSettings(system).withRawRequestUriHeader(true), autoAccept = true) "transform absolute request URIs into relative URIs plus host header" in new LocalTestSetup { val request = HttpRequest(uri = s"http://$serverHostName:$serverPort/abc?query#fragment") @@ -303,8 +305,7 @@ class ConnectionPoolSpec extends AkkaSpec(""" .transform(StreamUtils.recover { case NoErrorComplete ⇒ ByteString.empty }), Flow[ByteString].map(SessionBytes(null, _))) val sink = if (autoAccept) Sink.foreach[Http.IncomingConnection](handleConnection) else Sink.fromSubscriber(incomingConnections) - // TODO getHostString in Java7 - Tcp().bind(serverEndpoint.getHostName, serverEndpoint.getPort, idleTimeout = serverSettings.timeouts.idleTimeout) + Tcp().bind(serverEndpoint.getHostString, serverEndpoint.getPort, idleTimeout = serverSettings.timeouts.idleTimeout) .map { c ⇒ val layer = Http().serverLayer(serverSettings, log = log) Http.IncomingConnection(c.localAddress, c.remoteAddress, layer atop rawBytesInjection join c.flow) @@ -327,7 +328,7 @@ class ConnectionPoolSpec extends AkkaSpec(""" pipeliningLimit: Int = 1, idleTimeout: Duration = 5.seconds, ccSettings: ClientConnectionSettings = ClientConnectionSettings(system)) = { - val settings = ConnectionPoolSettings(maxConnections, maxRetries, maxOpenRequests, pipeliningLimit, + val settings = new ConnectionPoolSettingsImpl(maxConnections, maxRetries, maxOpenRequests, pipeliningLimit, idleTimeout, ClientConnectionSettings(system)) flowTestBench(Http().cachedHostConnectionPool[T](serverHostName, serverPort, settings)) } @@ -338,9 +339,9 @@ class ConnectionPoolSpec extends AkkaSpec(""" pipeliningLimit: Int = 1, idleTimeout: Duration = 5.seconds, ccSettings: ClientConnectionSettings = ClientConnectionSettings(system)) = { - val settings = ConnectionPoolSettings(maxConnections, maxRetries, maxOpenRequests, 
pipeliningLimit, + val settings = new ConnectionPoolSettingsImpl(maxConnections, maxRetries, maxOpenRequests, pipeliningLimit, idleTimeout, ClientConnectionSettings(system)) - flowTestBench(Http().superPool[T](settings)) + flowTestBench(Http().superPool[T](settings = settings)) } def flowTestBench[T, Mat](poolFlow: Flow[(HttpRequest, T), (Try[HttpResponse], T), Mat]) = { @@ -356,6 +357,8 @@ class ConnectionPoolSpec extends AkkaSpec(""" } case class ConnNrHeader(nr: Int) extends CustomHeader { + def renderInRequests = false + def renderInResponses = true def name = "Conn-Nr" def value = nr.toString } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/HighLevelOutgoingConnectionSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/HighLevelOutgoingConnectionSpec.scala index 32c659eb2d..b553f2bb7b 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/HighLevelOutgoingConnectionSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/HighLevelOutgoingConnectionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client @@ -11,16 +11,19 @@ import akka.stream.scaladsl._ import akka.stream.testkit.AkkaSpec import akka.http.scaladsl.{ Http, TestUtils } import akka.http.scaladsl.model._ +import akka.stream.testkit.Utils +import org.scalatest.concurrent.ScalaFutures -class HighLevelOutgoingConnectionSpec extends AkkaSpec { +class HighLevelOutgoingConnectionSpec extends AkkaSpec with ScalaFutures { implicit val materializer = ActorMaterializer() + implicit val patience = PatienceConfig(1.second) "The connection-level client implementation" should { - "be able to handle 100 pipelined requests across one connection" in { + "be able to handle 100 pipelined requests across one connection" in Utils.assertAllStagesStopped { val (_, serverHostName, serverPort) = TestUtils.temporaryServerHostnameAndPort() - Http().bindAndHandleSync(r ⇒ HttpResponse(entity = r.uri.toString.reverse.takeWhile(Character.isDigit).reverse), + val binding = Http().bindAndHandleSync(r ⇒ HttpResponse(entity = r.uri.toString.reverse.takeWhile(Character.isDigit).reverse), serverHostName, serverPort) val N = 100 @@ -32,13 +35,14 @@ class HighLevelOutgoingConnectionSpec extends AkkaSpec { .map { r ⇒ val s = r.data.utf8String; log.debug(s); s.toInt } .runFold(0)(_ + _) - Await.result(result, 10.seconds) shouldEqual N * (N + 1) / 2 + result.futureValue(PatienceConfig(10.seconds)) shouldEqual N * (N + 1) / 2 + binding.futureValue.unbind() } - "be able to handle 100 pipelined requests across 4 connections (client-flow is reusable)" in { + "be able to handle 100 pipelined requests across 4 connections (client-flow is reusable)" in Utils.assertAllStagesStopped { val (_, serverHostName, serverPort) = TestUtils.temporaryServerHostnameAndPort() - Http().bindAndHandleSync(r ⇒ HttpResponse(entity = r.uri.toString.reverse.takeWhile(Character.isDigit).reverse), + val binding = Http().bindAndHandleSync(r ⇒ HttpResponse(entity = r.uri.toString.reverse.takeWhile(Character.isDigit).reverse), serverHostName, serverPort) val connFlow = Http().outgoingConnection(serverHostName, serverPort) @@ -64,12 +68,14 @@ class HighLevelOutgoingConnectionSpec extends AkkaSpec { .map { r ⇒ val s = r.data.utf8String; log.debug(s); s.toInt } .runFold(0)(_ + _) - Await.result(result, 10.seconds) shouldEqual C * N * (N + 1) / 2 + result.futureValue(PatienceConfig(10.seconds)) shouldEqual C * N * (N + 1) / 2 
+ binding.futureValue.unbind() } - "catch response stream truncation" in { + "catch response stream truncation" in Utils.assertAllStagesStopped { val (_, serverHostName, serverPort) = TestUtils.temporaryServerHostnameAndPort() - Http().bindAndHandleSync({ + + val binding = Http().bindAndHandleSync({ case HttpRequest(_, Uri.Path("/b"), _, _, _) ⇒ HttpResponse(headers = List(headers.Connection("close"))) case _ ⇒ HttpResponse() }, serverHostName, serverPort) @@ -81,6 +87,7 @@ class HighLevelOutgoingConnectionSpec extends AkkaSpec { .runWith(Sink.head) a[One2OneBidiFlow.OutputTruncationException.type] should be thrownBy Await.result(x, 1.second) + binding.futureValue.unbind() } } } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/HttpConfigurationSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/HttpConfigurationSpec.scala index fb8699ebc9..2a1564dc14 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/HttpConfigurationSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/HttpConfigurationSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client import akka.actor.ActorSystem -import akka.http.{ ClientConnectionSettings, ConnectionPoolSettings, ServerSettings } +import akka.http.scaladsl.settings.{ ClientConnectionSettings, ConnectionPoolSettings, ServerSettings } import akka.stream.testkit.AkkaSpec import com.typesafe.config.ConfigFactory @@ -135,7 +135,7 @@ class HttpConfigurationSpec extends AkkaSpec { val config = ConfigFactory.parseString(overrides).withFallback(ConfigFactory.load()) // we go via ActorSystem in order to hit the settings caching infrastructure val sys = ActorSystem("config-testing", config) - try block(sys) finally sys.shutdown() + try block(sys) finally sys.terminate() } } \ No newline at end of file diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/LowLevelOutgoingConnectionSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/LowLevelOutgoingConnectionSpec.scala index ef2f72b147..237e57b45d 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/LowLevelOutgoingConnectionSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/LowLevelOutgoingConnectionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client @@ -7,7 +7,7 @@ package akka.http.impl.engine.client import scala.concurrent.duration._ import scala.reflect.ClassTag import org.scalatest.Inside -import akka.http.ClientConnectionSettings +import akka.http.scaladsl.settings.ClientConnectionSettings import akka.stream.io.{ SessionBytes, SslTlsOutbound, SendBytes } import akka.util.ByteString import akka.event.NoLogging @@ -521,9 +521,9 @@ class LowLevelOutgoingConnectionSpec extends AkkaSpec("akka.loggers = []\n akka. 
def settings = { val s = ClientConnectionSettings(system) - .copy(userAgentHeader = Some(`User-Agent`(List(ProductVersion("akka-http", "test"))))) + .withUserAgentHeader(Some(`User-Agent`(List(ProductVersion("akka-http", "test"))))) if (maxResponseContentLength < 0) s - else s.copy(parserSettings = s.parserSettings.copy(maxContentLength = maxResponseContentLength)) + else s.withParserSettings(s.parserSettings.withMaxContentLength(maxResponseContentLength)) } val (netOut, netIn) = { diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/client/TlsEndpointVerificationSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/client/TlsEndpointVerificationSpec.scala index 0948a75c05..f70691837b 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/client/TlsEndpointVerificationSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/client/TlsEndpointVerificationSpec.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.client +import akka.NotUsed import org.scalatest.concurrent.PatienceConfiguration.Timeout import org.scalatest.concurrent.ScalaFutures import akka.stream.ActorMaterializer @@ -11,7 +12,7 @@ import akka.stream.io._ import akka.stream.scaladsl._ import akka.stream.testkit.AkkaSpec import akka.http.impl.util._ -import akka.http.scaladsl.{ HttpsContext, Http } +import akka.http.scaladsl.{ ConnectionContext, Http } import akka.http.scaladsl.model.{ StatusCodes, HttpResponse, HttpRequest } import akka.http.scaladsl.model.headers.{ Host, `Tls-Session-Info` } import org.scalatest.time.{ Span, Seconds } @@ -69,19 +70,14 @@ class TlsEndpointVerificationSpec extends AkkaSpec(""" val ex = intercept[Exception] { Http().singleRequest(req).futureValue } - if (Java6Compat.isJava6) { - // our manual verification - ex.getMessage should include("Hostname verification failed") - } else { - // JDK built-in verification - val expectedMsg = "No subject alternative DNS name matching www.howsmyssl.com found" + // JDK built-in verification + val expectedMsg = "No subject alternative DNS name matching www.howsmyssl.com found" - var e: Throwable = ex - while (e.getCause != null) e = e.getCause + var e: Throwable = ex + while (e.getCause != null) e = e.getCause - info("TLS failure cause: " + e.getMessage) - e.getMessage should include(expectedMsg) - } + info("TLS failure cause: " + e.getMessage) + e.getMessage should include(expectedMsg) } "pass hostname verification on https://www.playframework.com/" in { @@ -92,10 +88,10 @@ class TlsEndpointVerificationSpec extends AkkaSpec(""" } } - def pipeline(clientContext: HttpsContext, hostname: String): HttpRequest ⇒ Future[HttpResponse] = req ⇒ + def pipeline(clientContext: ConnectionContext, hostname: String): HttpRequest ⇒ Future[HttpResponse] = req ⇒ Source.single(req).via(pipelineFlow(clientContext, hostname)).runWith(Sink.head) - def pipelineFlow(clientContext: HttpsContext, hostname: String): Flow[HttpRequest, HttpResponse, Unit] = { + def pipelineFlow(clientContext: ConnectionContext, hostname: String): Flow[HttpRequest, HttpResponse, NotUsed] = { val handler: HttpRequest ⇒ HttpResponse = { req ⇒ // verify Tls-Session-Info header information val name = req.header[`Tls-Session-Info`].flatMap(_.localPrincipal).map(_.getName) @@ -103,8 +99,8 @@ class TlsEndpointVerificationSpec extends AkkaSpec(""" else HttpResponse(StatusCodes.BadRequest, entity = "Tls-Session-Info header verification failed") } - val serverSideTls = 
Http().sslTlsStage(Some(ExampleHttpContexts.exampleServerContext), Server) - val clientSideTls = Http().sslTlsStage(Some(clientContext), Client, Some(hostname -> 8080)) + val serverSideTls = Http().sslTlsStage(ExampleHttpContexts.exampleServerContext, Server) + val clientSideTls = Http().sslTlsStage(clientContext, Client, Some(hostname -> 8080)) val server = Http().serverLayer() diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/BoyerMooreSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/BoyerMooreSpec.scala index 8c6561f79c..a681a869dc 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/BoyerMooreSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/BoyerMooreSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ContentLengthHeaderParserSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ContentLengthHeaderParserSpec.scala index 54b3e1d2c0..65e3d7e886 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ContentLengthHeaderParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ContentLengthHeaderParserSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala index e0e631c5c7..5653afbc67 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.parsing import java.lang.{ StringBuilder ⇒ JStringBuilder } -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import com.typesafe.config.{ ConfigFactory, Config } import scala.annotation.tailrec import scala.util.Random @@ -108,7 +108,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll } "retrieve the EmptyHeader" in new TestSetup() { - parseAndCache("\r\n")() shouldEqual HttpHeaderParser.EmptyHeader + parseAndCache("\r\n")() shouldEqual EmptyHeader } "retrieve a cached header with an exact header name match" in new TestSetup() { @@ -228,7 +228,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() def check(pair: (String, String)) = { val (expected, actual) = pair diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserTestBed.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserTestBed.scala index 448c946f96..b8dcb95b9a 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserTestBed.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserTestBed.scala @@ -1,7 +1,7 @@ package akka.http.impl.engine.parsing import akka.actor.ActorSystem -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import com.typesafe.config.{ ConfigFactory, Config } object HttpHeaderParserTestBed extends App { @@ -30,5 +30,5 @@ object HttpHeaderParserTestBed extends App { """.stripMargin.replace("%TRIE%", parser.formatTrie) } - system.shutdown() + system.terminate() } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala index e90494bc3a..c1bb085683 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.parsing +import akka.NotUsed + import scala.concurrent.Future import scala.concurrent.duration._ @@ -21,7 +23,7 @@ import akka.stream.io.{ SslTlsPlacebo, SessionBytes } import org.scalatest.matchers.Matcher import org.scalatest.{ BeforeAndAfterAll, FreeSpec, Matchers } -import akka.http.ParserSettings +import akka.http.scaladsl.settings.ParserSettings import akka.http.impl.engine.parsing.ParserOutput._ import akka.http.impl.util._ import akka.http.scaladsl.model.HttpEntity._ @@ -433,7 +435,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() private class Test { def awaitAtMost: FiniteDuration = 250.millis @@ -521,5 +523,5 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { def prep(response: String) = response.stripMarginWithNewline("\r\n") } - def source[T](elems: T*): Source[T, Unit] = Source(elems.toList) + def source[T](elems: T*): Source[T, NotUsed] = Source(elems.toList) } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala index 00f46b4b75..ed0362fa71 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.parsing -import akka.http.ParserSettings +import akka.NotUsed +import akka.http.scaladsl.settings.ParserSettings import akka.http.scaladsl.util.FastFuture import akka.stream.io.{ SslTlsPlacebo, SessionBytes } import com.typesafe.config.{ ConfigFactory, Config } @@ -239,7 +240,7 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() private class Test { var closeAfterResponseCompletion = Seq.empty[Boolean] @@ -289,7 +290,7 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { }.map(strictEqualify) } - def rawParse(requestMethod: HttpMethod, input: String*): Source[Either[ResponseOutput, HttpResponse], Unit] = + def rawParse(requestMethod: HttpMethod, input: String*): Source[Either[ResponseOutput, HttpResponse], NotUsed] = Source(input.toList) .map(bytes ⇒ SessionBytes(SslTlsPlacebo.dummySession, ByteString(bytes))) .transform(() ⇒ newParserStage(requestMethod)).named("parser") @@ -328,6 +329,6 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll { def prep(response: String) = response.stripMarginWithNewline("\r\n") - def source[T](elems: T*): Source[T, Unit] = Source(elems.toList) + def source[T](elems: T*): Source[T, NotUsed] = Source(elems.toList) } } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/RequestRendererSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/RequestRendererSpec.scala index e3391378ae..9129624057 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/RequestRendererSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/RequestRendererSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.rendering @@ -66,11 +66,11 @@ class RequestRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll "POST request, a few headers (incl. a custom Host header) and no body" in new TestSetup() { HttpRequest(POST, "/abc/xyz", List( RawHeader("X-Fancy", "naa"), - Age(0), + Link(Uri("http://akka.io"), LinkParams.first), Host("spray.io", 9999))) should renderTo { """POST /abc/xyz HTTP/1.1 |X-Fancy: naa - |Age: 0 + |Link: ; rel=first |Host: spray.io:9999 |User-Agent: akka-http/1.0.0 |Content-Length: 0 @@ -262,8 +262,10 @@ class RequestRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll } } "render a CustomHeader header" - { - "if suppressRendering = false" in new TestSetup(None) { + "if renderInRequests = true" in new TestSetup(None) { case class MyHeader(number: Int) extends CustomHeader { + def renderInRequests = true + def renderInResponses = false def name: String = "X-My-Header" def value: String = s"No$number" } @@ -275,10 +277,10 @@ class RequestRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll |""" } } - "not if suppressRendering = true" in new TestSetup(None) { + "not if renderInRequests = false" in new TestSetup(None) { case class MyInternalHeader(number: Int) extends CustomHeader { - override def suppressRendering: Boolean = true - + def renderInRequests = false + def renderInResponses = false def name: String = "X-My-Internal-Header" def value: String = s"No$number" } @@ -314,7 +316,7 @@ class RequestRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() class TestSetup(val userAgent: Option[`User-Agent`] = Some(`User-Agent`("akka-http/1.0.0")), serverAddress: InetSocketAddress = new InetSocketAddress("test.com", 8080)) diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala index 9ba309c58e..7596762e89 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.rendering @@ -414,8 +414,10 @@ class ResponseRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll } "render a CustomHeader header" - { - "if suppressRendering = false" in new TestSetup(None) { + "if renderInResponses = true" in new TestSetup(None) { case class MyHeader(number: Int) extends CustomHeader { + def renderInRequests = false + def renderInResponses = true def name: String = "X-My-Header" def value: String = s"No$number" } @@ -428,10 +430,10 @@ class ResponseRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll |""" } } - "not if suppressRendering = true" in new TestSetup(None) { + "not if renderInResponses = false" in new TestSetup(None) { case class MyInternalHeader(number: Int) extends CustomHeader { - override def suppressRendering: Boolean = true - + def renderInRequests = false + def renderInResponses = false def name: String = "X-My-Internal-Header" def value: String = s"No$number" } @@ -575,7 +577,7 @@ class ResponseRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() class TestSetup(val serverHeader: Option[Server] = Some(Server("akka-http/1.0.0"))) extends HttpResponseRendererFactory(serverHeader, responseHeaderSizeHint = 64, NoLogging) { @@ -590,7 +592,7 @@ class ResponseRendererSpec extends FreeSpec with Matchers with BeforeAndAfterAll .via(renderer.named("renderer")) .map { case ResponseRenderingOutput.HttpData(bytes) ⇒ bytes - case _: ResponseRenderingOutput.SwitchToWebsocket ⇒ throw new IllegalStateException("Didn't expect websocket response") + case _: ResponseRenderingOutput.SwitchToWebSocket ⇒ throw new IllegalStateException("Didn't expect websocket response") } .groupedWithin(1000, 100.millis) .viaMat(StreamUtils.identityFinishReporter[Seq[ByteString]])(Keep.right) diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerSpec.scala index 007a5469a0..720bf6a1dd 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerSpec.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.server import java.net.{ InetAddress, InetSocketAddress } -import akka.http.ServerSettings +import akka.http.scaladsl.settings.ServerSettings import scala.reflect.ClassTag import scala.util.Random import scala.annotation.tailrec @@ -22,7 +22,10 @@ import HttpEntity._ import MediaTypes._ import HttpMethods._ -class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") with Inside { spec ⇒ +class HttpServerSpec extends AkkaSpec( + """akka.loggers = [] + akka.loglevel = OFF + akka.http.server.request-timeout = infinite""") with Inside { spec ⇒ implicit val materializer = ActorMaterializer() "The server implementation" should { @@ -363,7 +366,7 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") } "translate HEAD request to GET request when transparent-head-requests are enabled" in new TestSetup { - override def settings = ServerSettings(system).copy(transparentHeadRequests = true) + override def settings = ServerSettings(system).withTransparentHeadRequests(true) send("""HEAD / HTTP/1.1 |Host: example.com | @@ -372,7 +375,7 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") } "keep HEAD request when transparent-head-requests are disabled" in new TestSetup { - override def settings = ServerSettings(system).copy(transparentHeadRequests = false) + override def settings = ServerSettings(system).withTransparentHeadRequests(false) send("""HEAD / HTTP/1.1 |Host: example.com | @@ -659,7 +662,7 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") expectRequest() shouldEqual HttpRequest(uri = "http://example.com/abc", protocol = HttpProtocols.`HTTP/1.0`) - override def settings: ServerSettings = super.settings.copy(defaultHostHeader = Host("example.com")) + override def settings: ServerSettings = super.settings.withDefaultHostHeader(Host("example.com")) } "fail an HTTP/1.0 request with 400 if no default-host-header is set" in new TestSetup { @@ -687,7 +690,7 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") Some(new InetSocketAddress(theAddress, 8080)) override def settings: ServerSettings = - super.settings.copy(remoteAddressHeader = true) + super.settings.withRemoteAddressHeader(true) send("""GET / HTTP/1.1 |Host: example.com @@ -698,6 +701,82 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") request.headers should contain(`Remote-Address`(RemoteAddress(theAddress, Some(8080)))) } + "support request timeouts" which { + + "are defined via the config" in new RequestTimeoutTestSetup(10.millis) { + send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") + expectRequest().header[`Timeout-Access`] shouldBe defined + expectResponseWithWipedDate( + """HTTP/1.1 503 Service Unavailable + |Server: akka-http/test + |Date: XXXX + |Content-Type: text/plain; charset=UTF-8 + |Content-Length: 105 + | + |The server was not able to produce a timely response to your request. 
+ |Please try again in a short while!""") + } + + "are programmatically increased (not expiring)" in new RequestTimeoutTestSetup(10.millis) { + send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") + expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(50.millis)) + netOut.expectNoBytes(30.millis) + responses.sendNext(HttpResponse()) + expectResponseWithWipedDate( + """HTTP/1.1 200 OK + |Server: akka-http/test + |Date: XXXX + |Content-Length: 0 + | + |""") + } + + "are programmatically increased (expiring)" in new RequestTimeoutTestSetup(10.millis) { + send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") + expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(50.millis)) + netOut.expectNoBytes(30.millis) + expectResponseWithWipedDate( + """HTTP/1.1 503 Service Unavailable + |Server: akka-http/test + |Date: XXXX + |Content-Type: text/plain; charset=UTF-8 + |Content-Length: 105 + | + |The server was not able to produce a timely response to your request. + |Please try again in a short while!""") + } + + "are programmatically decreased" in new RequestTimeoutTestSetup(50.millis) { + send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") + expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(10.millis)) + val mark = System.nanoTime() + expectResponseWithWipedDate( + """HTTP/1.1 503 Service Unavailable + |Server: akka-http/test + |Date: XXXX + |Content-Type: text/plain; charset=UTF-8 + |Content-Length: 105 + | + |The server was not able to produce a timely response to your request. + |Please try again in a short while!""") + (System.nanoTime() - mark) should be < (40 * 1000000L) + } + + "have a programmatically set timeout handler" in new RequestTimeoutTestSetup(10.millis) { + send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n") + val timeoutResponse = HttpResponse(StatusCodes.InternalServerError, entity = "OOPS!") + expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateHandler(_ ⇒ timeoutResponse)) + expectResponseWithWipedDate( + """HTTP/1.1 500 Internal Server Error + |Server: akka-http/test + |Date: XXXX + |Content-Type: text/plain; charset=UTF-8 + |Content-Length: 5 + | + |OOPS!""") + } + } + "add `Connection: close` to early responses" in new TestSetup { send("""POST / HTTP/1.1 |Host: example.com @@ -723,8 +802,7 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") netOut.expectComplete() } - def isDefinedVia = afterWord("is defined via") - "support request length verification" which isDefinedVia { + "support request length verification" which afterWord("is defined via") { class LengthVerificationTest(maxContentLength: Int) extends TestSetup(maxContentLength) { val entityBase = "0123456789ABCD" @@ -909,7 +987,13 @@ class HttpServerSpec extends AkkaSpec("akka.loggers = []\n akka.loglevel = OFF") override def settings = { val s = super.settings if (maxContentLength < 0) s - else s.copy(parserSettings = s.parserSettings.copy(maxContentLength = maxContentLength)) + else s.withParserSettings(s.parserSettings.withMaxContentLength(maxContentLength)) + } + } + class RequestTimeoutTestSetup(requestTimeout: Duration) extends TestSetup { + override def settings = { + val s = super.settings + s.withTimeouts(s.timeouts.withRequestTimeout(requestTimeout)) } } } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerTestSetupBase.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerTestSetupBase.scala index 407adcbbda..af4b76b46e 100644 
--- a/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerTestSetupBase.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/server/HttpServerTestSetupBase.scala @@ -1,11 +1,12 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.server import java.net.InetSocketAddress import akka.http.impl.engine.ws.ByteStringSinkProbe +import akka.http.scaladsl.settings.ServerSettings import akka.stream.io.{ SendBytes, SslTlsOutbound, SessionBytes } import scala.concurrent.duration.FiniteDuration import akka.actor.ActorSystem @@ -15,7 +16,6 @@ import akka.stream.{ ClosedShape, Materializer } import akka.stream.scaladsl._ import akka.stream.testkit.{ TestPublisher, TestSubscriber } import akka.http.impl.util._ -import akka.http.ServerSettings import akka.http.scaladsl.model.headers.{ ProductVersion, Server } import akka.http.scaladsl.model.{ HttpResponse, HttpRequest } import akka.stream.OverflowStrategy @@ -28,7 +28,7 @@ abstract class HttpServerTestSetupBase { val responses = TestPublisher.probe[HttpResponse]() def settings = ServerSettings(system) - .copy(serverHeader = Some(Server(List(ProductVersion("akka-http", "test"))))) + .withServerHeader(Some(Server(List(ProductVersion("akka-http", "test"))))) def remoteAddress: Option[InetSocketAddress] = None val (netIn, netOut) = { diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/BitBuilder.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/BitBuilder.scala index bcb824edc7..aa60878a01 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/BitBuilder.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/BitBuilder.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/ByteStringSinkProbe.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/ByteStringSinkProbe.scala index d3e4abb030..2c8a297ee6 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/ByteStringSinkProbe.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/ByteStringSinkProbe.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws +import akka.NotUsed import akka.actor.ActorSystem import akka.stream.scaladsl.Sink import akka.stream.testkit.TestSubscriber @@ -13,7 +14,7 @@ import scala.annotation.tailrec import scala.concurrent.duration.FiniteDuration trait ByteStringSinkProbe { - def sink: Sink[ByteString, Unit] + def sink: Sink[ByteString, NotUsed] def expectBytes(length: Int): ByteString def expectBytes(expected: ByteString): Unit @@ -35,7 +36,7 @@ object ByteStringSinkProbe { def apply()(implicit system: ActorSystem): ByteStringSinkProbe = new ByteStringSinkProbe { val probe = TestSubscriber.probe[ByteString]() - val sink: Sink[ByteString, Unit] = Sink.fromSubscriber(probe) + val sink: Sink[ByteString, NotUsed] = Sink.fromSubscriber(probe) def expectNoBytes(): Unit = { probe.ensureSubscription() diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/EchoTestClientApp.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/EchoTestClientApp.scala index f687517807..73c3c5506a 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/EchoTestClientApp.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/EchoTestClientApp.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws +import akka.NotUsed + import scala.concurrent.duration._ import akka.actor.ActorSystem @@ -24,10 +26,10 @@ object EchoTestClientApp extends App { import system.dispatcher implicit val materializer = ActorMaterializer() - def delayedCompletion(delay: FiniteDuration): Source[Nothing, Unit] = + def delayedCompletion(delay: FiniteDuration): Source[Nothing, NotUsed] = Source.single(1) .mapAsync(1)(_ ⇒ akka.pattern.after(delay, system.scheduler)(Future(1))) - .drop(1).asInstanceOf[Source[Nothing, Unit]] + .drop(1).asInstanceOf[Source[Nothing, NotUsed]] def messages: List[Message] = List( @@ -36,7 +38,7 @@ object EchoTestClientApp extends App { TextMessage("Test 2"), BinaryMessage(ByteString("def"))) - def source: Source[Message, Unit] = + def source: Source[Message, NotUsed] = Source(messages) ++ delayedCompletion(1.second) // otherwise, we may start closing too soon def sink: Sink[Message, Future[Seq[String]]] = @@ -52,17 +54,17 @@ object EchoTestClientApp extends App { def echoClient = Flow.fromSinkAndSourceMat(sink, source)(Keep.left) - val (upgrade, res) = Http().singleWebsocketRequest("wss://echo.websocket.org", echoClient) + val (upgrade, res) = Http().singleWebSocketRequest("wss://echo.websocket.org", echoClient) res onComplete { case Success(res) ⇒ println("Run successful. Got these elements:") res.foreach(println) - system.shutdown() + system.terminate() case Failure(e) ⇒ println("Run failed.") e.printStackTrace() - system.shutdown() + system.terminate() } - system.scheduler.scheduleOnce(10.seconds)(system.shutdown()) + system.scheduler.scheduleOnce(10.seconds)(system.terminate()) } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/FramingSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/FramingSpec.scala index 095eef7e51..684a01e73a 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/FramingSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/FramingSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws @@ -18,7 +18,7 @@ import Protocol.Opcode class FramingSpec extends FreeSpec with Matchers with WithMaterializerSpec { import BitBuilder._ - "The Websocket parser/renderer round-trip should work for" - { + "The WebSocket parser/renderer round-trip should work for" - { "the frame header" - { "interpret flags correctly" - { "FIN" in { diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala index 9e5a88d596..1b1c70e93b 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws +import akka.NotUsed + import scala.concurrent.duration._ import scala.util.Random import org.scalatest.{ Matchers, FreeSpec } @@ -24,7 +26,7 @@ class MessageSpec extends FreeSpec with Matchers with WithMaterializerSpec { 0 // but don't finish it ) - "The Websocket implementation should" - { + "The WebSocket implementation should" - { "collect messages from frames" - { "for binary messages" - { "for an empty message" in new ClientTestSetup { @@ -854,7 +856,7 @@ class MessageSpec extends FreeSpec with Matchers with WithMaterializerSpec { val messageIn = TestSubscriber.probe[Message] val messageOut = TestPublisher.probe[Message]() - val messageHandler: Flow[Message, Message, Unit] = + val messageHandler: Flow[Message, Message, NotUsed] = Flow.fromSinkAndSource( Flow[Message].buffer(1, OverflowStrategy.backpressure).to(Sink.fromSubscriber(messageIn)), // alternatively need to request(1) before expectComplete Source.fromPublisher(messageOut)) @@ -862,7 +864,7 @@ class MessageSpec extends FreeSpec with Matchers with WithMaterializerSpec { Source.fromPublisher(netIn) .via(printEvent("netIn")) .via(FrameEventParser) - .via(Websocket + .via(WebSocket .stack(serverSide, maskingRandomFactory = Randoms.SecureRandomInstances, closeTimeout = closeTimeout, log = system.log) .join(messageHandler)) .via(printEvent("frameRendererIn")) @@ -971,7 +973,7 @@ class MessageSpec extends FreeSpec with Matchers with WithMaterializerSpec { } val trace = false // set to `true` for debugging purposes - def printEvent[T](marker: String): Flow[T, T, Unit] = + def printEvent[T](marker: String): Flow[T, T, NotUsed] = if (trace) akka.http.impl.util.printEvent(marker) else Flow[T] } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/Utf8CodingSpecs.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/Utf8CodingSpecs.scala index 6d0d5c4b14..cce0103158 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/Utf8CodingSpecs.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/Utf8CodingSpecs.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSClientAutobahnTest.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSClientAutobahnTest.scala index 50a42e90c3..0d58bcfd84 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSClientAutobahnTest.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSClientAutobahnTest.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws @@ -159,15 +159,15 @@ object WSClientAutobahnTest extends App { def updateReportsAndShutdown(): Unit = updateReports().onComplete { res ⇒ println("Reports should now be accessible at http://localhost:8080/cwd/reports/clients/index.html") - system.shutdown() + system.terminate() } import scala.concurrent.duration._ import system.dispatcher - system.scheduler.scheduleOnce(60.seconds)(system.shutdown()) + system.scheduler.scheduleOnce(60.seconds)(system.terminate()) def runWs[T](uri: Uri, clientFlow: Flow[Message, Message, T]): T = - Http().singleWebsocketRequest(uri, clientFlow)._2 + Http().singleWebSocketRequest(uri, clientFlow)._2 def completionSignal[T]: Flow[T, T, Future[Unit]] = Flow[T].transformMaterializing { () ⇒ @@ -193,14 +193,16 @@ object WSClientAutobahnTest extends App { } /** - * The autobahn tests define a weird API where every request must be a Websocket request and - * they will send a single websocket message with the result. Websocket everywhere? Strange, + * The autobahn tests define a weird API where every request must be a WebSocket request and + * they will send a single websocket message with the result. WebSocket everywhere? Strange, * but somewhat consistent. */ def runToSingleText(uri: Uri): Future[String] = { val sink = Sink.head[Message] runWs(uri, Flow.fromSinkAndSourceMat(sink, Source.maybe[Message])(Keep.left)).flatMap { case tm: TextMessage ⇒ tm.textStream.runWith(Sink.fold("")(_ + _)) + case other ⇒ + throw new IllegalStateException(s"unexpected element of type ${other.getClass}") } } def runToSingleJsonValue[T: JsonReader](uri: Uri): Future[T] = diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala index 73d9ac2f66..ae7913fceb 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala @@ -1,16 +1,18 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws +import akka.NotUsed + import scala.concurrent.Await import scala.concurrent.duration._ import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.HttpMethods._ -import akka.http.scaladsl.model.ws.{ Message, UpgradeToWebsocket } +import akka.http.scaladsl.model.ws.{ Message, UpgradeToWebSocket } import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer import akka.stream.scaladsl.Flow @@ -26,9 +28,9 @@ object WSServerAutobahnTest extends App { try { val binding = Http().bindAndHandleSync({ - case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) if req.header[UpgradeToWebsocket].isDefined ⇒ - req.header[UpgradeToWebsocket] match { - case Some(upgrade) ⇒ upgrade.handleMessages(echoWebsocketService) // needed for running the autobahn test suite + case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) if req.header[UpgradeToWebSocket].isDefined ⇒ + req.header[UpgradeToWebSocket] match { + case Some(upgrade) ⇒ upgrade.handleMessages(echoWebSocketService) // needed for running the autobahn test suite case None ⇒ HttpResponse(400, entity = "Not a valid websocket request!") } case _: HttpRequest ⇒ HttpResponse(404, entity = "Unknown resource!") @@ -44,9 +46,9 @@ object WSServerAutobahnTest extends App { case _ ⇒ throw new Exception("akka.ws-mode MUST be sleep or read.") } } finally { - system.shutdown() + system.terminate() } - def echoWebsocketService: Flow[Message, Message, Unit] = + def echoWebSocketService: Flow[Message, Message, NotUsed] = Flow[Message] // just let message flow directly to the output } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestSetupBase.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestSetupBase.scala index 814999eef1..a49de707a0 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestSetupBase.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestSetupBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestUtils.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestUtils.scala index 307c3db08e..330fdb4f16 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestUtils.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSTestUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketClientSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketClientSpec.scala similarity index 90% rename from akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketClientSpec.scala rename to akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketClientSpec.scala index 461c04833e..b75a323042 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketClientSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketClientSpec.scala @@ -1,17 +1,18 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws import java.util.Random -import akka.http.scaladsl.model.ws.{ InvalidUpgradeResponse, WebsocketUpgradeResponse } +import akka.NotUsed +import akka.http.scaladsl.model.ws.{ InvalidUpgradeResponse, WebSocketUpgradeResponse } import akka.stream.ClosedShape import scala.concurrent.duration._ -import akka.http.ClientConnectionSettings +import akka.http.scaladsl.settings.ClientConnectionSettings import akka.http.scaladsl.Http import akka.http.scaladsl.model.headers.{ ProductVersion, `User-Agent` } import akka.http.scaladsl.model.ws._ @@ -24,8 +25,8 @@ import org.scalatest.{ Matchers, FreeSpec } import akka.http.impl.util._ -class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSpec { - "The client-side Websocket implementation should" - { +class WebSocketClientSpec extends FreeSpec with Matchers with WithMaterializerSpec { + "The client-side WebSocket implementation should" - { "establish a websocket connection when the user requests it" in new EstablishedConnectionSetup with ClientEchoes "establish connection with case insensitive header values" in new TestSetup with ClientEchoes { expectWireData(UpgradeRequestBytes) @@ -53,7 +54,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp |""") expectNetworkAbort() - expectInvalidUpgradeResponseCause("Websocket server at ws://example.org/ws returned unexpected status code: 404 Not Found") + expectInvalidUpgradeResponseCause("WebSocket server at ws://example.org/ws returned unexpected status code: 404 Not Found") } "missing Sec-WebSocket-Accept hash" in new TestSetup with ClientEchoes { expectWireData(UpgradeRequestBytes) @@ -68,7 +69,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp |""") expectNetworkAbort() - expectInvalidUpgradeResponseCause("Websocket server at ws://example.org/ws returned response that was missing required `Sec-WebSocket-Accept` header.") + expectInvalidUpgradeResponseCause("WebSocket server at ws://example.org/ws returned response that was missing required `Sec-WebSocket-Accept` header.") } "wrong Sec-WebSocket-Accept hash" in new TestSetup with ClientEchoes { expectWireData(UpgradeRequestBytes) @@ -84,7 +85,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp |""") expectNetworkAbort() - expectInvalidUpgradeResponseCause("Websocket server at ws://example.org/ws returned response with invalid `Sec-WebSocket-Accept` header.") + expectInvalidUpgradeResponseCause("WebSocket server at ws://example.org/ws returned response with invalid `Sec-WebSocket-Accept` header.") } "missing `Upgrade` header" in new TestSetup with ClientEchoes { expectWireData(UpgradeRequestBytes) @@ -99,7 +100,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp |""") expectNetworkAbort() - expectInvalidUpgradeResponseCause("Websocket server at ws://example.org/ws returned response that was missing required `Upgrade` header.") + expectInvalidUpgradeResponseCause("WebSocket server at ws://example.org/ws returned response that was missing required `Upgrade` header.") } "missing `Connection: upgrade` header" in new TestSetup with ClientEchoes { expectWireData(UpgradeRequestBytes) @@ -114,12 +115,12 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp |""") expectNetworkAbort() - expectInvalidUpgradeResponseCause("Websocket server at ws://example.org/ws returned response that was missing required `Connection` header.") + 
expectInvalidUpgradeResponseCause("WebSocket server at ws://example.org/ws returned response that was missing required `Connection` header.") } } "don't send out frames before handshake was finished successfully" in new TestSetup { - def clientImplementation: Flow[Message, Message, Unit] = + def clientImplementation: Flow[Message, Message, NotUsed] = Flow.fromSinkAndSourceMat(Sink.ignore, Source.single(TextMessage("fast message")))(Keep.none) expectWireData(UpgradeRequestBytes) @@ -226,7 +227,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp expectNetworkAbort() expectInvalidUpgradeResponseCause( - "Websocket server at ws://example.org/ws returned response that indicated that the given subprotocol was not supported. (client supported: v2, server supported: None)") + "WebSocket server at ws://example.org/ws returned response that indicated that the given subprotocol was not supported. (client supported: v2, server supported: None)") } "if different protocol was selected" in new TestSetup with ClientProbes { override protected def requestedSubProtocol: Option[String] = Some("v2") @@ -255,7 +256,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp expectNetworkAbort() expectInvalidUpgradeResponseCause( - "Websocket server at ws://example.org/ws returned response that indicated that the given subprotocol was not supported. (client supported: v2, server supported: Some(v3))") + "WebSocket server at ws://example.org/ws returned response that indicated that the given subprotocol was not supported. (client supported: v2, server supported: Some(v3))") } } } @@ -291,20 +292,19 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp abstract class TestSetup extends WSTestSetupBase { protected def noMsgTimeout: FiniteDuration = 100.millis - protected def clientImplementation: Flow[Message, Message, Unit] + protected def clientImplementation: Flow[Message, Message, NotUsed] protected def requestedSubProtocol: Option[String] = None val random = new Random(0) def settings = ClientConnectionSettings(system) - .copy( - userAgentHeader = Some(`User-Agent`(List(ProductVersion("akka-http", "test")))), - websocketRandomFactory = () ⇒ random) + .withUserAgentHeader(Some(`User-Agent`(List(ProductVersion("akka-http", "test"))))) + .withWebsocketRandomFactory(() ⇒ random) def targetUri: Uri = "ws://example.org/ws" - def clientLayer: Http.WebsocketClientLayer = - Http(system).websocketClientLayer( - WebsocketRequest(targetUri, subprotocol = requestedSubProtocol), + def clientLayer: Http.WebSocketClientLayer = + Http(system).webSocketClientLayer( + WebSocketRequest(targetUri, subprotocol = requestedSubProtocol), settings = settings) val (netOut, netIn, response) = { @@ -351,7 +351,7 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp def expectNetworkAbort(): Unit = netOut.expectError() def closeNetworkInput(): Unit = netIn.sendComplete() - def expectResponse(response: WebsocketUpgradeResponse): Unit = + def expectResponse(response: WebSocketUpgradeResponse): Unit = expectInvalidUpgradeResponse() shouldEqual response def expectInvalidUpgradeResponseCause(expected: String): Unit = expectInvalidUpgradeResponse().cause shouldEqual expected @@ -362,14 +362,14 @@ class WebsocketClientSpec extends FreeSpec with Matchers with WithMaterializerSp } trait ClientEchoes extends TestSetup { - override def clientImplementation: Flow[Message, Message, Unit] = echoServer - def echoServer: Flow[Message, 
Message, Unit] = Flow[Message] + override def clientImplementation: Flow[Message, Message, NotUsed] = echoServer + def echoServer: Flow[Message, Message, NotUsed] = Flow[Message] } trait ClientProbes extends TestSetup { lazy val messagesOut = TestPublisher.probe[Message]() lazy val messagesIn = TestSubscriber.probe[Message]() - override def clientImplementation: Flow[Message, Message, Unit] = + override def clientImplementation: Flow[Message, Message, NotUsed] = Flow.fromSinkAndSourceMat(Sink.fromSubscriber(messagesIn), Source.fromPublisher(messagesOut))(Keep.none) } } diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketIntegrationSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketIntegrationSpec.scala new file mode 100644 index 0000000000..a9968cee85 --- /dev/null +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketIntegrationSpec.scala @@ -0,0 +1,174 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package akka.http.impl.engine.ws + +import scala.concurrent.Await +import scala.concurrent.duration.DurationInt +import org.scalactic.ConversionCheckedTripleEquals +import org.scalatest.concurrent.ScalaFutures +import org.scalatest.time.Span.convertDurationToSpan +import akka.http.scaladsl.Http +import akka.http.scaladsl.model.HttpRequest +import akka.http.scaladsl.model.Uri.apply +import akka.http.scaladsl.model.ws._ +import akka.stream._ +import akka.stream.scaladsl._ +import akka.stream.testkit._ +import akka.stream.scaladsl.GraphDSL.Implicits._ +import org.scalatest.concurrent.Eventually +import akka.stream.io.SslTlsPlacebo +import java.net.InetSocketAddress +import akka.stream.impl.fusing.GraphStages +import akka.util.ByteString +import akka.http.scaladsl.model.StatusCodes +import akka.stream.testkit.scaladsl.TestSink +import scala.concurrent.Future + +class WebSocketIntegrationSpec extends AkkaSpec("akka.stream.materializer.debug.fuzzing-mode=off") + with ScalaFutures with ConversionCheckedTripleEquals with Eventually { + + implicit val patience = PatienceConfig(3.seconds) + import system.dispatcher + implicit val materializer = ActorMaterializer() + + "A WebSocket server" must { + + "not reset the connection when no data are flowing" in Utils.assertAllStagesStopped { + val source = TestPublisher.probe[Message]() + val bindingFuture = Http().bindAndHandleSync({ + case HttpRequest(_, _, headers, _, _) ⇒ + val upgrade = headers.collectFirst { case u: UpgradeToWebSocket ⇒ u }.get + upgrade.handleMessages(Flow.fromSinkAndSource(Sink.ignore, Source.fromPublisher(source)), None) + }, interface = "localhost", port = 0) + val binding = Await.result(bindingFuture, 3.seconds) + val myPort = binding.localAddress.getPort + + val (response, sink) = Http().singleWebSocketRequest( + WebSocketRequest("ws://127.0.0.1:" + myPort), + Flow.fromSinkAndSourceMat(TestSink.probe[Message], Source.empty)(Keep.left)) + + response.futureValue.response.status.isSuccess should ===(true) + sink + .request(10) + .expectNoMsg(500.millis) + + source + .sendNext(TextMessage("hello")) + .sendComplete() + sink + .expectNext(TextMessage("hello")) + .expectComplete() + + binding.unbind() + } + + "not reset the connection when no data are flowing and the connection is closed from the client" in Utils.assertAllStagesStopped { + val source = TestPublisher.probe[Message]() + val bindingFuture = Http().bindAndHandleSync({ + case HttpRequest(_, _, headers, _, _) ⇒ + val upgrade = headers.collectFirst { case u: UpgradeToWebSocket ⇒ u }.get + 
upgrade.handleMessages(Flow.fromSinkAndSource(Sink.ignore, Source.fromPublisher(source)), None) + }, interface = "localhost", port = 0) + val binding = Await.result(bindingFuture, 3.seconds) + val myPort = binding.localAddress.getPort + + val ((response, breaker), sink) = + Source.empty + .viaMat { + Http().webSocketClientLayer(WebSocketRequest("ws://localhost:" + myPort)) + .atop(SslTlsPlacebo.forScala) + .joinMat(Flow.fromGraph(GraphStages.breaker[ByteString]).via( + Tcp().outgoingConnection(new InetSocketAddress("localhost", myPort), halfClose = true)))(Keep.both) + }(Keep.right) + .toMat(TestSink.probe[Message])(Keep.both) + .run() + + response.futureValue.response.status.isSuccess should ===(true) + sink + .request(10) + .expectNoMsg(1500.millis) + + breaker.value.get.get.complete() + + source + .sendNext(TextMessage("hello")) + .sendComplete() + sink + .expectNext(TextMessage("hello")) + .expectComplete() + + binding.unbind() + } + + "echo 100 elements and then shut down without error" in Utils.assertAllStagesStopped { + + val bindingFuture = Http().bindAndHandleSync({ + case HttpRequest(_, _, headers, _, _) ⇒ + val upgrade = headers.collectFirst { case u: UpgradeToWebSocket ⇒ u }.get + upgrade.handleMessages(Flow.apply, None) + }, interface = "localhost", port = 0) + val binding = Await.result(bindingFuture, 3.seconds) + val myPort = binding.localAddress.getPort + + val N = 100 + val (response, count) = Http().singleWebSocketRequest( + WebSocketRequest("ws://127.0.0.1:" + myPort), + Flow.fromSinkAndSourceMat( + Sink.fold(0)((n, _: Message) ⇒ n + 1), + Source.repeat(TextMessage("hello")).take(N))(Keep.left)) + + count.futureValue should ===(N) + binding.unbind() + } + + "send back 100 elements and then terminate without error even when not ordinarily closed" in Utils.assertAllStagesStopped { + val N = 100 + + val handler = Flow.fromGraph(GraphDSL.create() { implicit b ⇒ + val merge = b.add(Merge[Int](2)) + + // convert to int so we can connect to merge + val mapMsgToInt = b.add(Flow[Message].map(_ ⇒ -1)) + val mapIntToMsg = b.add(Flow[Int].map(x ⇒ TextMessage.Strict(s"Sending: $x"))) + + // source we want to use to send message to the connected websocket sink + val rangeSource = b.add(Source(1 to N)) + + mapMsgToInt ~> merge // this part of the merge will never provide msgs + rangeSource ~> merge ~> mapIntToMsg + + FlowShape(mapMsgToInt.in, mapIntToMsg.out) + }) + + val bindingFuture = Http().bindAndHandleSync({ + case HttpRequest(_, _, headers, _, _) ⇒ + val upgrade = headers.collectFirst { case u: UpgradeToWebSocket ⇒ u }.get + upgrade.handleMessages(handler, None) + }, interface = "localhost", port = 0) + val binding = Await.result(bindingFuture, 3.seconds) + val myPort = binding.localAddress.getPort + + @volatile var messages = 0 + val (breaker, completion) = + Source.maybe + .viaMat { + Http().webSocketClientLayer(WebSocketRequest("ws://localhost:" + myPort)) + .atop(SslTlsPlacebo.forScala) + // the resource leak of #19398 existed only for severed websocket connections + .atopMat(GraphStages.bidiBreaker[ByteString, ByteString])(Keep.right) + .join(Tcp().outgoingConnection(new InetSocketAddress("localhost", myPort), halfClose = true)) + }(Keep.right) + .toMat(Sink.foreach(_ ⇒ messages += 1))(Keep.both) + .run() + eventually(messages should ===(N)) + // breaker should have been fulfilled long ago + breaker.value.get.get.completeAndCancel() + completion.futureValue + + binding.unbind() + } + + } + +} diff --git 
a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketServerSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketServerSpec.scala similarity index 93% rename from akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketServerSpec.scala rename to akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketServerSpec.scala index ce9b6ddc7e..3f298ba3bb 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebsocketServerSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketServerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.engine.ws @@ -14,10 +14,10 @@ import akka.http.impl.util._ import akka.http.impl.engine.server.HttpServerTestSetupBase -class WebsocketServerSpec extends FreeSpec with Matchers with WithMaterializerSpec { spec ⇒ +class WebSocketServerSpec extends FreeSpec with Matchers with WithMaterializerSpec { spec ⇒ import WSTestUtils._ - "The server-side Websocket integration should" - { + "The server-side WebSocket integration should" - { "establish a websocket connection when the user requests it" - { "when user handler instantly tries to send messages" in Utils.assertAllStagesStopped { new TestSetup { @@ -33,7 +33,7 @@ class WebsocketServerSpec extends FreeSpec with Matchers with WithMaterializerSp |""") val request = expectRequest() - val upgrade = request.header[UpgradeToWebsocket] + val upgrade = request.header[UpgradeToWebSocket] upgrade.isDefined shouldBe true val source = @@ -79,7 +79,7 @@ class WebsocketServerSpec extends FreeSpec with Matchers with WithMaterializerSp |""") val request = expectRequest() - val upgrade = request.header[UpgradeToWebsocket] + val upgrade = request.header[UpgradeToWebSocket] upgrade.isDefined shouldBe true val response = upgrade.get.handleMessages(Flow[Message]) // simple echoing @@ -115,7 +115,7 @@ class WebsocketServerSpec extends FreeSpec with Matchers with WithMaterializerSp } } "prevent the selection of an unavailable subprotocol" in pending - "reject invalid Websocket handshakes" - { + "reject invalid WebSocket handshakes" - { "missing `Upgrade: websocket` header" in pending "missing `Connection: upgrade` header" in pending "missing `Sec-WebSocket-Key header" in pending diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WithMaterializerSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WithMaterializerSpec.scala index 41058b5f9b..3fc02e530f 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WithMaterializerSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WithMaterializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.engine.ws @@ -16,5 +16,5 @@ trait WithMaterializerSpec extends BeforeAndAfterAll { _: Suite ⇒ implicit lazy val system = ActorSystem(getClass.getSimpleName, testConf) implicit lazy val materializer = ActorMaterializer() - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() } \ No newline at end of file diff --git a/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala index a46c9200e3..4b84322c4d 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala @@ -1,10 +1,10 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.model.parser -import akka.http.ParserSettings.CookieParsingMode +import akka.http.scaladsl.settings.ParserSettings.CookieParsingMode import akka.http.impl.model.parser.HeaderParser.Settings import org.scalatest.{ Matchers, FreeSpec } import org.scalatest.matchers.{ Matcher, MatchResult } @@ -401,23 +401,23 @@ class HttpHeaderSpec extends FreeSpec with Matchers { } "Sec-WebSocket-Extensions" in { "Sec-WebSocket-Extensions: abc" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("abc"))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("abc"))) "Sec-WebSocket-Extensions: abc, def" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("abc"), WebsocketExtension("def"))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("abc"), WebSocketExtension("def"))) "Sec-WebSocket-Extensions: abc; param=2; use_y, def" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("abc", Map("param" -> "2", "use_y" -> "")), WebsocketExtension("def"))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("abc", Map("param" -> "2", "use_y" -> "")), WebSocketExtension("def"))) "Sec-WebSocket-Extensions: abc; param=\",xyz\", def" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("abc", Map("param" -> ",xyz")), WebsocketExtension("def"))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("abc", Map("param" -> ",xyz")), WebSocketExtension("def"))) // real examples from https://tools.ietf.org/html/draft-ietf-hybi-permessage-compression-19 "Sec-WebSocket-Extensions: permessage-deflate" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("permessage-deflate"))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("permessage-deflate"))) "Sec-WebSocket-Extensions: permessage-deflate; client_max_window_bits; server_max_window_bits=10" =!= - `Sec-WebSocket-Extensions`(Vector(WebsocketExtension("permessage-deflate", Map("client_max_window_bits" -> "", "server_max_window_bits" -> "10")))) + `Sec-WebSocket-Extensions`(Vector(WebSocketExtension("permessage-deflate", Map("client_max_window_bits" -> "", "server_max_window_bits" -> "10")))) "Sec-WebSocket-Extensions: permessage-deflate; client_max_window_bits; server_max_window_bits=10, permessage-deflate; client_max_window_bits" =!= `Sec-WebSocket-Extensions`(Vector( - WebsocketExtension("permessage-deflate", Map("client_max_window_bits" -> "", "server_max_window_bits" -> "10")), - WebsocketExtension("permessage-deflate", Map("client_max_window_bits" -> "")))) + WebSocketExtension("permessage-deflate", Map("client_max_window_bits" -> "", "server_max_window_bits" -> "10")), + WebSocketExtension("permessage-deflate", Map("client_max_window_bits" -> "")))) } 
"Sec-WebSocket-Key" in { "Sec-WebSocket-Key: c2Zxb3JpbmgyMzA5dGpoMDIzOWdlcm5vZ2luCg==" =!= `Sec-WebSocket-Key`("c2Zxb3JpbmgyMzA5dGpoMDIzOWdlcm5vZ2luCg==") diff --git a/akka-http-core/src/test/scala/akka/http/impl/util/EnhancedInetSocketAddressSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/util/EnhancedInetSocketAddressSpec.scala deleted file mode 100644 index 16d0fbc6b3..0000000000 --- a/akka-http-core/src/test/scala/akka/http/impl/util/EnhancedInetSocketAddressSpec.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.impl.util - -import java.net.{ InetAddress, InetSocketAddress } - -import org.scalatest.{ Matchers, WordSpec } - -class EnhancedInetSocketAddressSpec extends WordSpec with Matchers { - "getHostStringJava6Compatible" should { - "return IPv4 address if InetSocketAddress was created with the address" in { - val addr = likelyReverseResolvableAddress - val socketAddress = new InetSocketAddress(addr, 80) - socketAddress.getHostStringJava6Compatible shouldEqual addr.getHostAddress - } - "return host name if InetSocketAddress was created with host name" in { - val address = new InetSocketAddress("github.com", 80) - address.getHostStringJava6Compatible shouldEqual "github.com" - } - } - - /** - * Returns an InetAddress that can likely be reverse looked up, so that - * getHostName returns a DNS address and not the IP. Unfortunately, we - * cannot be sure that a host name was already cached somewhere in which - * case getHostString may still return a host name even without doing - * a reverse lookup at this time. If this start to fail non-deterministically, - * it may be decided that this test needs to be disabled. - */ - def likelyReverseResolvableAddress: InetAddress = - InetAddress.getByAddress(InetAddress.getByName("google.com").getAddress) -} diff --git a/akka-http-core/src/test/scala/akka/http/impl/util/ExampleHttpContexts.scala b/akka-http-core/src/test/scala/akka/http/impl/util/ExampleHttpContexts.scala index 82750ce9f1..7d337c93cf 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/util/ExampleHttpContexts.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/util/ExampleHttpContexts.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util @@ -9,12 +9,15 @@ import java.security.{ SecureRandom, KeyStore } import java.security.cert.{ CertificateFactory, Certificate } import javax.net.ssl.{ SSLParameters, SSLContext, TrustManagerFactory, KeyManagerFactory } -import akka.http.scaladsl.HttpsContext +import akka.http.scaladsl.HttpsConnectionContext /** * These are HTTPS example configurations that take key material from the resources/key folder. */ object ExampleHttpContexts { + + // TODO show example how to obtain pre-configured context from ssl-config + val exampleServerContext = { // never put passwords into code! 
val password = "abcdef".toCharArray @@ -28,8 +31,9 @@ object ExampleHttpContexts { val context = SSLContext.getInstance("TLS") context.init(keyManagerFactory.getKeyManagers, null, new SecureRandom) - HttpsContext(context) + new HttpsConnectionContext(context) } + val exampleClientContext = { val certStore = KeyStore.getInstance(KeyStore.getDefaultType) certStore.load(null, null) @@ -43,8 +47,8 @@ object ExampleHttpContexts { context.init(null, certManagerFactory.getTrustManagers, new SecureRandom) val params = new SSLParameters() - Java6Compat.trySetEndpointIdentificationAlgorithm(params, "https") - HttpsContext(context, sslParameters = Some(params)) + params.setEndpointIdentificationAlgorithm("https") + new HttpsConnectionContext(context, sslParameters = Some(params)) } def resourceStream(resourceName: String): InputStream = { diff --git a/akka-http-core/src/test/scala/akka/http/impl/util/RenderingSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/util/RenderingSpec.scala index 6e8787f01c..4669cdfccc 100644 --- a/akka-http-core/src/test/scala/akka/http/impl/util/RenderingSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/impl/util/RenderingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.util diff --git a/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiSpec.scala b/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiSpec.scala index 2d0591840a..d6ff22704f 100644 --- a/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiSpec.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.model +import java.util.Optional + import akka.japi.Pair import org.scalatest.{ FreeSpec, MustMatchers } @@ -53,10 +55,10 @@ class JavaApiSpec extends FreeSpec with MustMatchers { } "access single parameter" in { val query = Uri.create("/abc?name=blub").query() - query.get("name") must be(akka.japi.Option.some("blub")) - query.get("age") must be(akka.japi.Option.none) + query.get("name") must be(Optional.of("blub")) + query.get("age") must be(Optional.empty()) - Uri.create("/abc?name=blub&name=blib").query.get("name") must be(akka.japi.Option.some("blub")) + Uri.create("/abc?name=blub&name=blib").query.get("name") must be(Optional.of("blub")) } } } diff --git a/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiTestCaseSpecs.scala b/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiTestCaseSpecs.scala index d01daf2a9d..831f03328e 100644 --- a/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiTestCaseSpecs.scala +++ b/akka-http-core/src/test/scala/akka/http/javadsl/model/JavaApiTestCaseSpecs.scala @@ -1,14 +1,16 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.model -import javax.net.ssl.SSLContext +import java.util.Optional +import javax.net.ssl.{ SSLParameters, SSLContext } import akka.http.javadsl.model.headers.Cookie import akka.http.scaladsl.model import akka.http.scaladsl.model.headers.BasicHttpCredentials +import akka.stream.io.ClientAuth import org.scalatest.{ FreeSpec, MustMatchers } import scala.collection.immutable @@ -59,12 +61,11 @@ class JavaApiTestCaseSpecs extends FreeSpec with MustMatchers { Uri.create("/order").query(JavaApiTestCases.addSessionId(orderId)) must be(Uri.create("/order?orderId=123&session=abcdefghijkl")) } "create HttpsContext" in { - import akka.japi.{ Option ⇒ JOption } - akka.http.javadsl.HttpsContext.create(SSLContext.getDefault, - JOption.none, - JOption.none, - JOption.none, - JOption.none) mustNot be(null) + akka.http.javadsl.ConnectionContext.https(SSLContext.getDefault, + Optional.empty[java.util.Collection[String]], + Optional.empty[java.util.Collection[String]], + Optional.empty[ClientAuth], + Optional.empty[SSLParameters]) mustNot be(null) } } } \ No newline at end of file diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala index 3d45b3ac21..d8be965b22 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl @@ -7,6 +7,8 @@ package akka.http.scaladsl import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter } import java.net.{ BindException, Socket } import java.util.concurrent.TimeoutException +import akka.http.scaladsl.settings.{ ConnectionPoolSettings, ClientConnectionSettings, ServerSettings } + import scala.annotation.tailrec import scala.concurrent.duration._ import scala.concurrent.{ Await, Future, Promise } @@ -18,7 +20,6 @@ import akka.http.scaladsl.model.HttpEntity._ import akka.http.scaladsl.model.HttpMethods._ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ -import akka.http.{ ConnectionPoolSettings, ClientConnectionSettings, ServerSettings } import akka.stream.scaladsl._ import akka.stream.testkit._ import akka.stream.{ ActorMaterializer, BindFailedException, StreamTcpException } @@ -35,7 +36,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit akka.stdout-loglevel = ERROR windows-connection-abort-workaround-enabled = auto akka.log-dead-letters = OFF - """) + akka.http.server.request-timeout = infinite""") implicit val system = ActorSystem(getClass.getSimpleName, testConf) import system.dispatcher implicit val materializer = ActorMaterializer() @@ -114,7 +115,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit "prevent more than the configured number of max-connections with bindAndHandle" in { val (_, hostname, port) = TestUtils.temporaryServerHostnameAndPort() - val settings = ServerSettings(system).copy(maxConnections = 1) + val settings = ServerSettings(system).withMaxConnections(1) val receivedSlow = Promise[Long]() val receivedFast = Promise[Long]() @@ -155,7 +156,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit "timeouts" should { def bindServer(hostname: String, port: Int, serverTimeout: FiniteDuration): (Promise[Long], ServerBinding) = { val s = 
ServerSettings(system) - val settings = s.copy(timeouts = s.timeouts.copy(idleTimeout = serverTimeout)) + val settings = s.withTimeouts(s.timeouts.withIdleTimeout(serverTimeout)) val receivedRequest = Promise[Long]() @@ -204,7 +205,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit val cs = ClientConnectionSettings(system) val clientTimeout = 345.millis - val clientSettings = cs.copy(idleTimeout = clientTimeout) + val clientSettings = cs.withIdleTimeout(clientTimeout) val (_, hostname, port) = TestUtils.temporaryServerHostnameAndPort() val (receivedRequest: Promise[Long], b1: ServerBinding) = bindServer(hostname, port, serverTimeout) @@ -237,7 +238,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit val cs = ConnectionPoolSettings(system) val clientTimeout = 345.millis - val clientPoolSettings = cs.copy(idleTimeout = clientTimeout) + val clientPoolSettings = cs.withIdleTimeout(clientTimeout) val (_, hostname, port) = TestUtils.temporaryServerHostnameAndPort() val (receivedRequest: Promise[Long], b1: ServerBinding) = bindServer(hostname, port, serverTimeout) @@ -272,7 +273,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit val cs = ConnectionPoolSettings(system) val clientTimeout = 345.millis - val clientPoolSettings = cs.copy(idleTimeout = clientTimeout) + val clientPoolSettings = cs.withIdleTimeout(clientTimeout) val (_, hostname, port) = TestUtils.temporaryServerHostnameAndPort() val (receivedRequest: Promise[Long], b1: ServerBinding) = bindServer(hostname, port, serverTimeout) @@ -449,7 +450,7 @@ class ClientServerSpec extends WordSpec with Matchers with BeforeAndAfterAll wit } override def afterAll() = { - system.shutdown() + system.terminate() system2.shutdown() } diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/TestClient.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/TestClient.scala index 9234380599..bd7fe45431 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/TestClient.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/TestClient.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl @@ -28,12 +28,12 @@ object TestClient extends App { fetchServerVersion1() // Console.readLine() - // system.shutdown() + // system.terminate() def fetchServerVersion1(): Unit = { println(s"Fetching HTTPS server version of host `$host` via a direct low-level connection ...") - val connection = Http().outgoingConnectionTls(host) + val connection = Http().outgoingConnectionHttps(host) val result = Source.single(HttpRequest()).via(connection).runWith(Sink.head) result.map(_.header[headers.Server]) onComplete { case Success(res) ⇒ @@ -62,5 +62,5 @@ object TestClient extends App { } } - def shutdown(): Unit = system.shutdown() + def shutdown(): Unit = system.terminate() } \ No newline at end of file diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/TestServer.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/TestServer.scala index 7a62627e83..08ae149a3f 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/TestServer.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/TestServer.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl +import akka.NotUsed + import scala.concurrent.duration._ import scala.concurrent.Await import akka.actor.ActorSystem @@ -25,17 +27,17 @@ object TestServer extends App { try { val binding = Http().bindAndHandleSync({ - case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) if req.header[UpgradeToWebsocket].isDefined ⇒ - req.header[UpgradeToWebsocket] match { - case Some(upgrade) ⇒ upgrade.handleMessages(echoWebsocketService) // needed for running the autobahn test suite + case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) if req.header[UpgradeToWebSocket].isDefined ⇒ + req.header[UpgradeToWebSocket] match { + case Some(upgrade) ⇒ upgrade.handleMessages(echoWebSocketService) // needed for running the autobahn test suite case None ⇒ HttpResponse(400, entity = "Not a valid websocket request!") } case HttpRequest(GET, Uri.Path("/"), _, _, _) ⇒ index case HttpRequest(GET, Uri.Path("/ping"), _, _, _) ⇒ HttpResponse(entity = "PONG!") case HttpRequest(GET, Uri.Path("/crash"), _, _, _) ⇒ sys.error("BOOM!") case req @ HttpRequest(GET, Uri.Path("/ws-greeter"), _, _, _) ⇒ - req.header[UpgradeToWebsocket] match { - case Some(upgrade) ⇒ upgrade.handleMessages(greeterWebsocketService) + req.header[UpgradeToWebSocket] match { + case Some(upgrade) ⇒ upgrade.handleMessages(greeterWebSocketService) case None ⇒ HttpResponse(400, entity = "Not a valid websocket request!") } case _: HttpRequest ⇒ HttpResponse(404, entity = "Unknown resource!") @@ -46,7 +48,7 @@ object TestServer extends App { println("Press RETURN to stop...") Console.readLine() } finally { - system.shutdown() + system.terminate() } ////////////// helpers ////////////// @@ -64,10 +66,10 @@ object TestServer extends App { | |""".stripMargin)) - def echoWebsocketService: Flow[Message, Message, Unit] = + def echoWebSocketService: Flow[Message, Message, NotUsed] = Flow[Message] // just let message flow directly to the output - def greeterWebsocketService: Flow[Message, Message, Unit] = + def greeterWebSocketService: Flow[Message, Message, NotUsed] = Flow[Message] .collect { case TextMessage.Strict(name) ⇒ TextMessage(s"Hello '$name'") diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/TestUtils.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/TestUtils.scala index aaab21d1d9..1e8bef0e4f 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/TestUtils.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/TestUtils.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/DateTimeSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/DateTimeSpec.scala index 0b9d082b31..99de0dcacb 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/DateTimeSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/DateTimeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala index 5275399d48..dbd8b2beeb 100755 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model import java.util.concurrent.TimeoutException +import akka.NotUsed import com.typesafe.config.{ ConfigFactory, Config } import scala.concurrent.{ Promise, Await } import scala.concurrent.duration._ @@ -32,7 +33,7 @@ class HttpEntitySpec extends FreeSpec with MustMatchers with BeforeAndAfterAll { implicit val system = ActorSystem(getClass.getSimpleName, testConf) implicit val materializer = ActorMaterializer() - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() "HttpEntity" - { "support dataBytes" - { @@ -176,7 +177,7 @@ class HttpEntitySpec extends FreeSpec with MustMatchers with BeforeAndAfterAll { strict.toString + " == " + expectedRendering) } - def duplicateBytesTransformer(): Flow[ByteString, ByteString, Unit] = + def duplicateBytesTransformer(): Flow[ByteString, ByteString, NotUsed] = Flow[ByteString].transform(() ⇒ StreamUtils.byteStringTransformer(doubleChars, () ⇒ trailer)) def trailer: ByteString = ByteString("--dup") diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala index c86dd37860..814f0b0aac 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/MultipartSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model @@ -22,7 +22,7 @@ class MultipartSpec extends WordSpec with Matchers with Inside with BeforeAndAft implicit val system = ActorSystem(getClass.getSimpleName, testConf) import system.dispatcher implicit val materializer = ActorMaterializer() - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() "Multipart.General" should { "support `toStrict` on the streamed model" in { diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/SerializabilitySpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/SerializabilitySpec.scala index fd1ed6ff84..6826ad9944 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/SerializabilitySpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/SerializabilitySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/TurkishISpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/TurkishISpec.scala index 061117a169..b189f7ae72 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/TurkishISpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/TurkishISpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala index 189e203a87..4e39d39b75 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.model @@ -370,7 +370,7 @@ class UriSpec extends WordSpec with Matchers { a[IllegalUriException] should be thrownBy Uri("foo/another@url/[]and{}", mode = Uri.ParsingMode.Strict) // handle query parameters with more than percent-encoded character - Uri("?%7Ba%7D=$%7B%7D", UTF8, Uri.ParsingMode.Strict).query() shouldEqual Query.Cons("{a}", "${}", Query.Empty) + Uri("?%7Ba%7D=$%7B%7D", UTF8, Uri.ParsingMode.Strict).query() shouldEqual Query.Cons("{a}", s"$${}", Query.Empty) // don't double decode Uri("%2520").path.head shouldEqual "%20" diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/model/headers/HeaderSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/model/headers/HeaderSpec.scala index e3e0948d1d..c50a6a81dc 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/model/headers/HeaderSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/model/headers/HeaderSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.model.headers diff --git a/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala b/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala index c189bdb821..86c14038b5 100644 --- a/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala +++ b/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.util diff --git a/akka-http-core/src/test/scala/io/akka/integrationtest/http/HttpModelIntegrationSpec.scala b/akka-http-core/src/test/scala/io/akka/integrationtest/http/HttpModelIntegrationSpec.scala index fbd8efaf95..311e0c0d45 100644 --- a/akka-http-core/src/test/scala/io/akka/integrationtest/http/HttpModelIntegrationSpec.scala +++ b/akka-http-core/src/test/scala/io/akka/integrationtest/http/HttpModelIntegrationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package io.akka.integrationtest.http @@ -40,7 +40,7 @@ class HttpModelIntegrationSpec extends WordSpec with Matchers with BeforeAndAfte implicit val system = ActorSystem(getClass.getSimpleName, testConf) import system.dispatcher - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() implicit val materializer = ActorMaterializer() diff --git a/akka-http-marshallers-java/akka-http-jackson/build.sbt b/akka-http-marshallers-java/akka-http-jackson/build.sbt new file mode 100644 index 0000000000..dd2817222d --- /dev/null +++ b/akka-http-marshallers-java/akka-http-jackson/build.sbt @@ -0,0 +1,11 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +OSGi.httpJackson +Dependencies.httpJackson +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http-jackson").value + +enablePlugins(ScaladocNoVerificationOfDiagrams) diff --git a/akka-http-marshallers-java/akka-http-jackson/src/main/scala/akka/http/javadsl/marshallers/jackson/Jackson.scala b/akka-http-marshallers-java/akka-http-jackson/src/main/scala/akka/http/javadsl/marshallers/jackson/Jackson.scala index d2754e825a..835b5c4195 100644 --- a/akka-http-marshallers-java/akka-http-jackson/src/main/scala/akka/http/javadsl/marshallers/jackson/Jackson.scala +++ b/akka-http-marshallers-java/akka-http-jackson/src/main/scala/akka/http/javadsl/marshallers/jackson/Jackson.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.marshallers.jackson diff --git a/akka-http-marshallers-scala/akka-http-spray-json/build.sbt b/akka-http-marshallers-scala/akka-http-spray-json/build.sbt new file mode 100644 index 0000000000..ae6e21b61f --- /dev/null +++ b/akka-http-marshallers-scala/akka-http-spray-json/build.sbt @@ -0,0 +1,9 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +OSGi.httpSprayJson +Dependencies.httpSprayJson +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http-spray-json").value diff --git a/akka-http-marshallers-scala/akka-http-spray-json/src/main/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupport.scala b/akka-http-marshallers-scala/akka-http-spray-json/src/main/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupport.scala index e06c94de65..6212358c75 100644 --- a/akka-http-marshallers-scala/akka-http-spray-json/src/main/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupport.scala +++ b/akka-http-marshallers-scala/akka-http-spray-json/src/main/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupport.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.marshallers.sprayjson diff --git a/akka-http-marshallers-scala/akka-http-xml/build.sbt b/akka-http-marshallers-scala/akka-http-xml/build.sbt new file mode 100644 index 0000000000..dc6013cdc6 --- /dev/null +++ b/akka-http-marshallers-scala/akka-http-xml/build.sbt @@ -0,0 +1,9 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +OSGi.httpXml +Dependencies.httpXml +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http-xml").value diff --git a/akka-http-marshallers-scala/akka-http-xml/src/main/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupport.scala b/akka-http-marshallers-scala/akka-http-xml/src/main/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupport.scala index bf317c3818..eaebdd4db8 100644 --- a/akka-http-marshallers-scala/akka-http-xml/src/main/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupport.scala +++ b/akka-http-marshallers-scala/akka-http-xml/src/main/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupport.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshallers.xml @@ -33,7 +33,7 @@ trait ScalaXmlSupport { /** * Provides a SAXParser for the NodeSeqUnmarshaller to use. Override to provide a custom SAXParser implementation. - * Will be called once for for every request to be unmarshalled. The default implementation calls [[ScalaXmlSupport.createSaferSAXParser]]. + * Will be called once for for every request to be unmarshalled. The default implementation calls `ScalaXmlSupport.createSaferSAXParser`. */ protected def createSAXParser(): SAXParser = ScalaXmlSupport.createSaferSAXParser() } diff --git a/akka-http-testkit/build.sbt b/akka-http-testkit/build.sbt new file mode 100644 index 0000000000..fa3003fbd6 --- /dev/null +++ b/akka-http-testkit/build.sbt @@ -0,0 +1,11 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +OSGi.httpTestkit +Dependencies.httpTestkit +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http-testkit").value + +scalacOptions in Compile += "-language:postfixOps" diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/DefaultHostInfo.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/DefaultHostInfo.scala index 4506718ec7..be86ca18d3 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/DefaultHostInfo.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/DefaultHostInfo.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/JUnitRouteTest.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/JUnitRouteTest.scala index 97336f15b5..14ffa7be38 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/JUnitRouteTest.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/JUnitRouteTest.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.testkit @@ -10,8 +10,8 @@ import akka.http.scaladsl.model.HttpResponse import akka.stream.{ Materializer, ActorMaterializer } import org.junit.rules.ExternalResource import org.junit.{ Assert, Rule } - import scala.concurrent.duration._ +import scala.concurrent.Await /** * A RouteTest that uses JUnit assertions. ActorSystem and Materializer are provided as an [[ExternalResource]] @@ -66,8 +66,7 @@ class ActorSystemResource extends ExternalResource { _materializer = createMaterializer(_system) } override def after(): Unit = { - _system.shutdown() - _system.awaitTermination(5.seconds) + Await.result(_system.terminate(), 5.seconds) _system = null _materializer = null } diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala index 94c99ad2e0..8cc777bb80 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/RouteTest.scala @@ -1,11 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.testkit +import akka.http.scaladsl.settings.RoutingSettings + import scala.annotation.varargs -import scala.concurrent.ExecutionContext +import scala.concurrent.ExecutionContextExecutor import scala.concurrent.duration._ import akka.stream.Materializer import akka.http.scaladsl.server @@ -15,14 +17,14 @@ import akka.http.javadsl.server.{ HttpApp, AllDirectives, Route, Directives } import akka.http.impl.util.JavaMapping.Implicits._ import akka.http.impl.server.RouteImplementation import akka.http.scaladsl.model.HttpResponse -import akka.http.scaladsl.server.{ RouteResult, RoutingSettings, Route ⇒ ScalaRoute } +import akka.http.scaladsl.server.{ RouteResult, Route ⇒ ScalaRoute } import akka.actor.ActorSystem import akka.event.NoLogging import akka.http.impl.util._ /** * A base class to create route tests for testing libraries. An implementation needs to provide - * code to provide and shutdown an [[ActorSystem]], [[Materializer]], and [[ExecutionContext]]. + * code to provide and shutdown an [[ActorSystem]], [[Materializer]], and [[ExecutionContextExecutor]]. * Also an implementation should provide instances of [[TestResponse]] to define the assertion * facilities of the testing library. * @@ -31,7 +33,7 @@ import akka.http.impl.util._ abstract class RouteTest extends AllDirectives { implicit def system: ActorSystem implicit def materializer: Materializer - implicit def executionContext: ExecutionContext = system.dispatcher + implicit def executionContext: ExecutionContextExecutor = system.dispatcher protected def awaitDuration: FiniteDuration = 500.millis diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestResponse.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestResponse.scala index 20e35279e3..d54bc5cbed 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestResponse.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestResponse.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRoute.scala b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRoute.scala index be03da323e..c9cde70e56 100644 --- a/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRoute.scala +++ b/akka-http-testkit/src/main/scala/akka/http/javadsl/testkit/TestRoute.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/MarshallingTestUtils.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/MarshallingTestUtils.scala index 59d7b915de..2c307dca3d 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/MarshallingTestUtils.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/MarshallingTestUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTest.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTest.scala index a6baebf869..9afba3f1eb 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTest.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTest.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit +import akka.http.scaladsl.settings.RoutingSettings import com.typesafe.config.{ ConfigFactory, Config } import scala.collection.immutable import scala.concurrent.{ ExecutionContext, Await, Future } @@ -43,7 +44,7 @@ trait RouteTest extends RequestBuilding with WSTestRequestBuilding with RouteTes implicit def executor = system.dispatcher implicit val materializer = ActorMaterializer() - def cleanUp(): Unit = system.shutdown() + def cleanUp(): Unit = system.terminate() private val dynRR = new DynamicVariable[RouteTestResult](null) private def result = @@ -90,18 +91,18 @@ trait RouteTest extends RequestBuilding with WSTestRequestBuilding with RouteTes if (r.size == 1) r.head else failTest("Expected a single rejection but got %s (%s)".format(r.size, r)) } - def isWebsocketUpgrade: Boolean = - status == StatusCodes.SwitchingProtocols && header[Upgrade].exists(_.hasWebsocket) + def isWebSocketUpgrade: Boolean = + status == StatusCodes.SwitchingProtocols && header[Upgrade].exists(_.hasWebSocket) /** - * Asserts that the received response is a Websocket upgrade response and the extracts + * Asserts that the received response is a WebSocket upgrade response and the extracts * the chosen subprotocol and passes it to the handler. 
*/ - def expectWebsocketUpgradeWithProtocol(body: String ⇒ Unit): Unit = { - if (!isWebsocketUpgrade) failTest("Response was no Websocket Upgrade response") + def expectWebSocketUpgradeWithProtocol(body: String ⇒ Unit): Unit = { + if (!isWebSocketUpgrade) failTest("Response was no WebSocket Upgrade response") header[`Sec-WebSocket-Protocol`] match { case Some(`Sec-WebSocket-Protocol`(Seq(protocol))) ⇒ body(protocol) - case _ ⇒ failTest("No Websocket protocol found in response.") + case _ ⇒ failTest("No WebSocket protocol found in response.") } } diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestResultComponent.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestResultComponent.scala index 6d12b01f9c..2c0f45327e 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestResultComponent.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestResultComponent.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestTimeout.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestTimeout.scala index 3773c30b32..c5a60ab13e 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestTimeout.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/RouteTestTimeout.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/ScalatestUtils.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/ScalatestUtils.scala index 21c94b13e6..a0f9fe266a 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/ScalatestUtils.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/ScalatestUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/TestFrameworkInterface.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/TestFrameworkInterface.scala index 78ed7ba88d..a10755e890 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/TestFrameworkInterface.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/TestFrameworkInterface.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSProbe.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSProbe.scala index 671f68c762..dbad501f66 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSProbe.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSProbe.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit +import akka.NotUsed + import scala.concurrent.duration._ import akka.util.ByteString @@ -24,7 +26,7 @@ import akka.http.scaladsl.model.ws.{ BinaryMessage, TextMessage, Message } * Requesting elements is handled automatically. 
*/ trait WSProbe { - def flow: Flow[Message, Message, Unit] + def flow: Flow[Message, Message, NotUsed] /** * Send the given messages out of the flow. @@ -96,6 +98,7 @@ trait WSProbe { object WSProbe { /** * Creates a WSProbe to use in tests against websocket handlers. + * * @param maxChunks The maximum number of chunks to collect for streamed messages. * @param maxChunkCollectionMills The maximum time in milliseconds to collect chunks for streamed messages. */ @@ -104,7 +107,7 @@ object WSProbe { val subscriber = TestSubscriber.probe[Message]() val publisher = TestPublisher.probe[Message]() - def flow: Flow[Message, Message, Unit] = Flow.fromSinkAndSourceMat(Sink.fromSubscriber(subscriber), Source.fromPublisher(publisher))(Keep.none) + def flow: Flow[Message, Message, NotUsed] = Flow.fromSinkAndSourceMat(Sink.fromSubscriber(subscriber), Source.fromPublisher(publisher))(Keep.none) def sendMessage(message: Message): Unit = publisher.sendNext(message) def sendMessage(text: String): Unit = sendMessage(TextMessage(text)) diff --git a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSTestRequestBuilding.scala b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSTestRequestBuilding.scala index d210435771..478724fede 100644 --- a/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSTestRequestBuilding.scala +++ b/akka-http-testkit/src/main/scala/akka/http/scaladsl/testkit/WSTestRequestBuilding.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.testkit @@ -7,18 +7,18 @@ package akka.http.scaladsl.testkit import akka.http.impl.engine.ws.InternalCustomHeader import akka.http.scaladsl.model.headers.{ UpgradeProtocol, Upgrade, `Sec-WebSocket-Protocol` } import akka.http.scaladsl.model.{ StatusCodes, HttpResponse, HttpRequest, Uri } -import akka.http.scaladsl.model.ws.{ UpgradeToWebsocket, Message } -import akka.stream.scaladsl.Flow - +import akka.http.scaladsl.model.ws.{ UpgradeToWebSocket, Message } import scala.collection.immutable +import akka.stream.{ Graph, FlowShape } +import akka.stream.scaladsl.Flow trait WSTestRequestBuilding { self: RouteTest ⇒ def WS(uri: Uri, clientSideHandler: Flow[Message, Message, Any], subprotocols: Seq[String] = Nil)(): HttpRequest = HttpRequest(uri = uri) - .addHeader(new InternalCustomHeader("UpgradeToWebsocketTestHeader") with UpgradeToWebsocket { + .addHeader(new InternalCustomHeader("UpgradeToWebSocketTestHeader") with UpgradeToWebSocket { def requestedProtocols: immutable.Seq[String] = subprotocols.toList - def handleMessages(handlerFlow: Flow[Message, Message, Any], subprotocol: Option[String]): HttpResponse = { + def handleMessages(handlerFlow: Graph[FlowShape[Message, Message], Any], subprotocol: Option[String]): HttpResponse = { clientSideHandler.join(handlerFlow).run() HttpResponse(StatusCodes.SwitchingProtocols, headers = diff --git a/akka-http-testkit/src/test/scala/akka/http/scaladsl/testkit/ScalatestRouteTestSpec.scala b/akka-http-testkit/src/test/scala/akka/http/scaladsl/testkit/ScalatestRouteTestSpec.scala index cb302186f4..6a11ed62f2 100644 --- a/akka-http-testkit/src/test/scala/akka/http/scaladsl/testkit/ScalatestRouteTestSpec.scala +++ b/akka-http-testkit/src/test/scala/akka/http/scaladsl/testkit/ScalatestRouteTestSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.testkit diff --git a/akka-http-tests-java8/src/main/resources/web/calculator.html b/akka-http-tests-java8/src/main/resources/web/calculator.html deleted file mode 100644 index a32b054287..0000000000 --- a/akka-http-tests-java8/src/main/resources/web/calculator.html +++ /dev/null @@ -1,23 +0,0 @@ - - -

[deleted calculator.html content not reproduced: extraction stripped the file's HTML markup, leaving only fragments of a small static "Calculator" test page with Add, Subtract and Multiply forms]
        -/multiply/42/23 - - - \ No newline at end of file diff --git a/akka-http-tests-java8/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java b/akka-http-tests-java8/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java deleted file mode 100644 index cad4872238..0000000000 --- a/akka-http-tests-java8/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.http.javadsl.server; - -import akka.http.scaladsl.model.HttpRequest; -import org.junit.Test; -import akka.http.javadsl.testkit.*; -import akka.http.javadsl.server.values.*; - -public class HandlerBindingTest extends JUnitRouteTest { - Parameter aParam = Parameters.intValue("a"); - Parameter bParam = Parameters.intValue("b"); - Parameter cParam = Parameters.intValue("c"); - Parameter dParam = Parameters.intValue("d"); - - @Test - public void testHandlerWithoutExtractions() { - Route route = handleWith(ctx -> ctx.complete("Ok")); - TestResponse response = runRoute(route, HttpRequest.GET("/")); - response.assertEntity("Ok"); - } - @Test - public void testHandler1() { - Route route = handleWith1(aParam, (ctx, a) -> ctx.complete("Ok " + a)); - TestResponse response = runRoute(route, HttpRequest.GET("?a=23")); - response.assertStatusCode(200); - response.assertEntity("Ok 23"); - } - @Test - public void testHandler2() { - Route route = - handleWith2( - aParam, - bParam, - (ctx, a, b) -> ctx.complete("Sum: " + (a + b))); - TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42")); - response.assertStatusCode(200); - response.assertEntity("Sum: 65"); - } - @Test - public void testHandler3() { - Route route = - handleWith3( - aParam, - bParam, - cParam, - (ctx, a, b, c) -> ctx.complete("Sum: " + (a + b + c))); - TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30")); - response.assertStatusCode(200); - response.assertEntity("Sum: 95"); - } - @Test - public void testHandler4() { - Route route = - handleWith4( - aParam, - bParam, - cParam, - dParam, - (ctx, a, b, c, d) -> ctx.complete("Sum: " + (a + b + c + d))); - TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30&d=45")); - response.assertStatusCode(200); - response.assertEntity("Sum: 140"); - } - public RouteResult sum(RequestContext ctx, int a, int b, int c, int d) { - return ctx.complete("Sum: "+(a + b + c + d)); - } - @Test - public void testHandler4MethodRef() { - Route route = - handleWith4( - aParam, - bParam, - cParam, - dParam, - this::sum); - TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30&d=45")); - response.assertStatusCode(200); - response.assertEntity("Sum: 140"); - } -} diff --git a/akka-http-tests-java8/src/test/resources/reference.conf b/akka-http-tests-java8/src/test/resources/reference.conf deleted file mode 100644 index ab48718a51..0000000000 --- a/akka-http-tests-java8/src/test/resources/reference.conf +++ /dev/null @@ -1,6 +0,0 @@ -akka { - actor { - serialize-creators = on - serialize-messages = on - } -} \ No newline at end of file diff --git a/akka-http-tests/build.sbt b/akka-http-tests/build.sbt new file mode 100644 index 0000000000..3efb382ca1 --- /dev/null +++ b/akka-http-tests/build.sbt @@ -0,0 +1,13 @@ +import akka._ + +AkkaBuild.defaultSettings +AkkaBuild.dontPublishSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +Dependencies.httpTests + +// don't ignore Suites which is the default for the junit-interface +testOptions += 
Tests.Argument(TestFrameworks.JUnit, "--ignore-runners=") + +scalacOptions in Compile += "-language:_" +mainClass in run in Test := Some("akka.http.javadsl.SimpleServerApp") diff --git a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/Pet.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/Pet.java index 0c6cfe1adb..bc0afb3fa9 100644 --- a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/Pet.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/Pet.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.examples.petstore; diff --git a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreController.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreController.java index a972413f27..559f0cae6b 100644 --- a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreController.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreController.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.examples.petstore; diff --git a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreExample.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreExample.java index 6098480d87..9d89fbe50c 100644 --- a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreExample.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/petstore/PetStoreExample.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.examples.petstore; @@ -17,55 +17,53 @@ import java.util.concurrent.ConcurrentHashMap; import static akka.http.javadsl.server.Directives.*; public class PetStoreExample { - static PathMatcher petId = PathMatchers.intValue(); - static RequestVal petEntity = RequestVals.entityAs(Jackson.jsonAs(Pet.class)); + static PathMatcher petId = PathMatchers.intValue(); + static RequestVal petEntity = RequestVals.entityAs(Jackson.jsonAs(Pet.class)); - public static Route appRoute(final Map pets) { - PetStoreController controller = new PetStoreController(pets); + public static Route appRoute(final Map pets) { + PetStoreController controller = new PetStoreController(pets); - final RequestVal existingPet = RequestVals.lookupInMap(petId, Pet.class, pets); + final RequestVal existingPet = RequestVals.lookupInMap(petId, Pet.class, pets); - Handler1 putPetHandler = new Handler1() { - public RouteResult apply(RequestContext ctx, Pet thePet) { - pets.put(thePet.getId(), thePet); - return ctx.completeAs(Jackson.json(), thePet); - } - }; + Handler1 putPetHandler = (Handler1) (ctx, thePet) -> { + pets.put(thePet.getId(), thePet); + return ctx.completeAs(Jackson.json(), thePet); + }; - return - route( - path().route( - getFromResource("web/index.html") - ), - path("pet", petId).route( - // demonstrates three different ways of handling requests: + return + route( + path().route( + getFromResource("web/index.html") + ), + path("pet", petId).route( + // demonstrates three different ways of handling requests: - // 1. using a predefined route that completes with an extraction - get(extractAndComplete(Jackson.json(), existingPet)), + // 1. 
using a predefined route that completes with an extraction + get(extractAndComplete(Jackson.json(), existingPet)), - // 2. using a handler - put(handleWith1(petEntity, putPetHandler)), + // 2. using a handler + put(handleWith1(petEntity, putPetHandler)), - // 3. calling a method of a controller instance reflectively - delete(handleReflectively(controller, "deletePet", petId)) - ) - ); - } - - public static void main(String[] args) throws IOException { - Map pets = new ConcurrentHashMap(); - Pet dog = new Pet(0, "dog"); - Pet cat = new Pet(1, "cat"); - pets.put(0, dog); - pets.put(1, cat); - - ActorSystem system = ActorSystem.create(); - try { - HttpService.bindRoute("localhost", 8080, appRoute(pets), system); - System.out.println("Type RETURN to exit"); - System.in.read(); - } finally { - system.shutdown(); - } + // 3. calling a method of a controller instance reflectively + delete(handleReflectively(controller, "deletePet", petId)) + ) + ); + } + + public static void main(String[] args) throws IOException { + Map pets = new ConcurrentHashMap<>(); + Pet dog = new Pet(0, "dog"); + Pet cat = new Pet(1, "cat"); + pets.put(0, dog); + pets.put(1, cat); + + ActorSystem system = ActorSystem.create(); + try { + HttpService.bindRoute("localhost", 8080, appRoute(pets), system); + System.out.println("Type RETURN to exit"); + System.in.read(); + } finally { + system.terminate(); } + } } \ No newline at end of file diff --git a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java index 4526373906..08627afe43 100644 --- a/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp.java @@ -1,20 +1,20 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.examples.simple; import akka.actor.ActorSystem; -import akka.dispatch.Futures; import akka.http.javadsl.server.*; import akka.http.javadsl.server.values.Parameter; import akka.http.javadsl.server.values.Parameters; import akka.http.javadsl.server.values.PathMatcher; import akka.http.javadsl.server.values.PathMatchers; -import scala.concurrent.Future; import java.io.IOException; import java.util.concurrent.Callable; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; public class SimpleServerApp extends HttpApp { static Parameter x = Parameters.intValue("x"); @@ -29,12 +29,8 @@ public class SimpleServerApp extends HttpApp { int result = x * y; return ctx.complete(String.format("%d * %d = %d", x, y, result)); } - public static Future multiplyAsync(final RequestContext ctx, final int x, final int y) { - return Futures.future(new Callable() { - public RouteResult call() throws Exception { - return multiply(ctx, x, y); - } - }, ctx.executionContext()); + public static CompletionStage multiplyAsync(final RequestContext ctx, final int x, final int y) { + return CompletableFuture.supplyAsync(() -> multiply(ctx, x, y), ctx.executionContext()); } @Override @@ -96,6 +92,6 @@ public class SimpleServerApp extends HttpApp { new SimpleServerApp().bindRoute("localhost", 8080, system); System.out.println("Type RETURN to exit"); System.in.read(); - system.shutdown(); + system.terminate(); } } \ No newline at end of file diff --git a/akka-http-tests-java8/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java similarity index 97% rename from akka-http-tests-java8/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java rename to akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java index 8d36bc6429..9b18e27176 100644 --- a/akka-http-tests-java8/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java +++ b/akka-http-tests/src/main/java/akka/http/javadsl/server/examples/simple/SimpleServerApp8.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.examples.simple; @@ -93,7 +93,7 @@ public class SimpleServerApp8 extends HttpApp { new SimpleServerApp8().bindRoute("localhost", 8080, system); System.out.println("Type RETURN to exit"); System.in.read(); - system.shutdown(); + system.terminate(); } } diff --git a/akka-http-tests-java8/src/test/java/AllJavaTests.java b/akka-http-tests/src/test/java/AllJavaTests.java similarity index 83% rename from akka-http-tests-java8/src/test/java/AllJavaTests.java rename to akka-http-tests/src/test/java/AllJavaTests.java index 1965a8fce1..f6c7405afc 100644 --- a/akka-http-tests-java8/src/test/java/AllJavaTests.java +++ b/akka-http-tests/src/test/java/AllJavaTests.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ import akka.http.javadsl.server.HandlerBindingTest; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java new file mode 100644 index 0000000000..ad2ab3c543 --- /dev/null +++ b/akka-http-tests/src/test/java/akka/http/javadsl/client/HttpAPIsTest.java @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ + +package akka.http.javadsl.client; + +import akka.event.LoggingAdapter; +import akka.http.javadsl.*; +import akka.http.javadsl.model.HttpRequest; +import akka.http.javadsl.model.HttpResponse; +import akka.http.javadsl.testkit.JUnitRouteTest; +import akka.http.scaladsl.settings.ConnectionPoolSettings; +import akka.japi.Function; +import akka.stream.javadsl.Flow; +import scala.concurrent.Future; + +import javax.net.ssl.SSLContext; + +import static akka.http.javadsl.ConnectHttp.*; +import static akka.http.javadsl.ConnectHttp.toHostHttps; + +import java.util.concurrent.CompletionStage; + +@SuppressWarnings("ConstantConditions") +public class HttpAPIsTest extends JUnitRouteTest { + + public void compileOnly() throws Exception { + final Http http = Http.get(system()); + + final ConnectionContext connectionContext = ConnectionContext.https(SSLContext.getDefault()); + final HttpConnectionContext httpContext = ConnectionContext.noEncryption(); + final HttpsConnectionContext httpsContext = ConnectionContext.https(SSLContext.getDefault()); + + String host = ""; + int port = 9090; + ConnectionPoolSettings conSettings = null; + LoggingAdapter log = null; + + http.bind("127.0.0.1", 8080, materializer()); + http.bind("127.0.0.1", 8080, connectionContext, materializer()); + http.bind("127.0.0.1", 8080, httpContext, materializer()); + http.bind("127.0.0.1", 8080, httpsContext, materializer()); + + final Flow handler = null; + http.bindAndHandle(handler, "127.0.0.1", 8080, materializer()); + http.bindAndHandle(handler, "127.0.0.1", 8080, httpsContext, materializer()); + + final Function> handler1 = null; + http.bindAndHandleAsync(handler1, "127.0.0.1", 8080, materializer()); + http.bindAndHandleAsync(handler1, "127.0.0.1", 8080, httpsContext, materializer()); + + final Function handler2 = null; + http.bindAndHandleSync(handler2, "127.0.0.1", 8080, materializer()); + http.bindAndHandleSync(handler2, "127.0.0.1", 8080, httpsContext, materializer()); + + final HttpRequest handler3 = null; + http.singleRequest(handler3, materializer()); + http.singleRequest(handler3, httpsContext, materializer()); + http.singleRequest(handler3, httpsContext, conSettings, log, materializer()); + + http.outgoingConnection("akka.io"); + http.outgoingConnection("akka.io:8080"); + http.outgoingConnection("https://akka.io"); + http.outgoingConnection("https://akka.io:8081"); + + http.outgoingConnection(toHost("akka.io")); + http.outgoingConnection(toHost("akka.io", 8080)); + http.outgoingConnection(toHost("https://akka.io")); + http.outgoingConnection(toHostHttps("akka.io")); // default ssl context (ssl-config) + http.outgoingConnection(toHostHttps("ssh://akka.io")); // throws, we explicitly require https or "" + http.outgoingConnection(toHostHttps("akka.io", 8081).withCustomHttpsContext(httpsContext)); + http.outgoingConnection(toHostHttps("akka.io", 8081).withCustomHttpsContext(httpsContext).withDefaultHttpsContext()); + http.outgoingConnection(toHostHttps("akka.io", 8081).withCustomHttpsContext(httpsContext).withDefaultHttpsContext()); + + // in future we can add modify(context -> Context) to "keep ssl-config defaults, but tweak them in code) + + http.newHostConnectionPool("akka.io", materializer()); + http.newHostConnectionPool("https://akka.io", materializer()); + http.newHostConnectionPool("https://akka.io:8080", materializer()); + http.newHostConnectionPool(toHost("akka.io"), materializer()); + http.newHostConnectionPool(toHostHttps("ftp://akka.io"), materializer()); // throws, we explicitly require https or "" + 
http.newHostConnectionPool(toHostHttps("https://akka.io:2222"), materializer()); + http.newHostConnectionPool(toHostHttps("akka.io"), materializer()); + http.newHostConnectionPool(toHost(""), conSettings, log, materializer()); + + + http.cachedHostConnectionPool("akka.io", materializer()); + http.cachedHostConnectionPool("https://akka.io", materializer()); + http.cachedHostConnectionPool("https://akka.io:8080", materializer()); + http.cachedHostConnectionPool(toHost("akka.io"), materializer()); + http.cachedHostConnectionPool(toHostHttps("smtp://akka.io"), materializer()); // throws, we explicitly require https or "" + http.cachedHostConnectionPool(toHostHttps("https://akka.io:2222"), materializer()); + http.cachedHostConnectionPool(toHostHttps("akka.io"), materializer()); + http.cachedHostConnectionPool(toHost("akka.io"), conSettings, log, materializer()); + + http.superPool(materializer()); + http.superPool(conSettings, log, materializer()); + http.superPool(conSettings, httpsContext, log, materializer()); + + final ConnectWithHttps connect = toHostHttps("akka.io", 8081).withCustomHttpsContext(httpsContext).withDefaultHttpsContext(); + connect.effectiveConnectionContext(http.defaultClientHttpsContext()); // usage by us internally + } +} \ No newline at end of file diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java index e5a72c777f..9c36bb6ed9 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/CompleteTest.java @@ -1,13 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server; import org.junit.Test; -import java.util.concurrent.Callable; -import akka.dispatch.Futures; +import java.util.concurrent.CompletableFuture; + import akka.http.javadsl.testkit.*; import akka.http.javadsl.marshallers.jackson.Jackson; @@ -52,13 +52,10 @@ public class CompleteTest extends JUnitRouteTest { Handler2 slowCalc = new Handler2() { @Override public RouteResult apply(final RequestContext ctx, final Integer x, final Integer y) { - return ctx.completeWith(Futures.future(new Callable() { - @Override - public RouteResult call() throws Exception { - int result = x + y; - return ctx.complete(String.format("%d + %d = %d",x, y, result)); - } - }, executionContext())); + return ctx.completeWith(CompletableFuture.supplyAsync(() -> { + int result = x + y; + return ctx.complete(String.format("%d + %d = %d",x, y, result)); + }, ctx.executionContext())); } }; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java index 20b51c50e4..1568e15573 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/HandlerBindingTest.java @@ -1,149 +1,83 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server; -import org.junit.Test; - import akka.http.scaladsl.model.HttpRequest; +import org.junit.Test; import akka.http.javadsl.testkit.*; import akka.http.javadsl.server.values.*; public class HandlerBindingTest extends JUnitRouteTest { + Parameter aParam = Parameters.intValue("a"); + Parameter bParam = Parameters.intValue("b"); + Parameter cParam = Parameters.intValue("c"); + Parameter dParam = Parameters.intValue("d"); + @Test public void testHandlerWithoutExtractions() { - Route route = handleWith( - new Handler() { - @Override - public RouteResult apply(RequestContext ctx) { - return ctx.complete("Ok"); - } - } - ); - runRoute(route, HttpRequest.GET("/")) - .assertEntity("Ok"); - } - @Test - public void testHandlerWithSomeExtractions() { - final Parameter a = Parameters.intValue("a"); - final Parameter b = Parameters.intValue("b"); - - Route route = handleWith( - new Handler() { - @Override - public RouteResult apply(RequestContext ctx) { - return ctx.complete("Ok a:" + a.get(ctx) + " b:" + b.get(ctx)); - } - }, a, b - ); - runRoute(route, HttpRequest.GET("?a=23&b=42")) - .assertEntity("Ok a:23 b:42"); - } - @Test - public void testHandlerIfExtractionFails() { - final Parameter a = Parameters.intValue("a"); - - Route route = handleWith( - new Handler() { - @Override - public RouteResult apply(RequestContext ctx) { - return ctx.complete("Ok " + a.get(ctx)); - } - }, a - ); - runRoute(route, HttpRequest.GET("/")) - .assertStatusCode(404) - .assertEntity("Request is missing required query parameter 'a'"); + Route route = handleWith(ctx -> ctx.complete("Ok")); + TestResponse response = runRoute(route, HttpRequest.GET("/")); + response.assertEntity("Ok"); } @Test public void testHandler1() { - final Parameter a = Parameters.intValue("a"); - - Route route = handleWith1(a, - new Handler1() { - @Override - public RouteResult apply(RequestContext ctx, Integer a) { - return ctx.complete("Ok " + a); - } - } - ); - runRoute(route, HttpRequest.GET("?a=23")) - .assertStatusCode(200) - .assertEntity("Ok 23"); + Route route = handleWith1(aParam, (ctx, a) -> ctx.complete("Ok " + a)); + TestResponse response = runRoute(route, HttpRequest.GET("?a=23")); + response.assertStatusCode(200); + response.assertEntity("Ok 23"); } @Test public void testHandler2() { - Route route = handleWith2( - Parameters.intValue("a"), - Parameters.intValue("b"), - new Handler2() { - @Override - public RouteResult apply(RequestContext ctx, Integer a, Integer b) { - return ctx.complete("Sum: " + (a + b)); - } - } - ); - runRoute(route, HttpRequest.GET("?a=23&b=42")) - .assertStatusCode(200) - .assertEntity("Sum: 65"); + Route route = + handleWith2( + aParam, + bParam, + (ctx, a, b) -> ctx.complete("Sum: " + (a + b))); + TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42")); + response.assertStatusCode(200); + response.assertEntity("Sum: 65"); } @Test public void testHandler3() { - Route route = handleWith3( - Parameters.intValue("a"), - Parameters.intValue("b"), - Parameters.intValue("c"), - new Handler3() { - @Override - public RouteResult apply(RequestContext ctx, Integer a, Integer b, Integer c) { - return ctx.complete("Sum: " + (a + b + c)); - } - } - ); + Route route = + handleWith3( + aParam, + bParam, + cParam, + (ctx, a, b, c) -> ctx.complete("Sum: " + (a + b + c))); TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30")); response.assertStatusCode(200); response.assertEntity("Sum: 95"); } @Test public void testHandler4() { - Route route = handleWith4( - 
Parameters.intValue("a"), - Parameters.intValue("b"), - Parameters.intValue("c"), - Parameters.intValue("d"), - new Handler4() { - @Override - public RouteResult apply(RequestContext ctx, Integer a, Integer b, Integer c, Integer d) { - return ctx.complete("Sum: " + (a + b + c + d)); - } - } - ); - runRoute(route, HttpRequest.GET("?a=23&b=42&c=30&d=45")) - .assertStatusCode(200) - .assertEntity("Sum: 140"); + Route route = + handleWith4( + aParam, + bParam, + cParam, + dParam, + (ctx, a, b, c, d) -> ctx.complete("Sum: " + (a + b + c + d))); + TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30&d=45")); + response.assertStatusCode(200); + response.assertEntity("Sum: 140"); + } + public RouteResult sum(RequestContext ctx, int a, int b, int c, int d) { + return ctx.complete("Sum: "+(a + b + c + d)); } @Test - public void testReflectiveInstanceHandler() { - class Test { - public RouteResult negate(RequestContext ctx, int a) { - return ctx.complete("Negated: " + (- a)); - } - } - Route route = handleReflectively(new Test(), "negate", Parameters.intValue("a")); - runRoute(route, HttpRequest.GET("?a=23")) - .assertStatusCode(200) - .assertEntity("Negated: -23"); - } - - public static RouteResult squared(RequestContext ctx, int a) { - return ctx.complete("Squared: " + (a * a)); - } - @Test - public void testStaticReflectiveHandler() { - Route route = handleReflectively(HandlerBindingTest.class, "squared", Parameters.intValue("a")); - runRoute(route, HttpRequest.GET("?a=23")) - .assertStatusCode(200) - .assertEntity("Squared: 529"); + public void testHandler4MethodRef() { + Route route = + handleWith4( + aParam, + bParam, + cParam, + dParam, + this::sum); + TestResponse response = runRoute(route, HttpRequest.GET("?a=23&b=42&c=30&d=45")); + response.assertStatusCode(200); + response.assertEntity("Sum: 140"); } } diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/MarshallerTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/MarshallerTest.java index 81a83f4da7..4c35ab5aaf 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/MarshallerTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/MarshallerTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java index 81063d5358..e77b30ec9e 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/CodingDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.directives; @@ -29,9 +29,8 @@ public class CodingDirectivesTest extends JUnitRouteTest { } @AfterClass - public static void tearDown() { - system.shutdown(); - system.awaitTermination(); + public static void tearDown() throws Exception { + Await.result(system.terminate(), Duration.Inf()); system = null; } @@ -68,7 +67,7 @@ public class CodingDirectivesTest extends JUnitRouteTest { .assertHeaderExists(ContentEncoding.create(HttpEncodings.DEFLATE)); ByteString decompressed = - Await.result(Coder.Deflate.decode(response.entityBytes(), mat), Duration.apply(3, TimeUnit.SECONDS)); + Coder.Deflate.decode(response.entityBytes(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS); Assert.assertEquals("tester", decompressed.utf8String()); } @Test diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/ExecutionDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/ExecutionDirectivesTest.java index 1f9c3318d6..a898d888db 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/ExecutionDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/ExecutionDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/HostDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/HostDirectivesTest.java index 3def552483..c2ad65cd17 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/HostDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/HostDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/MiscDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/MiscDirectivesTest.java index d0ebd5c683..e2ba5413e7 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/MiscDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/MiscDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/PathDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/PathDirectivesTest.java index cb4cc3167d..725cbf2f88 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/PathDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/PathDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.directives; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java index af570fae28..97c9181289 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/RouteDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives; @@ -52,32 +52,19 @@ public class RouteDirectivesTest extends JUnitRouteTest { .withoutSizeLimit() .getDataBytes() .runWith(Sink.head(), ctx.materializer()) - .map(new Mapper() { - @Override - public RouteResult apply(ByteString s) { - return ctx.complete(s.utf8String()); - } - }, ctx.executionContext())); + .thenApplyAsync(s -> ctx.complete(s.utf8String()), ctx.executionContext())); } })), path("limit-5") .route( - handleWith(new Function() { - @Override - public RouteResult apply(final RequestContext ctx) throws Exception { + handleWith(ctx -> { final RequestEntity entity = ctx.request().entity(); return ctx.completeWith( entity .withSizeLimit(5) .getDataBytes() .runWith(Sink.head(), ctx.materializer()) - .map(new Mapper() { - @Override - public RouteResult apply(ByteString s) { - return ctx.complete(s.utf8String()); - } - }, ctx.executionContext())); - } + .thenApplyAsync(s -> ctx.complete(s.utf8String()), ctx.executionContext())); })) ); diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/SchemeDirectivesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/SchemeDirectivesTest.java index 45fe646a4c..34e9666504 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/SchemeDirectivesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/directives/SchemeDirectivesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/petstore/PetStoreAPITest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/petstore/PetStoreAPITest.java index 15b8d4bbe9..daae0f60ef 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/petstore/PetStoreAPITest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/petstore/PetStoreAPITest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.examples.petstore; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/simple/SimpleServerTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/simple/SimpleServerTest.java index b5b51c10dd..b55df18ae3 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/simple/SimpleServerTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/examples/simple/SimpleServerTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.examples.simple; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/CookiesTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/CookiesTest.java index 2bfc8c3400..906613b209 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/CookiesTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/CookiesTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/FormFieldsTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/FormFieldsTest.java index 886a9889ff..7982c87bfd 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/FormFieldsTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/FormFieldsTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values; @@ -10,11 +10,10 @@ import akka.http.javadsl.model.MediaTypes; import akka.http.javadsl.server.RequestVal; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.javadsl.testkit.TestRoute; -import akka.japi.Option; +import akka.japi.Pair; import org.junit.Test; -import java.util.AbstractMap; -import java.util.Map; +import java.util.Optional; public class FormFieldsTest extends JUnitRouteTest { static FormField stringParam = FormFields.stringValue("stringParam"); @@ -31,22 +30,23 @@ public class FormFieldsTest extends JUnitRouteTest { static FormField hexLongParam = FormFields.hexLongValue("hexLongParam"); static RequestVal nameWithDefault = FormFields.stringValue("nameWithDefault").withDefault("John Doe"); - static RequestVal> optionalIntParam = FormFields.intValue("optionalIntParam").optional(); + static RequestVal> optionalIntParam = FormFields.intValue("optionalIntParam").optional(); - private Map.Entry entry(String name, String value) { - return new AbstractMap.SimpleImmutableEntry(name, value); + private Pair param(String name, String value) { + return Pair.create(name, value); } - private HttpRequest urlEncodedRequest(Map.Entry... entries) { + @SafeVarargs + final private HttpRequest urlEncodedRequest(Pair... params) { StringBuilder sb = new StringBuilder(); boolean next = false; - for (Map.Entry entry: entries) { + for (Pair param: params) { if (next) { sb.append('&'); - next = true; } - sb.append(entry.getKey()); + next = true; + sb.append(param.first()); sb.append('='); - sb.append(entry.getValue()); + sb.append(param.second()); } return @@ -54,7 +54,7 @@ public class FormFieldsTest extends JUnitRouteTest { .withEntity(MediaTypes.APPLICATION_X_WWW_FORM_URLENCODED.toContentType(HttpCharsets.UTF_8), sb.toString()); } private HttpRequest singleParameterUrlEncodedRequest(String name, String value) { - return urlEncodedRequest(entry(name, value)); + return urlEncodedRequest(param(name, value)); } @Test diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HeadersTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HeadersTest.java index 402766c276..8a6ad7596b 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HeadersTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HeadersTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.values; diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java index 0369d94d97..51c71d0898 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/HttpBasicAuthenticationTest.java @@ -1,12 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values; +import java.util.Optional; +import java.util.concurrent.CompletionStage; + import org.junit.Test; -import scala.Option; -import scala.concurrent.Future; import akka.http.javadsl.server.*; import akka.http.javadsl.model.HttpRequest; @@ -17,7 +18,7 @@ public class HttpBasicAuthenticationTest extends JUnitRouteTest { HttpBasicAuthenticator authenticatedUser = new HttpBasicAuthenticator("test-realm") { @Override - public Future> authenticate(BasicCredentials credentials) { + public CompletionStage> authenticate(BasicCredentials credentials) { if (credentials.available() && // no anonymous access credentials.identifier().equals("sina") && credentials.verify("1234")) @@ -29,7 +30,7 @@ public class HttpBasicAuthenticationTest extends JUnitRouteTest { OAuth2Authenticator authenticatedToken = new OAuth2Authenticator("test-realm") { @Override - public Future> authenticate(OAuth2Credentials credentials) { + public CompletionStage> authenticate(OAuth2Credentials credentials) { if (credentials.available() && // no anonymous access credentials.identifier().equals("myToken") && credentials.verify("myToken")) diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/ParametersTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/ParametersTest.java index 3dce146173..e8322f3b8c 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/ParametersTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/ParametersTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values; @@ -11,7 +11,6 @@ import akka.http.javadsl.server.RequestVal; import akka.http.javadsl.server.RouteResult; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.javadsl.testkit.TestRoute; -import akka.japi.Option; import org.junit.Test; import java.util.*; @@ -31,7 +30,7 @@ public class ParametersTest extends JUnitRouteTest { static Parameter hexLongParam = Parameters.hexLongValue("hexLongParam"); static RequestVal nameWithDefault = Parameters.stringValue("nameWithDefault").withDefault("John Doe"); - static RequestVal> optionalIntParam = Parameters.intValue("optionalIntParam").optional(); + static RequestVal> optionalIntParam = Parameters.intValue("optionalIntParam").optional(); static RequestVal> paramMap = Parameters.asMap(); static RequestVal>> paramMultiMap = Parameters.asMultiMap(); diff --git a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/RequestValTest.java b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/RequestValTest.java index 4e1b1d3279..a903a39ad2 100644 --- a/akka-http-tests/src/test/java/akka/http/javadsl/server/values/RequestValTest.java +++ b/akka-http-tests/src/test/java/akka/http/javadsl/server/values/RequestValTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values; diff --git a/akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java b/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java similarity index 87% rename from akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java rename to akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java index 9847b85bb7..090021d4fa 100644 --- a/akka-http-tests-java8/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java +++ b/akka-http-tests/src/test/java/docs/http/javadsl/server/HandlerExampleDocTest.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package docs.http.javadsl.server; @@ -13,9 +13,11 @@ import akka.http.javadsl.server.values.Parameters; import akka.http.javadsl.server.values.PathMatchers; import akka.http.javadsl.testkit.JUnitRouteTest; import akka.http.javadsl.testkit.TestRoute; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + import org.junit.Test; -import scala.concurrent.ExecutionContext; -import scala.concurrent.Future; public class HandlerExampleDocTest extends JUnitRouteTest { @Test @@ -236,12 +238,12 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-example-full //#async-service-definition class CalculatorService { - public Future multiply(final int x, final int y, ExecutionContext ec) { - return akka.dispatch.Futures.future(() -> x * y, ec); + public CompletionStage multiply(final int x, final int y) { + return CompletableFuture.supplyAsync(() -> x * y); } - public Future add(final int x, final int y, ExecutionContext ec) { - return akka.dispatch.Futures.future(() -> x + y, ec); + public CompletionStage add(final int x, final int y) { + return CompletableFuture.supplyAsync(() -> x + y); } } //#async-service-definition @@ -253,15 +255,10 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-handler-1 // would probably be injected or passed at construction time in real code CalculatorService calculatorService = new CalculatorService(); - public Future multiplyAsync(final RequestContext ctx, int x, int y) { - Future result = calculatorService.multiply(x, y, ctx.executionContext()); - Mapper func = new Mapper() { - @Override - public RouteResult apply(Integer product) { - return ctx.complete("x * y = " + product); - } - }; // cannot be written as lambda, unfortunately - return result.map(func, ctx.executionContext()); + public CompletionStage multiplyAsync(final RequestContext ctx, int x, int y) { + CompletionStage result = calculatorService.multiply(x, y); + return result.thenApplyAsync(product -> ctx.complete("x * y = " + product), + ctx.executionContext()); } Route multiplyAsyncRoute = path("multiply").route( @@ -271,14 +268,9 @@ public class HandlerExampleDocTest extends JUnitRouteTest { //#async-handler-2 public RouteResult addAsync(final RequestContext ctx, int x, int y) { - Future result = calculatorService.add(x, y, ctx.executionContext()); - Mapper func = new Mapper() { - @Override - public RouteResult apply(Integer sum) { - return ctx.complete("x + y = " + sum); - } - }; // cannot be written as lambda, unfortunately - return ctx.completeWith(result.map(func, ctx.executionContext())); + CompletionStage result = calculatorService.add(x, y); + return ctx.completeWith(result.thenApplyAsync(sum -> ctx.complete("x + y = " + sum), + 
ctx.executionContext())); } Route addAsyncRoute = path("add").route( diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/FormDataSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/FormDataSpec.scala index 9b58041dc4..e40cf7ac2b 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/FormDataSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/FormDataSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl @@ -12,6 +12,7 @@ import akka.stream.ActorMaterializer import akka.http.scaladsl.unmarshalling.Unmarshal import akka.http.scaladsl.marshalling.Marshal import akka.http.scaladsl.model._ +import scala.concurrent.Await class FormDataSpec extends WordSpec with Matchers with ScalaFutures with BeforeAndAfterAll { implicit val system = ActorSystem(getClass.getSimpleName) @@ -20,6 +21,8 @@ class FormDataSpec extends WordSpec with Matchers with ScalaFutures with BeforeA val formData = FormData(Map("surname" -> "Smith", "age" -> "42")) + implicit val patience = PatienceConfig(3.seconds) + "The FormData infrastructure" should { "properly round-trip the fields of www-urlencoded forms" in { Marshal(formData).to[HttpEntity] @@ -36,7 +39,6 @@ class FormDataSpec extends WordSpec with Matchers with ScalaFutures with BeforeA } override def afterAll() = { - system.shutdown() - system.awaitTermination(10.seconds) + Await.result(system.terminate(), 10.seconds) } } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/TestUtils.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/TestUtils.scala index bbb3d4520f..4008bb2385 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/TestUtils.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/TestUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CodecSpecSupport.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CodecSpecSupport.scala index 3e4a6929f9..09e0ed6681 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CodecSpecSupport.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CodecSpecSupport.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding @@ -9,6 +9,7 @@ import org.scalatest.{ Suite, BeforeAndAfterAll, Matchers } import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.util.ByteString +import scala.concurrent.Await trait CodecSpecSupport extends Matchers with BeforeAndAfterAll { self: Suite ⇒ @@ -73,7 +74,6 @@ est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscin implicit val materializer = ActorMaterializer() override def afterAll() = { - system.shutdown() - system.awaitTermination(10.seconds) + Await.result(system.terminate(), 10.seconds) } } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CoderSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CoderSpec.scala index 4927987f78..e731ebbe26 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CoderSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/CoderSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.coding @@ -7,6 +7,8 @@ package akka.http.scaladsl.coding import java.io.{ OutputStream, InputStream, ByteArrayInputStream, ByteArrayOutputStream } import java.util import java.util.zip.DataFormatException +import akka.NotUsed + import scala.annotation.tailrec import scala.concurrent.duration._ import scala.concurrent.Await @@ -182,7 +184,7 @@ abstract class CoderSpec extends WordSpec with CodecSpecSupport with Inspectors ByteString(output.toByteArray) } - def decodeChunks(input: Source[ByteString, Unit]): ByteString = + def decodeChunks(input: Source[ByteString, NotUsed]): ByteString = input.via(Coder.decoderFlow).join.awaitResult(3.seconds) def decodeFromIterator(iterator: () ⇒ Iterator[ByteString]): ByteString = diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DecoderSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DecoderSpec.scala index 8b3dd3d102..3b51d767ec 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DecoderSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DecoderSpec.scala @@ -1,13 +1,16 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding +import akka.stream.{ Attributes, FlowShape } +import akka.stream.impl.fusing.GraphStages.SimpleLinearGraphStage + import scala.concurrent.duration._ import org.scalatest.WordSpec import akka.util.ByteString -import akka.stream.stage.{ SyncDirective, Context, PushStage, Stage } +import akka.stream.stage._ import akka.http.scaladsl.model._ import akka.http.impl.util._ import headers._ @@ -34,10 +37,17 @@ class DecoderSpec extends WordSpec with CodecSpecSupport { case object DummyDecoder extends StreamDecoder { val encoding = HttpEncodings.compress - def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ Stage[ByteString, ByteString] = - () ⇒ new PushStage[ByteString, ByteString] { - def onPush(elem: ByteString, ctx: Context[ByteString]): SyncDirective = - ctx.push(elem ++ ByteString("compressed")) + override def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ GraphStage[FlowShape[ByteString, ByteString]] = + () ⇒ new SimpleLinearGraphStage[ByteString] { + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + setHandler(in, new InHandler { + override def onPush(): Unit = push(out, grab(in) ++ ByteString("compressed")) + }) + setHandler(out, new OutHandler { + override def onPull(): Unit = pull(in) + }) + } } } + } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DeflateSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DeflateSpec.scala index 798b9320c0..22894d2bc1 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DeflateSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/DeflateSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/EncoderSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/EncoderSpec.scala index bcbd4bd5d2..b0d15e9bb6 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/EncoderSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/EncoderSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.coding diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala index 914f834f31..459b0cf88c 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/GzipSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding @@ -33,7 +33,6 @@ class GzipSpec extends CoderSpec { } "throw an error if compressed data is just missing the trailer at the end" in { def brokenCompress(payload: String) = Gzip.newCompressor.compress(ByteString(payload, "UTF-8")) - val ex = the[RuntimeException] thrownBy ourDecode(brokenCompress("abcdefghijkl")) ex.getCause.getMessage should equal("Truncated GZIP stream") } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/NoCodingSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/NoCodingSpec.scala index e1671a5410..4a4a8b488e 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/NoCodingSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/coding/NoCodingSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/JsonSupportSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/JsonSupportSpec.scala index 0edf33dd3b..675db90dfd 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/JsonSupportSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/JsonSupportSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshallers diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupportSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupportSpec.scala index 6a98de2252..62c56dec8c 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupportSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/sprayjson/SprayJsonSupportSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshallers.sprayjson diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupportSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupportSpec.scala index ef16eaa5c9..2a098bcb26 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupportSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshallers/xml/ScalaXmlSupportSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.marshallers.xml diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/ContentNegotiationSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/ContentNegotiationSpec.scala index 8fe3d5faad..5c54cd7de5 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/ContentNegotiationSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/ContentNegotiationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala index b833b3834e..4095e0bb3d 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling @@ -140,7 +140,7 @@ class MarshallingSpec extends FreeSpec with Matchers with BeforeAndAfterAll with } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() protected class FixedRandom extends java.util.Random { override def nextBytes(array: Array[Byte]): Unit = "my-stable-boundary".getBytes("UTF-8").copyToArray(array) diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/BasicRouteSpecs.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/BasicRouteSpecs.scala index 15b044db49..ecac41ce63 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/BasicRouteSpecs.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/BasicRouteSpecs.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ConnectionTestApp.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ConnectionTestApp.scala index 8d288020c7..7ce8786d76 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ConnectionTestApp.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ConnectionTestApp.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -92,7 +92,7 @@ object ConnectionTestApp { readLine() println("===================== \n\n" + system.asInstanceOf[ActorSystemImpl].printTree + "\n\n========================") readLine() - system.shutdown() + system.terminate() } } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/DontLeakActorsOnFailingConnectionSpecs.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/DontLeakActorsOnFailingConnectionSpecs.scala index f2b79ad422..4ed317a19d 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/DontLeakActorsOnFailingConnectionSpecs.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/DontLeakActorsOnFailingConnectionSpecs.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server @@ -95,8 +95,7 @@ class DontLeakActorsOnFailingConnectionSpecs extends WordSpecLike with Matchers } override def afterAll = { - system.shutdown() - system.awaitTermination(3.seconds) + Await.result(system.terminate(), 3.seconds) } } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ModeledCustomHeaderSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ModeledCustomHeaderSpec.scala index addf17f5e5..f0e37406b5 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ModeledCustomHeaderSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/ModeledCustomHeaderSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -14,22 +14,30 @@ object ModeledCustomHeaderSpec { //#modeled-api-key-custom-header object ApiTokenHeader extends ModeledCustomHeaderCompanion[ApiTokenHeader] { + def renderInRequests = false + def renderInResponses = false override val name = "apiKey" override def parse(value: String) = Try(new ApiTokenHeader(value)) } final class ApiTokenHeader(token: String) extends ModeledCustomHeader[ApiTokenHeader] { + def renderInRequests = false + def renderInResponses = false override val companion = ApiTokenHeader override def value: String = token } //#modeled-api-key-custom-header object DifferentHeader extends ModeledCustomHeaderCompanion[DifferentHeader] { + def renderInRequests = false + def renderInResponses = false override val name = "different" override def parse(value: String) = if (value contains " ") Failure(new Exception("Contains illegal whitespace!")) else Success(new DifferentHeader(value)) } final class DifferentHeader(token: String) extends ModeledCustomHeader[DifferentHeader] { + def renderInRequests = false + def renderInResponses = false override val companion = DifferentHeader override def value = token } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/RoutingSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/RoutingSpec.scala index acf41818c1..fbeac7d818 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/RoutingSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/RoutingSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/StreamingResponseSpecs.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/StreamingResponseSpecs.scala index b5c7a5d37e..6b7b0d27dc 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/StreamingResponseSpecs.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/StreamingResponseSpecs.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TcpLeakApp.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TcpLeakApp.scala index 0afb6cff95..e1cfb2392d 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TcpLeakApp.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TcpLeakApp.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server @@ -43,5 +43,5 @@ object TcpLeakApp extends App { } readLine() - system.shutdown() + system.terminate() } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TestServer.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TestServer.scala index f7ee4efaad..c62efd4da3 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TestServer.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/TestServer.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -51,7 +51,7 @@ object TestServer extends App { println(s"Server online at http://localhost:8080/\nPress RETURN to stop...") Console.readLine() - bindingFuture.flatMap(_.unbind()).onComplete(_ ⇒ system.shutdown()) + bindingFuture.flatMap(_.unbind()).onComplete(_ ⇒ system.terminate()) lazy val index = diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala index 44f60b1a66..2dda1b7be7 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/BasicDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CacheConditionDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CacheConditionDirectivesSpec.scala index f18b017982..4ea7495833 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CacheConditionDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CacheConditionDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CodingDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CodingDirectivesSpec.scala index 1a6f92debc..b3ad18bb2b 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CodingDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CodingDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CookieDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CookieDirectivesSpec.scala index 915f913296..4c6435901f 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CookieDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/CookieDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/DebuggingDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/DebuggingDirectivesSpec.scala index 27d174e322..28b321d62f 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/DebuggingDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/DebuggingDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ExecutionDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ExecutionDirectivesSpec.scala index f5891ecdcb..ae24ef8efd 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ExecutionDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ExecutionDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectivesSpec.scala index ac31b1cb17..6d61f08808 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectivesSpec.scala @@ -1,11 +1,14 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server package directives import java.io.File + +import akka.http.scaladsl.settings.RoutingSettings + import scala.concurrent.duration._ import scala.concurrent.{ ExecutionContext, Future } import scala.util.Properties @@ -229,7 +232,7 @@ class FileAndResourceDirectivesSpec extends RoutingSpec with Inspectors with Ins val base = new File(getClass.getClassLoader.getResource("").toURI).getPath new File(base, "subDirectory/emptySub").mkdir() def eraseDateTime(s: String) = s.replaceAll("""\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d""", "xxxx-xx-xx xx:xx:xx") - implicit val settings = RoutingSettings.default.copy(renderVanityFooter = false) + implicit val settings = RoutingSettings.default.withRenderVanityFooter(false) "properly render a simple directory" in { Get() ~> listDirectoryContents(base + "/someDir") ~> check { diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileUploadDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileUploadDirectivesSpec.scala index 22f41b0b0d..c3b5539351 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileUploadDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FileUploadDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FormFieldDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FormFieldDirectivesSpec.scala index f92f634066..219cdb2eb8 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FormFieldDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FormFieldDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -162,4 +162,33 @@ class FormFieldDirectivesSpec extends RoutingSpec { } ~> check { responseAs[String] === "List(3, 10)" } } } + + "The 'formFieldMap' directive" should { + "extract fields with different keys" in { + Post("/", FormData("age" -> "42", "numberA" -> "3", "numberB" -> "5")) ~> { + formFieldMap { echoComplete } + } ~> check { responseAs[String] shouldEqual "Map(age -> 42, numberA -> 3, numberB -> 5)" } + } + } + + "The 'formFieldSeq' directive" should { + "extract all fields" in { + Post("/", FormData("age" -> "42", "number" -> "3", "number" -> "5")) ~> { + formFieldSeq { echoComplete } + } ~> check { responseAs[String] shouldEqual "Vector((age,42), (number,3), (number,5))" } + } + "produce empty Seq when FormData is empty" in { + Post("/", FormData.Empty) ~> { + formFieldSeq { echoComplete } + } ~> check { responseAs[String] shouldEqual "Vector()" } + } + } + + "The 'formFieldMultiMap' directive" should { + "extract fields with different keys (with duplicates)" in { + Post("/", FormData("age" -> "42", "number" -> "3", "number" -> "5")) ~> { + formFieldMultiMap { echoComplete } + } ~> check { responseAs[String] shouldEqual "Map(age -> List(42), number -> List(5, 3))" } + } + } } diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FutureDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FutureDirectivesSpec.scala index 2dc05f770d..85286aabf2 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FutureDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/FutureDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HeaderDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HeaderDirectivesSpec.scala index a030f1dd62..0cc012d83a 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HeaderDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HeaderDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HostDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HostDirectivesSpec.scala index 008857897a..c7defd957b 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HostDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/HostDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MarshallingDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MarshallingDirectivesSpec.scala index 776195f46f..67bd4c5ebf 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MarshallingDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MarshallingDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MethodDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MethodDirectivesSpec.scala index a8d84eae14..c11037b208 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MethodDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MethodDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MiscDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MiscDirectivesSpec.scala index 74f55dd8c6..9af72926c7 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MiscDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/MiscDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ParameterDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ParameterDirectivesSpec.scala index 5dc6a5cfc8..033ec5fcc7 100755 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ParameterDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/ParameterDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala index 5b2dfe11ae..42cd30713c 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/PathDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RangeDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RangeDirectivesSpec.scala index 11e5d59ac7..572970e7b0 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RangeDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RangeDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -17,7 +17,7 @@ import org.scalatest.{ Inside, Inspectors } class RangeDirectivesSpec extends RoutingSpec with Inspectors with Inside { lazy val wrs = - mapSettings(_.copy(rangeCountLimit = 10, rangeCoalescingThreshold = 1L)) & + mapSettings(_.withRangeCountLimit(10).withRangeCoalescingThreshold(1L)) & withRangeSupport def bytes(length: Byte) = Array.tabulate[Byte](length)(_.toByte) diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RespondWithDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RespondWithDirectivesSpec.scala index abdd3dcd56..36f3b0fdc9 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RespondWithDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RespondWithDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RouteDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RouteDirectivesSpec.scala index c46c0cf3c9..cdbdbe3a40 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RouteDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/RouteDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SchemeDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SchemeDirectivesSpec.scala index 8ae590dd41..7676b6269e 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SchemeDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SchemeDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SecurityDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SecurityDirectivesSpec.scala index a6ae7308e0..566d6458c8 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SecurityDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/SecurityDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebsocketDirectivesSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebSocketDirectivesSpec.scala similarity index 80% rename from akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebsocketDirectivesSpec.scala rename to akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebSocketDirectivesSpec.scala index daa6f4b712..d133151e88 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebsocketDirectivesSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/directives/WebSocketDirectivesSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives @@ -14,16 +14,16 @@ import akka.http.scaladsl.testkit.WSProbe import akka.http.scaladsl.model.headers.`Sec-WebSocket-Protocol` import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.model.ws._ -import akka.http.scaladsl.server.{ UnsupportedWebsocketSubprotocolRejection, ExpectedWebsocketRequestRejection, Route, RoutingSpec } +import akka.http.scaladsl.server.{ UnsupportedWebSocketSubprotocolRejection, ExpectedWebSocketRequestRejection, Route, RoutingSpec } -class WebsocketDirectivesSpec extends RoutingSpec { - "the handleWebsocketMessages directive" should { +class WebSocketDirectivesSpec extends RoutingSpec { + "the handleWebSocketMessages directive" should { "handle websocket requests" in { val wsClient = WSProbe() WS("http://localhost/", wsClient.flow) ~> websocketRoute ~> check { - isWebsocketUpgrade shouldEqual true + isWebSocketUpgrade shouldEqual true wsClient.sendMessage("Peter") wsClient.expectMessage("Hello Peter!") @@ -42,7 +42,7 @@ class WebsocketDirectivesSpec extends RoutingSpec { WS("http://localhost/", wsClient.flow, List("other", "echo", "greeter")) ~> websocketMultipleProtocolRoute ~> check { - expectWebsocketUpgradeWithProtocol { protocol ⇒ + expectWebSocketUpgradeWithProtocol { protocol ⇒ protocol shouldEqual "echo" wsClient.sendMessage("Peter") @@ -62,7 +62,7 @@ class WebsocketDirectivesSpec extends RoutingSpec { "reject websocket requests if no subprotocol matches" in { WS("http://localhost/", Flow[Message], List("other")) ~> websocketMultipleProtocolRoute ~> check { rejections.collect { - case UnsupportedWebsocketSubprotocolRejection(p) ⇒ p + case UnsupportedWebSocketSubprotocolRejection(p) ⇒ p }.toSet shouldEqual Set("greeter", "echo") } @@ -74,20 +74,20 @@ class WebsocketDirectivesSpec extends RoutingSpec { } "reject non-websocket requests" in { Get("http://localhost/") ~> websocketRoute ~> check { - rejection shouldEqual ExpectedWebsocketRequestRejection + rejection shouldEqual ExpectedWebSocketRequestRejection } Get("http://localhost/") ~> Route.seal(websocketRoute) ~> check { status shouldEqual StatusCodes.BadRequest - responseAs[String] shouldEqual "Expected Websocket Upgrade request" + responseAs[String] shouldEqual "Expected WebSocket Upgrade request" } } } - def websocketRoute = handleWebsocketMessages(greeter) + def websocketRoute = handleWebSocketMessages(greeter) def websocketMultipleProtocolRoute = - handleWebsocketMessagesForProtocol(echo, "echo") ~ - handleWebsocketMessagesForProtocol(greeter, "greeter") + handleWebSocketMessagesForProtocol(echo, "echo") ~ + handleWebSocketMessagesForProtocol(greeter, "greeter") def greeter: Flow[Message, Message, Any] = 
Flow[Message].mapConcat { diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/util/TupleOpsSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/util/TupleOpsSpec.scala index 276f32b44b..3831896494 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/server/util/TupleOpsSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/server/util/TupleOpsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallersSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallersSpec.scala index ee29686d45..6011ef9dca 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallersSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallersSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.unmarshalling @@ -298,7 +298,7 @@ class MultipartUnmarshallersSpec extends FreeSpec with Matchers with BeforeAndAf } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() def haveParts[T <: Multipart](parts: Multipart.BodyPart.Strict*): Matcher[Future[T]] = equal(parts).matcher[Seq[Multipart.BodyPart.Strict]] compose { x ⇒ diff --git a/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/UnmarshallingSpec.scala b/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/UnmarshallingSpec.scala index d794935dd9..51d5e269ec 100644 --- a/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/UnmarshallingSpec.scala +++ b/akka-http-tests/src/test/scala/akka/http/scaladsl/unmarshalling/UnmarshallingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.unmarshalling @@ -23,5 +23,5 @@ class UnmarshallingSpec extends FreeSpec with Matchers with BeforeAndAfterAll wi Unmarshal(HttpEntity("árvíztűrő ütvefúrógép")).to[Array[Char]] should evaluateTo("árvíztűrő ütvefúrógép".toCharArray) } } - override def afterAll() = system.shutdown() + override def afterAll() = system.terminate() } diff --git a/akka-http/build.sbt b/akka-http/build.sbt new file mode 100644 index 0000000000..b73b827a90 --- /dev/null +++ b/akka-http/build.sbt @@ -0,0 +1,13 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys +import spray.boilerplate.BoilerplatePlugin._ + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +OSGi.http +Dependencies.http +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-http").value +Boilerplate.settings + +scalacOptions in Compile += "-language:_" diff --git a/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template b/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template index 1bcf458e23..7cf3cea93f 100644 --- a/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template +++ b/akka-http/src/main/boilerplate/akka/http/javadsl/server/Handlers.scala.template @@ -3,7 +3,7 @@ */ package akka.http.javadsl.server -import scala.concurrent.Future +import java.util.concurrent.CompletionStage [..21#/** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) @@ -21,7 +21,7 @@ trait Handler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1 } /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) - * and returns a [[scala.concurrent.Future]] of [[RouteResult]] with the response (or the rejection). + * and returns a [[java.util.concurrent.CompletionStage]] of [[RouteResult]] with the response (or the rejection). * * A route `Handler1` is a convenience class that extends Function of arity `N+1`, * since it needs to pass along the [[RequestContext]] as well, yet for readability @@ -30,8 +30,8 @@ trait Handler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1 * Use the methods in [[RequestContext]] to create a [[RouteResult]]. * A handler MUST NOT return `null` as the result. 
*/ -trait AsyncHandler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1#], Future[RouteResult]] { - override def apply(ctx: RequestContext, [#t1: T1#]): Future[RouteResult] +trait AsyncHandler1[[#T1#]] extends akka.japi.function.Function2[RequestContext, [#T1#], CompletionStage[RouteResult]] { + override def apply(ctx: RequestContext, [#t1: T1#]): CompletionStage[RouteResult] }# ] diff --git a/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template b/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template index 105aceaa47..5404d0af7d 100644 --- a/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template +++ b/akka-http/src/main/boilerplate/akka/http/javadsl/server/directives/BasicDirectivesBase.scala.template @@ -8,7 +8,7 @@ import akka.http.javadsl.server.RouteResult import akka.http.javadsl.server.RequestVal import akka.http.javadsl.server.RequestContext import scala.annotation.varargs -import scala.concurrent.Future +import java.util.concurrent.CompletionStage abstract class BasicDirectivesBase { /** INTERNAL API */ @@ -24,12 +24,12 @@ abstract class BasicDirectivesBase { handle(extractions: _*)(ctx => handler(ctx)) /** - * Handles the route using the given function, completing the route once the returned [[scala.concurrent.Future]] completes. + * Handles the route using the given function, completing the route once the returned [[java.util.concurrent.CompletionStage]] completes. * The function MUST NOT return `null`. * * If the `handler` is accessing request values these must be passed to this method in order for extraction to be performed. */ - @varargs def handleWithAsync(handler: akka.japi.function.Function[RequestContext, Future[RouteResult]], extractions: RequestVal[_]*): Route = + @varargs def handleWithAsync(handler: akka.japi.function.Function[RequestContext, CompletionStage[RouteResult]], extractions: RequestVal[_]*): Route = handle(extractions: _*)(ctx => ctx.completeWith(handler(ctx))) @@ -45,7 +45,7 @@ abstract class BasicDirectivesBase { handle([#v1#])(ctx => handler(ctx, [#v1.get(ctx)#])) /** - * Handles the route using the given function, completing the route once the returned [[scala.concurrent.Future]] completes. + * Handles the route using the given function, completing the route once the returned [[java.util.concurrent.CompletionStage]] completes. * The function MUST NOT return `null`. * * For convenience, using Java 8 lambda expressions as the `handler` function is recommended. @@ -53,7 +53,7 @@ abstract class BasicDirectivesBase { * [[akka.japi.function.Function2]] should prove to be useful, as it matches naming-wise with the number of * handled request values. 
*/ - def handleWithAsync1[[#T1#]]([#v1: RequestVal[T1]#], handler: akka.japi.function.Function2[RequestContext, [#T1#], Future[RouteResult]]): Route = + def handleWithAsync1[[#T1#]]([#v1: RequestVal[T1]#], handler: akka.japi.function.Function2[RequestContext, [#T1#], CompletionStage[RouteResult]]): Route = handle([#v1#])(ctx => ctx.completeWith(handler(ctx, [#v1.get(ctx)#])))# ] diff --git a/akka-http/src/main/java/akka/http/javadsl/server/AbstractDirective.java b/akka-http/src/main/java/akka/http/javadsl/server/AbstractDirective.java index 906fe7e14f..5f3e344937 100644 --- a/akka-http/src/main/java/akka/http/javadsl/server/AbstractDirective.java +++ b/akka-http/src/main/java/akka/http/javadsl/server/AbstractDirective.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server; diff --git a/akka-http/src/main/java/akka/http/javadsl/server/Coder.java b/akka-http/src/main/java/akka/http/javadsl/server/Coder.java index 1c929961dd..3f0f10b077 100644 --- a/akka-http/src/main/java/akka/http/javadsl/server/Coder.java +++ b/akka-http/src/main/java/akka/http/javadsl/server/Coder.java @@ -1,15 +1,17 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server; +import java.util.concurrent.CompletionStage; + import akka.http.scaladsl.coding.Deflate$; import akka.http.scaladsl.coding.Gzip$; import akka.http.scaladsl.coding.NoCoding$; import akka.stream.Materializer; import akka.util.ByteString; -import scala.concurrent.Future; +import scala.compat.java8.FutureConverters; /** * A coder is an implementation of the predefined encoders/decoders defined for HTTP. @@ -26,8 +28,8 @@ public enum Coder { public ByteString encode(ByteString input) { return underlying.encode(input); } - public Future decode(ByteString input, Materializer mat) { - return underlying.decode(input, mat); + public CompletionStage decode(ByteString input, Materializer mat) { + return FutureConverters.toJava(underlying.decode(input, mat)); } public akka.http.scaladsl.coding.Coder _underlyingScalaCoder() { return underlying; diff --git a/akka-http/src/main/java/akka/http/javadsl/server/Directive.java b/akka-http/src/main/java/akka/http/javadsl/server/Directive.java index 8612754f03..80c9bedcef 100644 --- a/akka-http/src/main/java/akka/http/javadsl/server/Directive.java +++ b/akka-http/src/main/java/akka/http/javadsl/server/Directive.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server; diff --git a/akka-http/src/main/scala/akka/http/impl/server/CookieImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/CookieImpl.scala index 5b0623fcfc..2b09542526 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/CookieImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/CookieImpl.scala @@ -1,29 +1,30 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server +import java.util.Optional + import akka.http.javadsl.model.headers.HttpCookie import akka.http.javadsl.server.values.Cookie import akka.http.javadsl.server.{ Directive, Directives, RequestVal } import akka.http.scaladsl.server.Directive1 import akka.http.scaladsl.server.directives.CookieDirectives._ -import akka.japi.Option import akka.http.impl.util.JavaMapping.Implicits._ -case class CookieImpl(name: String, domain: Option[String] = None, path: Option[String] = None) extends Cookie { - def withDomain(domain: String): Cookie = copy(domain = Option.some(domain)) - def withPath(path: String): Cookie = copy(path = Option.some(path)) +case class CookieImpl(name: String, domain: Optional[String] = Optional.empty[String], path: Optional[String] = Optional.empty[String]) extends Cookie { + def withDomain(domain: String): Cookie = copy(domain = Optional.of(domain)) + def withPath(path: String): Cookie = copy(path = Optional.of(path)) val value: RequestVal[String] = new StandaloneExtractionImpl[String] { def directive: Directive1[String] = cookie(name).map(_.value) } - def optionalValue(): RequestVal[Option[String]] = - new StandaloneExtractionImpl[Option[String]] { - def directive: Directive1[Option[String]] = optionalCookie(name).map(_.map(_.value).asJava) + def optionalValue(): RequestVal[Optional[String]] = + new StandaloneExtractionImpl[Optional[String]] { + def directive: Directive1[Optional[String]] = optionalCookie(name).map(_.map(_.value).asJava) } def set(value: String): Directive = diff --git a/akka-http/src/main/scala/akka/http/impl/server/CustomRejectionWrapper.scala b/akka-http/src/main/scala/akka/http/impl/server/CustomRejectionWrapper.scala index 70dce92ca4..1a5e4683b1 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/CustomRejectionWrapper.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/CustomRejectionWrapper.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/ExtractionImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/ExtractionImpl.scala index a9d88a4e13..50fabd4395 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/ExtractionImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/ExtractionImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/FormFieldImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/FormFieldImpl.scala index 03e1c204f5..ca99ebbd8e 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/FormFieldImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/FormFieldImpl.scala @@ -1,19 +1,22 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server +import java.util.Optional + import akka.http.javadsl.server.RequestVal import akka.http.javadsl.server.values.FormField import akka.http.scaladsl.common.{ StrictForm, NameUnmarshallerReceptacle, NameReceptacle } import akka.http.scaladsl.unmarshalling._ -import akka.japi.{ Option ⇒ JOption } import scala.reflect.ClassTag import akka.http.scaladsl.server.directives.FormFieldDirectives._ import akka.http.scaladsl.server.{ Directives, Directive1 } +import scala.compat.java8.OptionConverters._ + /** * INTERNAL API */ @@ -27,19 +30,19 @@ private[http] class FormFieldImpl[T, U](receptacle: NameReceptacle[T])( formField(receptacle).map(conv) } - def optional: RequestVal[JOption[U]] = - new StandaloneExtractionImpl[JOption[U]] { - def directive: Directive1[JOption[U]] = optionalDirective + def optional: RequestVal[Optional[U]] = + new StandaloneExtractionImpl[Optional[U]] { + def directive: Directive1[Optional[U]] = optionalDirective } - private def optionalDirective: Directive1[JOption[U]] = + private def optionalDirective: Directive1[Optional[U]] = extractMaterializer.flatMap { implicit fm ⇒ - formField(receptacle.?).map(v ⇒ JOption.fromScalaOption(v.map(conv))) + formField(receptacle.?).map(v ⇒ v.map(conv).asJava) } def withDefault(defaultValue: U): RequestVal[U] = new StandaloneExtractionImpl[U] { - def directive: Directive1[U] = optionalDirective.map(_.getOrElse(defaultValue)) + def directive: Directive1[U] = optionalDirective.map(_.orElse(defaultValue)) } } object FormFieldImpl { diff --git a/akka-http/src/main/scala/akka/http/impl/server/HeaderImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/HeaderImpl.scala index 0ff24e6cd0..1882eddf3d 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/HeaderImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/HeaderImpl.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server +import java.util.Optional + import akka.http.javadsl.model.HttpHeader import akka.http.javadsl.server.RequestVal import akka.http.javadsl.server.values.Header @@ -12,6 +14,7 @@ import akka.http.scaladsl.server._ import akka.http.scaladsl.server.directives.BasicDirectives._ import akka.http.scaladsl.server.directives.RouteDirectives._ +import scala.compat.java8.OptionConverters._ import scala.reflect.ClassTag /** @@ -38,9 +41,9 @@ private[http] object HeaderImpl { def directive: Directive1[U] = instanceDirective } - def optionalInstance(): RequestVal[Option[U]] = - new StandaloneExtractionImpl[Option[U]] { - def directive: Directive1[Option[U]] = optionalDirective(uClassTag) + def optionalInstance(): RequestVal[Optional[U]] = + new StandaloneExtractionImpl[Optional[U]] { + def directive: Directive1[Optional[U]] = optionalDirective(uClassTag).map(_.asJava) } def value(): RequestVal[String] = @@ -48,9 +51,9 @@ private[http] object HeaderImpl { def directive: Directive1[String] = instanceDirective.map(_.value) } - def optionalValue(): RequestVal[Option[String]] = - new StandaloneExtractionImpl[Option[String]] { - def directive: Directive1[Option[String]] = optionalDirective(uClassTag).map(_.map(_.value)) + def optionalValue(): RequestVal[Optional[String]] = + new StandaloneExtractionImpl[Optional[String]] { + def directive: Directive1[Optional[String]] = optionalDirective(uClassTag).map(_.map(_.value).asJava) } }.asInstanceOf[Header[T]] // undeclared covariance } diff --git a/akka-http/src/main/scala/akka/http/impl/server/MarshallerImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/MarshallerImpl.scala index 3690ca5fc2..26a1e4f6ad 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/MarshallerImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/MarshallerImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/ParameterImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/ParameterImpl.scala index 895a0f45cf..06770056a7 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/ParameterImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/ParameterImpl.scala @@ -1,20 +1,23 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server +import java.util.Optional + import akka.http.javadsl.server.RequestVal import akka.http.javadsl.server.values.Parameter import akka.http.scaladsl.common.{ NameUnmarshallerReceptacle, NameReceptacle } import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.unmarshalling._ -import akka.japi.{ Option ⇒ JOption } import scala.reflect.ClassTag import akka.http.scaladsl.server.directives.ParameterDirectives import akka.http.scaladsl.server.Directive1 +import scala.compat.java8.OptionConverters._ + /** * INTERNAL API */ @@ -25,19 +28,19 @@ private[http] class ParameterImpl[T, U](receptacle: NameReceptacle[T])( import ParameterDirectives._ def directive: Directive1[U] = parameter(receptacle).map(conv) - def optional: RequestVal[JOption[U]] = - new StandaloneExtractionImpl[JOption[U]] { - def directive: Directive1[JOption[U]] = optionalDirective + def optional: RequestVal[Optional[U]] = + new StandaloneExtractionImpl[Optional[U]] { + def directive: Directive1[Optional[U]] = optionalDirective } - private def optionalDirective: Directive1[JOption[U]] = + private def optionalDirective: Directive1[Optional[U]] = extractMaterializer.flatMap { implicit fm ⇒ - parameter(receptacle.?).map(v ⇒ JOption.fromScalaOption(v.map(conv))) + parameter(receptacle.?).map(v ⇒ v.map(conv).asJava) } def withDefault(defaultValue: U): RequestVal[U] = new StandaloneExtractionImpl[U] { - def directive: Directive1[U] = optionalDirective.map(_.getOrElse(defaultValue)) + def directive: Directive1[U] = optionalDirective.map(_.orElse(defaultValue)) } } private[http] object ParameterImpl { diff --git a/akka-http/src/main/scala/akka/http/impl/server/PathMatcherImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/PathMatcherImpl.scala index 37595c1821..91f7bc5e0e 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/PathMatcherImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/PathMatcherImpl.scala @@ -1,19 +1,22 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server +import java.util.Optional + import akka.http.javadsl.server.values.PathMatcher -import akka.japi.Option import scala.reflect.ClassTag import akka.http.scaladsl.server.{ PathMatcher ⇒ ScalaPathMatcher } +import scala.compat.java8.OptionConverters + /** * INTERNAL API */ private[http] class PathMatcherImpl[T: ClassTag](val matcher: ScalaPathMatcher[Tuple1[T]]) extends ExtractionImpl[T] with PathMatcher[T] { - def optional: PathMatcher[Option[T]] = new PathMatcherImpl[Option[T]](matcher.?.map(Option.fromScalaOption)) + def optional: PathMatcher[Optional[T]] = new PathMatcherImpl[Optional[T]](matcher.?.map(OptionConverters.toJava)) } \ No newline at end of file diff --git a/akka-http/src/main/scala/akka/http/impl/server/RejectionHandlerWrapper.scala b/akka-http/src/main/scala/akka/http/impl/server/RejectionHandlerWrapper.scala index ecf631a6cf..0f48b6d188 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RejectionHandlerWrapper.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RejectionHandlerWrapper.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server @@ -61,10 +61,10 @@ private[http] class RejectionHandlerWrapper(javaHandler: server.RejectionHandler handleAuthorizationFailedRejection(ctx) case MissingCookieRejection(cookieName) ⇒ handleMissingCookieRejection(ctx, cookieName) - case ExpectedWebsocketRequestRejection ⇒ - handleExpectedWebsocketRequestRejection(ctx) - case UnsupportedWebsocketSubprotocolRejection(supportedProtocol) ⇒ - handleUnsupportedWebsocketSubprotocolRejection(ctx, supportedProtocol) + case ExpectedWebSocketRequestRejection ⇒ + handleExpectedWebSocketRequestRejection(ctx) + case UnsupportedWebSocketSubprotocolRejection(supportedProtocol) ⇒ + handleUnsupportedWebSocketSubprotocolRejection(ctx, supportedProtocol) case ValidationRejection(message, cause) ⇒ handleValidationRejection(ctx, message, cause.orNull) diff --git a/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala index c40cbf0e43..5f92455b3a 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RequestContextImpl.scala @@ -1,18 +1,20 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server import akka.http.javadsl.model.ContentType +import akka.http.javadsl.settings.{ RoutingSettings, ParserSettings } import akka.http.scaladsl.model.HttpEntity import akka.stream.Materializer - -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ ExecutionContextExecutor, Future } import akka.http.javadsl.{ model ⇒ jm } import akka.http.impl.util.JavaMapping.Implicits._ import akka.http.scaladsl.server.{ RequestContext ⇒ ScalaRequestContext } import akka.http.javadsl.server._ +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * INTERNAL API @@ -28,6 +30,7 @@ private[http] final case class RequestContextImpl(underlying: ScalaRequestContex futureResult.flatMap { case r: RouteResultImpl ⇒ r.underlying }(executionContext()) + def completeWith(futureResult: CompletionStage[RouteResult]): RouteResult = completeWith(futureResult.toScala) def complete(text: String): RouteResult = underlying.complete(text) def complete(contentType: ContentType.NonBinary, text: String): RouteResult = underlying.complete(HttpEntity(contentType.asScala, text)) @@ -40,7 +43,7 @@ private[http] final case class RequestContextImpl(underlying: ScalaRequestContex case MarshallerImpl(m) ⇒ implicit val marshaller = m(underlying.executionContext) underlying.complete(value) - case _ ⇒ throw new IllegalArgumentException("Unsupported marshaller: $marshaller") + case _ ⇒ throw new IllegalArgumentException(s"Unsupported marshaller: $marshaller") } def complete(response: jm.HttpResponse): RouteResult = underlying.complete(response.asScala) @@ -48,6 +51,9 @@ private[http] final case class RequestContextImpl(underlying: ScalaRequestContex def reject(customRejection: CustomRejection): RouteResult = underlying.reject(CustomRejectionWrapper(customRejection)) - def executionContext(): ExecutionContext = underlying.executionContext + def executionContext(): ExecutionContextExecutor = underlying.executionContext def materializer(): Materializer = underlying.materializer + + override def settings: RoutingSettings = underlying.settings + override def parserSettings: ParserSettings = underlying.parserSettings } diff --git 
a/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala b/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala index 68fe36a603..deec2ff961 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RouteImplementation.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server @@ -8,7 +8,6 @@ import akka.http.impl.util.JavaMapping import akka.http.javadsl.server.values.{ PathMatcher, BasicCredentials, OAuth2Credentials } import akka.http.scaladsl.model.StatusCodes.Redirection import akka.http.scaladsl.server.util.TupleOps.Join - import scala.language.implicitConversions import scala.annotation.tailrec import scala.collection.immutable @@ -23,6 +22,10 @@ import akka.http.scaladsl.server import akka.http.javadsl.server._ import RouteStructure._ +import scala.compat.java8.FutureConverters._ +import scala.compat.java8.OptionConverters._ +import akka.dispatch.ExecutionContexts.sameThreadExecutionContext + /** * INTERNAL API */ @@ -47,8 +50,8 @@ private[http] object ExtractionMap { def addAll(values: Map[RequestVal[_], Any]): ExtractionMap = ExtractionMap(map ++ values) - // CustomHeader methods - override def suppressRendering: Boolean = true + def renderInRequests = false + def renderInResponses = false def name(): String = "ExtractedValues" def value(): String = "" } @@ -94,7 +97,7 @@ private[http] object RouteImplementation extends Directives with server.RouteCon } } - authenticator.authenticate(javaCreds) + authenticator.authenticate(javaCreds).toScala.map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext) }).flatMap { user ⇒ addExtraction(authenticator.asInstanceOf[RequestVal[Any]], user) } @@ -117,7 +120,7 @@ private[http] object RouteImplementation extends Directives with server.RouteCon } } - authenticator.authenticate(javaCreds) + authenticator.authenticate(javaCreds).toScala.map(_.asScala)(sameThreadExecutionContext) }).flatMap { user ⇒ addExtraction(authenticator.asInstanceOf[RequestVal[Any]], user) } @@ -158,7 +161,7 @@ private[http] object RouteImplementation extends Directives with server.RouteCon def resolve(fileName: String): ContentType = ContentTypeResolver.Default(fileName) })) - case HandleWebsocketMessages(handler) ⇒ handleWebsocketMessages(JavaMapping.toScala(handler)) + case HandleWebSocketMessages(handler) ⇒ handleWebSocketMessages(JavaMapping.toScala(handler)) case Redirect(uri, code) ⇒ redirect(uri.asScala, code.asScala.asInstanceOf[Redirection]) // guarded by require in Redirect case dyn: DynamicDirectiveRoute1[t1Type] ⇒ diff --git a/akka-http/src/main/scala/akka/http/impl/server/RouteResultImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/RouteResultImpl.scala index 0bc2e02d04..6c1dda1c01 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RouteResultImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RouteResultImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/RouteStructure.scala b/akka-http/src/main/scala/akka/http/impl/server/RouteStructure.scala index 6d7bd3a120..fe9be41145 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/RouteStructure.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/RouteStructure.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server @@ -84,7 +84,7 @@ private[http] object RouteStructure { case class RangeSupport()(val innerRoute: Route, val moreInnerRoutes: immutable.Seq[Route]) extends DirectiveRoute - case class HandleWebsocketMessages(handler: Flow[Message, Message, Any]) extends Route + case class HandleWebSocketMessages(handler: Flow[Message, Message, Any]) extends Route case class SetCookie(cookie: HttpCookie)(val innerRoute: Route, val moreInnerRoutes: immutable.Seq[Route]) extends DirectiveRoute case class DeleteCookie(name: String, domain: Option[String], path: Option[String])(val innerRoute: Route, val moreInnerRoutes: immutable.Seq[Route]) extends DirectiveRoute diff --git a/akka-http/src/main/scala/akka/http/impl/server/StandaloneExtractionImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/StandaloneExtractionImpl.scala index ef49b0e09a..2ec76f6d5c 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/StandaloneExtractionImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/StandaloneExtractionImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/UnmarshallerImpl.scala b/akka-http/src/main/scala/akka/http/impl/server/UnmarshallerImpl.scala index bbcbbdd193..7472e24a99 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/UnmarshallerImpl.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/UnmarshallerImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/impl/server/Util.scala b/akka-http/src/main/scala/akka/http/impl/server/Util.scala index fa03d0e2fd..0b31de1f1e 100644 --- a/akka-http/src/main/scala/akka/http/impl/server/Util.scala +++ b/akka-http/src/main/scala/akka/http/impl/server/Util.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.impl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/CustomRejection.scala b/akka-http/src/main/scala/akka/http/javadsl/server/CustomRejection.scala index 05e64ca6dc..73e59882d8 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/CustomRejection.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/CustomRejection.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/CustomRequestVal.scala b/akka-http/src/main/scala/akka/http/javadsl/server/CustomRequestVal.scala index 0e85c9b67c..7ec91c0d8e 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/CustomRequestVal.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/CustomRequestVal.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Directives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Directives.scala index a06592c0b2..9fb09ce93d 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Directives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Directives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server @@ -9,7 +9,7 @@ import scala.collection.immutable import scala.annotation.varargs import akka.http.javadsl.model.HttpMethods -abstract class AllDirectives extends WebsocketDirectives +abstract class AllDirectives extends WebSocketDirectives /** * diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/ExceptionHandler.scala b/akka-http/src/main/scala/akka/http/javadsl/server/ExceptionHandler.scala index 774de380f1..d973fa8f98 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/ExceptionHandler.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/ExceptionHandler.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala index b95e1c3383..0d70b20b02 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Handler.scala @@ -1,9 +1,9 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server -import scala.concurrent.Future +import java.util.concurrent.CompletionStage /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) @@ -22,12 +22,11 @@ trait Handler extends akka.japi.function.Function[RequestContext, RouteResult] { /** * A route Handler that handles a request (that is encapsulated in a [[RequestContext]]) - * and returns a [[scala.concurrent.Future]] of [[RouteResult]] with the response (or the rejection). + * and returns a [[java.util.concurrent.CompletionStage]] of [[RouteResult]] with the response (or the rejection). * * Use the methods in [[RequestContext]] to create a [[RouteResult]]. * A handler MUST NOT return `null` as the result. */ -trait AsyncHandler extends akka.japi.function.Function[RequestContext, Future[RouteResult]] { - override def apply(ctx: RequestContext): Future[RouteResult] +trait AsyncHandler extends akka.japi.function.Function[RequestContext, CompletionStage[RouteResult]] { + override def apply(ctx: RequestContext): CompletionStage[RouteResult] } - diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala b/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala index 45fe666e31..8f89f4a10b 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/HttpApp.scala @@ -1,12 +1,12 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server -import scala.concurrent.Future import akka.actor.ActorSystem import akka.http.scaladsl.Http.ServerBinding +import java.util.concurrent.CompletionStage /** * A convenience class to derive from to get everything from HttpService and Directives into scope. 
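The `Handler.scala` change above switches `AsyncHandler` from returning `scala.concurrent.Future[RouteResult]` to `java.util.concurrent.CompletionStage[RouteResult]`. A minimal sketch of a handler written against that new signature, assuming akka-http on the classpath; the handler itself and its response text are illustrative, not part of the diff:

```
import java.util.concurrent.{ CompletableFuture, CompletionStage }
import akka.http.javadsl.server.{ AsyncHandler, RequestContext, RouteResult }

// Illustrative handler: completes every request immediately with a fixed text body,
// wrapped in an already-completed CompletionStage.
val helloHandler: AsyncHandler = new AsyncHandler {
  override def apply(ctx: RequestContext): CompletionStage[RouteResult] =
    CompletableFuture.completedFuture(ctx.complete("hello"))
}
```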
@@ -22,6 +22,6 @@ abstract class HttpApp * Starts an HTTP server on the given interface and port. Creates the route by calling the * user-implemented [[createRoute]] method and uses the route to handle requests of the server. */ - def bindRoute(interface: String, port: Int, system: ActorSystem): Future[ServerBinding] = + def bindRoute(interface: String, port: Int, system: ActorSystem): CompletionStage[ServerBinding] = bindRoute(interface, port, createRoute(), system) } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala b/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala index 5e8e9a80e5..e5f004b9af 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/HttpService.scala @@ -1,22 +1,24 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server -import scala.concurrent.Future import akka.actor.ActorSystem import akka.http.scaladsl.{ server, Http } import akka.http.scaladsl.Http.ServerBinding +import akka.http.scaladsl.server.RouteResult import akka.http.impl.server.RouteImplementation import akka.stream.{ ActorMaterializer, Materializer } import akka.stream.scaladsl.{ Keep, Sink } +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ trait HttpServiceBase { /** * Starts a server on the given interface and port and uses the route to handle incoming requests. */ - def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem): Future[ServerBinding] = { + def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem): CompletionStage[ServerBinding] = { implicit val sys = system implicit val materializer = ActorMaterializer() handleConnectionsWithRoute(interface, port, route, system, materializer) @@ -25,19 +27,19 @@ trait HttpServiceBase { /** * Starts a server on the given interface and port and uses the route to handle incoming requests. */ - def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): Future[ServerBinding] = + def bindRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): CompletionStage[ServerBinding] = handleConnectionsWithRoute(interface, port, route, system, materializer) /** * Uses the route to handle incoming connections and requests for the ServerBinding. 
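The `bindRoute` overloads and `handleConnectionsWithRoute` above expose the Scala `Future[ServerBinding]` to Java callers as a `CompletionStage` via scala-java8-compat, while `RequestContextImpl.completeWith` converts in the other direction. A minimal sketch of those two conversions, using an illustrative stand-in future rather than an actual server binding:

```
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import scala.compat.java8.FutureConverters._

// Stand-in for the Future returned by Http().bind(...).run() in the diff above.
val scalaFuture: Future[String] = Future.successful("bound")

// Scala Future -> Java CompletionStage, as in handleConnectionsWithRoute
val javaStage: CompletionStage[String] = scalaFuture.toJava

// Java CompletionStage -> Scala Future, as in RequestContextImpl.completeWith
val backToScala: Future[String] = javaStage.toScala
```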
*/ - def handleConnectionsWithRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): Future[ServerBinding] = { + def handleConnectionsWithRoute(interface: String, port: Int, route: Route, system: ActorSystem, materializer: Materializer): CompletionStage[ServerBinding] = { implicit val s = system implicit val m = materializer import system.dispatcher val r: server.Route = RouteImplementation(route) - Http(system).bind(interface, port).toMat(Sink.foreach(_.handleWith(r)))(Keep.left).run()(materializer) + Http(system).bind(interface, port).toMat(Sink.foreach(_.handleWith(RouteResult.route2HandlerFlow(r))))(Keep.left).run()(materializer).toJava } } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Marshaller.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Marshaller.scala index b6322cf601..63586c8ef9 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Marshaller.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Marshaller.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Marshallers.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Marshallers.scala index 1189c80537..2944278b25 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Marshallers.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Marshallers.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RejectionHandler.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RejectionHandler.scala index 85e2738451..61b796c62f 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RejectionHandler.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RejectionHandler.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server @@ -157,13 +157,13 @@ abstract class RejectionHandler { /** * Callback called to handle rejection created when a websocket request was expected but none was found. */ - def handleExpectedWebsocketRequestRejection(ctx: RequestContext): RouteResult = passRejection() + def handleExpectedWebSocketRequestRejection(ctx: RequestContext): RouteResult = passRejection() /** * Callback called to handle rejection created when a websocket request was not handled because none * of the given subprotocols was supported. */ - def handleUnsupportedWebsocketSubprotocolRejection(ctx: RequestContext, supportedProtocol: String): RouteResult = passRejection() + def handleUnsupportedWebSocketSubprotocolRejection(ctx: RequestContext, supportedProtocol: String): RouteResult = passRejection() /** * Callback called to handle rejection created by the `validation` directive as well as for `IllegalArgumentExceptions` diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala index e2b41703e8..9c88068e5a 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RequestContext.scala @@ -1,12 +1,14 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.ExecutionContextExecutor import akka.http.javadsl.model._ +import akka.http.javadsl.settings.{ RoutingSettings, ParserSettings } import akka.stream.Materializer +import java.util.concurrent.CompletionStage /** * The RequestContext represents the state of the request while it is routed through @@ -24,11 +26,21 @@ trait RequestContext { def unmatchedPath: String /** Returns the ExecutionContext of this RequestContext */ - def executionContext(): ExecutionContext + def executionContext(): ExecutionContextExecutor /** Returns the Materializer of this RequestContext */ def materializer(): Materializer + /** + * The default RoutingSettings to be used for configuring directives. + */ + def settings: RoutingSettings + + /** + * The default ParserSettings to be used for configuring directives. + */ + def parserSettings: ParserSettings + /** * Completes the request with a value of type T and marshals it using the given * marshaller. @@ -63,7 +75,7 @@ trait RequestContext { /** * Defers completion of the request */ - def completeWith(futureResult: Future[RouteResult]): RouteResult + def completeWith(futureResult: CompletionStage[RouteResult]): RouteResult /** * Explicitly rejects the request as not found. Other route alternatives @@ -75,4 +87,4 @@ trait RequestContext { * Reject this request with an application-defined CustomRejection. */ def reject(customRejection: CustomRejection): RouteResult -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RequestVal.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RequestVal.scala index 8c8d05ae4b..d4cef3333b 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RequestVal.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RequestVal.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RequestVals.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RequestVals.scala index 67c3ef687a..e741b6ceee 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RequestVals.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RequestVals.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Route.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Route.scala index 6c68213120..0a93395acd 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Route.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Route.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala b/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala index 223489b8cd..97492d57d0 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/RouteResult.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshaller.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshaller.scala index 8e2b5e43a3..dda22441cf 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshaller.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshaller.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshallers.scala b/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshallers.scala index bcc9ee4756..76790ebc02 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/Unmarshallers.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala index 0db54eed84..6a11b4fe3a 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/BasicDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives @@ -13,6 +13,8 @@ import akka.http.javadsl.server._ import scala.annotation.varargs import scala.concurrent.Future +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ abstract class BasicDirectives extends BasicDirectivesBase { /** @@ -152,16 +154,22 @@ abstract class BasicDirectives extends BasicDirectivesBase { res } def returnTypeMatches(method: Method): Boolean = - method.getReturnType == classOf[RouteResult] || returnsFuture(method) + method.getReturnType == classOf[RouteResult] || returnsFuture(method) || returnsCompletionStage(method) def returnsFuture(method: Method): Boolean = method.getReturnType == classOf[Future[_]] && method.getGenericReturnType.isInstanceOf[ParameterizedType] && method.getGenericReturnType.asInstanceOf[ParameterizedType].getActualTypeArguments()(0) == classOf[RouteResult] + def returnsCompletionStage(method: Method): Boolean = + method.getReturnType == classOf[CompletionStage[_]] && + method.getGenericReturnType.isInstanceOf[ParameterizedType] && + method.getGenericReturnType.asInstanceOf[ParameterizedType].getActualTypeArguments()(0) == classOf[RouteResult] + /** Makes sure both RouteResult and Future[RouteResult] are acceptable result types. 
*/ def adaptResult(method: Method): (RequestContext, AnyRef) ⇒ RouteResult = - if (returnsFuture(method)) (ctx, v) ⇒ ctx.completeWith(v.asInstanceOf[Future[RouteResult]]) + if (returnsFuture(method)) (ctx, v) ⇒ ctx.completeWith(v.asInstanceOf[Future[RouteResult]].toJava) + else if (returnsCompletionStage(method)) (ctx, v) => ctx.completeWith(v.asInstanceOf[CompletionStage[RouteResult]]) else (_, v) ⇒ v.asInstanceOf[RouteResult] val IdentityAdaptor: (RequestContext, Seq[Any]) ⇒ Seq[Any] = (_, ps) ⇒ ps @@ -192,4 +200,4 @@ abstract class BasicDirectives extends BasicDirectivesBase { handle(extractions: _*)(ctx ⇒ method(ctx, extractions.map(_.get(ctx)))) } -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CacheConditionDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CacheConditionDirectives.scala index d4c5f90396..5e74b63530 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CacheConditionDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CacheConditionDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CodingDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CodingDirectives.scala index 743143f99f..8ba5571796 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CodingDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CodingDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CookieDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CookieDirectives.scala index 948c8a9cd3..500b9cd104 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/CookieDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/CookieDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/ExecutionDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/ExecutionDirectives.scala index 144704b526..9065e49ff0 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/ExecutionDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/ExecutionDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/FileAndResourceDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/FileAndResourceDirectives.scala index a4cd59be95..7d5c2f78fd 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/FileAndResourceDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/FileAndResourceDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
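The `BasicDirectives` hunk above teaches the javadsl reflection-based handler lookup to accept `CompletionStage[RouteResult]` return types alongside `Future[RouteResult]`, bridging the two with scala-java8-compat's `FutureConverters` (note the `.toJava` call in `adaptResult`). A minimal sketch of that bridging, independent of the patch (object and method names are illustrative only):

```scala
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import scala.compat.java8.FutureConverters._

object FutureBridging {
  // scala.concurrent.Future -> java.util.concurrent.CompletionStage
  def toCompletionStage[T](f: Future[T]): CompletionStage[T] = f.toJava
  // java.util.concurrent.CompletionStage -> scala.concurrent.Future
  def toScalaFuture[T](cs: CompletionStage[T]): Future[T] = cs.toScala
}
```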
*/ package akka.http.javadsl.server.directives diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/HostDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/HostDirectives.scala index de905a0453..55f38884d1 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/HostDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/HostDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/MethodDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/MethodDirectives.scala index 58d3871648..d4438b5186 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/MethodDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/MethodDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.directives diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/MiscDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/MiscDirectives.scala index 275e80f305..270726ca70 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/MiscDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/MiscDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/PathDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/PathDirectives.scala index 12be8a3e22..b99cc00b5a 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/PathDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/PathDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/RangeDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/RangeDirectives.scala index 70dcdc4f66..7df32a6736 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/RangeDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/RangeDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/SchemeDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/SchemeDirectives.scala index 7678ae04f3..41ea4fda93 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/SchemeDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/SchemeDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.directives diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/directives/WebsocketDirectives.scala b/akka-http/src/main/scala/akka/http/javadsl/server/directives/WebSocketDirectives.scala similarity index 50% rename from akka-http/src/main/scala/akka/http/javadsl/server/directives/WebsocketDirectives.scala rename to akka-http/src/main/scala/akka/http/javadsl/server/directives/WebSocketDirectives.scala index 12d42c4ba9..3f11c1dd23 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/directives/WebsocketDirectives.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/directives/WebSocketDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server @@ -9,11 +9,11 @@ import akka.http.impl.server.RouteStructure import akka.http.javadsl.model.ws.Message import akka.stream.javadsl.Flow -abstract class WebsocketDirectives extends SchemeDirectives { +abstract class WebSocketDirectives extends SchemeDirectives { /** * Handles websocket requests with the given handler and rejects other requests with a - * [[ExpectedWebsocketRequestRejection]]. + * [[ExpectedWebSocketRequestRejection]]. */ - def handleWebsocketMessages(handler: Flow[Message, Message, _]): Route = - RouteStructure.HandleWebsocketMessages(handler) + def handleWebSocketMessages(handler: Flow[Message, Message, _]): Route = + RouteStructure.HandleWebSocketMessages(handler) } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/Cookie.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/Cookie.scala index b0327d7c99..96804e476f 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/Cookie.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/Cookie.scala @@ -1,32 +1,32 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values import akka.http.impl.server.{ RouteStructure, CookieImpl } import akka.http.javadsl.server.{ Directive, RequestVal, Route } -import akka.japi.Option +import java.util.Optional import scala.annotation.varargs -import scala.collection.immutable +import scala.compat.java8.OptionConverters._ abstract class Cookie { def name(): String - def domain(): Option[String] - def path(): Option[String] + def domain(): Optional[String] + def path(): Optional[String] def withDomain(domain: String): Cookie def withPath(path: String): Cookie def value(): RequestVal[String] - def optionalValue(): RequestVal[Option[String]] + def optionalValue(): RequestVal[Optional[String]] def set(value: String): Directive @varargs def delete(innerRoute: Route, moreInnerRoutes: Route*): Route = - RouteStructure.DeleteCookie(name(), domain(), path())(innerRoute, moreInnerRoutes.toList) + RouteStructure.DeleteCookie(name(), domain().asScala, path().asScala)(innerRoute, moreInnerRoutes.toList) } object Cookies { def create(name: String): Cookie = new CookieImpl(name) diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/FormField.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/FormField.scala index beb9448dda..b69424ccdb 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/FormField.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/FormField.scala @@ -1,21 +1,21 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
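The javadsl `Cookie` (and, below, `FormField`) hunks replace `akka.japi.Option` with `java.util.Optional`, using scala-java8-compat's `OptionConverters` to cross the boundary (see the `.asScala` calls above). A small standalone sketch of those conversions; the object name and the `"example.com"` value are placeholders:

```scala
import java.util.Optional
import scala.compat.java8.OptionConverters._

object OptionBridging {
  val domain: Optional[String] = Some("example.com").asJava // Option -> Optional
  val asOption: Option[String] = domain.asScala             // Optional -> Option
}
```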
*/ package akka.http.javadsl.server package values +import java.util.Optional import java.{ lang ⇒ jl } import akka.http.impl.server.{ FormFieldImpl, Util } import akka.http.scaladsl.unmarshalling._ import akka.japi.function.Function -import akka.japi.{ Option ⇒ JOption } import scala.reflect.ClassTag trait FormField[T] extends RequestVal[T] { - def optional: RequestVal[JOption[T]] + def optional: RequestVal[Optional[T]] def withDefault(defaultValue: T): RequestVal[T] } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/Header.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/Header.scala index 542b784125..3d672c93c9 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/Header.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/Header.scala @@ -1,9 +1,11 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values +import java.util.Optional + import akka.http.impl.server.HeaderImpl import akka.http.javadsl.model.HttpHeader import akka.http.javadsl.server.RequestVal @@ -11,27 +13,28 @@ import akka.http.scaladsl.model import akka.http.scaladsl.server.Directive1 import akka.http.scaladsl.server.util.ClassMagnet +import scala.compat.java8.OptionConverters._ import scala.reflect.{ ClassTag, classTag } trait Header[T <: HttpHeader] { def instance(): RequestVal[T] - def optionalInstance(): RequestVal[Option[T]] + def optionalInstance(): RequestVal[Optional[T]] def value(): RequestVal[String] - def optionalValue(): RequestVal[Option[String]] + def optionalValue(): RequestVal[Optional[String]] } object Headers { import akka.http.scaladsl.server.directives.BasicDirectives._ import akka.http.scaladsl.server.directives.HeaderDirectives._ def byName(name: String): Header[HttpHeader] = - HeaderImpl[HttpHeader](name, _ ⇒ optionalHeaderInstanceByName(name.toLowerCase()), classTag[HttpHeader]) + HeaderImpl[HttpHeader](name, _ ⇒ optionalHeaderInstanceByName(name.toLowerCase()).map(_.asScala), classTag[HttpHeader]) def byClass[T <: HttpHeader](clazz: Class[T]): Header[T] = HeaderImpl[T](clazz.getSimpleName, ct ⇒ optionalHeaderValueByType(ClassMagnet(ct)), ClassTag(clazz)) - private def optionalHeaderInstanceByName(lowercaseName: String): Directive1[Option[model.HttpHeader]] = + private def optionalHeaderInstanceByName(lowercaseName: String): Directive1[Optional[model.HttpHeader]] = extract(_.request.headers.collectFirst { case h @ model.HttpHeader(`lowercaseName`, _) ⇒ h - }) -} \ No newline at end of file + }.asJava) +} diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala index 9ce8b01d2f..3f7e8fc18e 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/HttpBasicAuthenticator.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.javadsl.server.values @@ -7,9 +7,10 @@ package akka.http.javadsl.server.values import akka.http.impl.server.{ ExtractionImplBase, RouteStructure } import akka.http.javadsl.server.{ AbstractDirective, RequestVal, Route } import akka.http.scaladsl.util.FastFuture - -import scala.concurrent.Future import scala.reflect.ClassTag +import java.util.concurrent.CompletionStage +import java.util.Optional +import java.util.concurrent.CompletableFuture /** * Represents existing or missing Http Basic authentication credentials. @@ -38,18 +39,18 @@ trait BasicCredentials { */ abstract class HttpBasicAuthenticator[T](val realm: String) extends AbstractDirective with ExtractionImplBase[T] with RequestVal[T] { protected[http] implicit def classTag: ClassTag[T] = reflect.classTag[AnyRef].asInstanceOf[ClassTag[T]] - def authenticate(credentials: BasicCredentials): Future[Option[T]] + def authenticate(credentials: BasicCredentials): CompletionStage[Optional[T]] /** * Creates a return value for use in [[authenticate]] that successfully authenticates the requests and provides * the given user. */ - def authenticateAs(user: T): Future[Option[T]] = FastFuture.successful(Some(user)) + def authenticateAs(user: T): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.of(user)) /** * Refuses access for this user. */ - def refuseAccess(): Future[Option[T]] = FastFuture.successful(None) + def refuseAccess(): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.empty()) /** * INTERNAL API diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala index 6845115048..99d5f2a89a 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/OAuth2Authenticator.scala @@ -1,15 +1,15 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values import akka.http.impl.server.{ ExtractionImplBase, RouteStructure } import akka.http.javadsl.server.{ AbstractDirective, RequestVal, Route } -import akka.http.scaladsl.util.FastFuture - -import scala.concurrent.Future import scala.reflect.ClassTag +import java.util.concurrent.CompletionStage +import java.util.Optional +import java.util.concurrent.CompletableFuture /** * Represents existing or missing OAuth 2 authentication credentials. @@ -38,18 +38,18 @@ trait OAuth2Credentials { */ abstract class OAuth2Authenticator[T](val realm: String) extends AbstractDirective with ExtractionImplBase[T] with RequestVal[T] { protected[http] implicit def classTag: ClassTag[T] = reflect.classTag[AnyRef].asInstanceOf[ClassTag[T]] - def authenticate(credentials: OAuth2Credentials): Future[Option[T]] + def authenticate(credentials: OAuth2Credentials): CompletionStage[Optional[T]] /** * Creates a return value for use in [[authenticate]] that successfully authenticates the requests and provides * the given user. */ - def authenticateAs(user: T): Future[Option[T]] = FastFuture.successful(Some(user)) + def authenticateAs(user: T): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.of(user)) /** * Refuses access for this user. 
*/ - def refuseAccess(): Future[Option[T]] = FastFuture.successful(None) + def refuseAccess(): CompletionStage[Optional[T]] = CompletableFuture.completedFuture(Optional.empty()) /** * INTERNAL API diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/Parameter.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/Parameter.scala index 093d6e8967..b822d5e998 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/Parameter.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/Parameter.scala @@ -1,11 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values import java.util.AbstractMap.SimpleEntry -import java.util.{ Collection ⇒ JCollection, Map ⇒ JMap } +import java.util.{ Collection ⇒ JCollection, Map ⇒ JMap, Optional } import java.{ lang ⇒ jl } import akka.http.impl.server.{ ParameterImpl, StandaloneExtractionImpl, Util } @@ -13,7 +13,6 @@ import akka.http.javadsl.server.RequestVal import akka.http.scaladsl.server.directives.ParameterDirectives import akka.http.scaladsl.unmarshalling.Unmarshaller import akka.japi.function.Function -import akka.japi.{ Option ⇒ JOption } import scala.reflect.ClassTag @@ -21,7 +20,7 @@ import scala.reflect.ClassTag * A RequestVal representing a query parameter of type T. */ trait Parameter[T] extends RequestVal[T] { - def optional: RequestVal[JOption[T]] + def optional: RequestVal[Optional[T]] def withDefault(defaultValue: T): RequestVal[T] } diff --git a/akka-http/src/main/scala/akka/http/javadsl/server/values/PathMatchers.scala b/akka-http/src/main/scala/akka/http/javadsl/server/values/PathMatchers.scala index ade74aaf8e..1c8d9e1098 100644 --- a/akka-http/src/main/scala/akka/http/javadsl/server/values/PathMatchers.scala +++ b/akka-http/src/main/scala/akka/http/javadsl/server/values/PathMatchers.scala @@ -1,16 +1,16 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.javadsl.server.values +import java.util.Optional import java.util.regex.Pattern import java.{ lang ⇒ jl, util ⇒ ju } import akka.http.impl.server.PathMatcherImpl import akka.http.javadsl.server.RequestVal import akka.http.scaladsl.server.{ PathMatcher0, PathMatcher1, PathMatchers ⇒ ScalaPathMatchers, PathMatcher ⇒ ScalaPathMatcher } -import akka.japi.Option import akka.japi.function.Function import scala.collection.JavaConverters._ @@ -26,7 +26,7 @@ import scala.util.matching.Regex * "consumes" a part of the path which is recorded in [[RequestContext.unmatchedPath]]. */ trait PathMatcher[T] extends RequestVal[T] { - def optional: PathMatcher[Option[T]] + def optional: PathMatcher[Optional[T]] } /** diff --git a/akka-http/src/main/scala/akka/http/scaladsl/client/RequestBuilding.scala b/akka-http/src/main/scala/akka/http/scaladsl/client/RequestBuilding.scala index 8f41713983..742fe2d3b0 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/client/RequestBuilding.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/client/RequestBuilding.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
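Both javadsl authenticators above now return `CompletionStage[Optional[T]]` instead of `Future[Option[T]]`. A hypothetical subclass built only from the members visible in this patch, assuming `authenticate` is the sole abstract member left for users to implement (which is how the class is meant to be used); the realm `"secure-site"` and user `"guest"` are placeholders:

```scala
import java.util.Optional
import java.util.concurrent.CompletionStage
import akka.http.javadsl.server.values.{ BasicCredentials, HttpBasicAuthenticator }

object GuestAuth {
  // Always authenticates as a fixed user to keep the sketch self-contained;
  // a real implementation would inspect `credentials` before deciding.
  val authenticator = new HttpBasicAuthenticator[String]("secure-site") {
    def authenticate(credentials: BasicCredentials): CompletionStage[Optional[String]] =
      authenticateAs("guest") // or refuseAccess() to reject
  }
}
```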
*/ package akka.http.scaladsl.client diff --git a/akka-http/src/main/scala/akka/http/scaladsl/client/TransformerPipelineSupport.scala b/akka-http/src/main/scala/akka/http/scaladsl/client/TransformerPipelineSupport.scala index c644e00575..d9277c093d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/client/TransformerPipelineSupport.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/client/TransformerPipelineSupport.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.client diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Coder.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Coder.scala index 9186cde443..0013f2f350 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Coder.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Coder.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/DataMapper.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/DataMapper.scala index 71e7370c62..a77bb13b45 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/DataMapper.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/DataMapper.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala index e4227db256..53b68dfe48 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Decoder.scala @@ -1,12 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.coding +import akka.NotUsed import akka.http.scaladsl.model._ -import akka.stream.Materializer -import akka.stream.stage.Stage +import akka.stream.{ FlowShape, Materializer } +import akka.stream.stage.{ GraphStage, Stage } import akka.util.ByteString import headers.HttpEncoding import akka.stream.scaladsl.{ Sink, Source, Flow } @@ -26,7 +27,7 @@ trait Decoder { def maxBytesPerChunk: Int def withMaxBytesPerChunk(maxBytesPerChunk: Int): Decoder - def decoderFlow: Flow[ByteString, ByteString, Unit] + def decoderFlow: Flow[ByteString, ByteString, NotUsed] def decode(input: ByteString)(implicit mat: Materializer): Future[ByteString] = Source.single(input).via(decoderFlow).runWith(Sink.fold(ByteString.empty)(_ ++ _)) } @@ -36,7 +37,7 @@ object Decoder { /** A decoder that is implemented in terms of a [[Stage]] */ trait StreamDecoder extends Decoder { outer ⇒ - protected def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ Stage[ByteString, ByteString] + protected def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ GraphStage[FlowShape[ByteString, ByteString]] def maxBytesPerChunk: Int = Decoder.MaxBytesPerChunkDefault def withMaxBytesPerChunk(newMaxBytesPerChunk: Int): Decoder = @@ -44,11 +45,11 @@ trait StreamDecoder extends Decoder { outer ⇒ def encoding: HttpEncoding = outer.encoding override def maxBytesPerChunk: Int = newMaxBytesPerChunk - def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ Stage[ByteString, ByteString] = + def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ GraphStage[FlowShape[ByteString, ByteString]] = outer.newDecompressorStage(maxBytesPerChunk) } - def decoderFlow: Flow[ByteString, ByteString, Unit] = - Flow[ByteString].transform(newDecompressorStage(maxBytesPerChunk)) + def decoderFlow: Flow[ByteString, ByteString, NotUsed] = + Flow.fromGraph(newDecompressorStage(maxBytesPerChunk)()) } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Deflate.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Deflate.scala index 887f8e27b2..646b564fe6 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Deflate.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Deflate.scala @@ -1,21 +1,19 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
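With the `Decoder` change above, `decoderFlow` is now built from a `GraphStage` and materializes `NotUsed`. A sketch of plugging it into a stream, using only API shown in this patch (the `compressed` parameter is a placeholder source of gzipped bytes):

```scala
import akka.NotUsed
import akka.http.scaladsl.coding.Gzip
import akka.stream.scaladsl.Source
import akka.util.ByteString

object DecodeStream {
  // Route a source of compressed ByteStrings through the GraphStage-backed decoder
  def decompress(compressed: Source[ByteString, NotUsed]): Source[ByteString, NotUsed] =
    compressed.via(new Gzip(_ ⇒ true).decoderFlow)
}
```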
*/ package akka.http.scaladsl.coding -import java.lang.reflect.{ Method, InvocationTargetException } import java.util.zip.{ Inflater, Deflater } -import akka.stream.stage._ +import akka.stream.Attributes +import akka.stream.io.ByteStringParser +import akka.stream.io.ByteStringParser.{ ParseResult, ParseStep } import akka.util.{ ByteStringBuilder, ByteString } import scala.annotation.tailrec -import akka.http.impl.util._ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.HttpEncodings -import scala.util.control.NonFatal - class Deflate(val messageFilter: HttpMessage ⇒ Boolean) extends Coder with StreamDecoder { val encoding = HttpEncodings.deflate def newCompressor = new DeflateCompressor @@ -47,7 +45,10 @@ class DeflateCompressor extends Compressor { deflater.setInput(input.toArray) drainDeflater(deflater, buffer) } - protected def flushWithBuffer(buffer: Array[Byte]): ByteString = DeflateCompressor.flush(deflater, buffer) + protected def flushWithBuffer(buffer: Array[Byte]): ByteString = { + val written = deflater.deflate(buffer, 0, buffer.length, Deflater.SYNC_FLUSH) + ByteString.fromArray(buffer, 0, written) + } protected def finishWithBuffer(buffer: Array[Byte]): ByteString = { deflater.finish() val res = drainDeflater(deflater, buffer) @@ -72,40 +73,6 @@ class DeflateCompressor extends Compressor { private[http] object DeflateCompressor { val MinBufferSize = 1024 - // TODO: remove reflective call once Java 6 support is dropped - /** - * Compatibility mode: reflectively call deflate(..., flushMode) if available or use a hack otherwise - */ - private[this] val flushImplementation: (Deflater, Array[Byte]) ⇒ ByteString = { - def flushHack(deflater: Deflater, buffer: Array[Byte]): ByteString = { - // hack: change compression mode to provoke flushing - deflater.deflate(EmptyByteArray, 0, 0) - deflater.setLevel(Deflater.NO_COMPRESSION) - val res1 = drainDeflater(deflater, buffer) - deflater.setLevel(Deflater.BEST_COMPRESSION) - val res2 = drainDeflater(deflater, buffer) - res1 ++ res2 - } - def reflectiveDeflateWithSyncMode(method: Method, syncFlushConstant: Int)(deflater: Deflater, buffer: Array[Byte]): ByteString = - try { - val written = method.invoke(deflater, buffer, 0: java.lang.Integer, buffer.length: java.lang.Integer, syncFlushConstant: java.lang.Integer).asInstanceOf[Int] - ByteString.fromArray(buffer, 0, written) - } catch { - case t: InvocationTargetException ⇒ throw t.getTargetException - } - - try { - val deflateWithFlush = classOf[Deflater].getMethod("deflate", classOf[Array[Byte]], classOf[Int], classOf[Int], classOf[Int]) - require(deflateWithFlush.getReturnType == classOf[Int]) - val flushModeSync = classOf[Deflater].getField("SYNC_FLUSH").get(null).asInstanceOf[Int] - reflectiveDeflateWithSyncMode(deflateWithFlush, flushModeSync) - } catch { - case NonFatal(e) ⇒ flushHack - } - } - - def flush(deflater: Deflater, buffer: Array[Byte]): ByteString = flushImplementation(deflater, buffer) - @tailrec def drainDeflater(deflater: Deflater, buffer: Array[Byte], result: ByteStringBuilder = new ByteStringBuilder()): ByteString = { val len = deflater.deflate(buffer) @@ -120,56 +87,49 @@ private[http] object DeflateCompressor { } class DeflateDecompressor(maxBytesPerChunk: Int = Decoder.MaxBytesPerChunkDefault) extends DeflateDecompressorBase(maxBytesPerChunk) { - protected def createInflater() = new Inflater() - def initial: State = StartInflate - def afterInflate: State = StartInflate + override def createLogic(attr: Attributes) = new DecompressorParsingLogic 
{ + override val inflater: Inflater = new Inflater() - protected def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit = {} - protected def onTruncation(ctx: Context[ByteString]): SyncDirective = ctx.finish() + override val inflateState = new Inflate(true) { + override def onTruncation(): Unit = completeStage() + } + + override def afterInflate = inflateState + override def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit = {} + + startWith(inflateState) + } } -abstract class DeflateDecompressorBase(maxBytesPerChunk: Int = Decoder.MaxBytesPerChunkDefault) extends ByteStringParserStage[ByteString] { - protected def createInflater(): Inflater - val inflater = createInflater() +abstract class DeflateDecompressorBase(maxBytesPerChunk: Int = Decoder.MaxBytesPerChunkDefault) + extends ByteStringParser[ByteString] { - protected def afterInflate: State - protected def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit + abstract class DecompressorParsingLogic extends ParsingLogic { + val inflater: Inflater + def afterInflate: ParseStep[ByteString] + def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit + val inflateState: Inflate - /** Start inflating */ - case object StartInflate extends IntermediateState { - def onPush(data: ByteString, ctx: Context[ByteString]): SyncDirective = { - require(inflater.needsInput()) - inflater.setInput(data.toArray) + abstract class Inflate(noPostProcessing: Boolean) extends ParseStep[ByteString] { + override def canWorkWithPartialData = true + override def parse(reader: ByteStringParser.ByteReader): ParseResult[ByteString] = { + inflater.setInput(reader.remainingData.toArray) - becomeWithRemaining(Inflate()(data), ByteString.empty, ctx) - } - } + val buffer = new Array[Byte](maxBytesPerChunk) + val read = inflater.inflate(buffer) - /** Inflate */ - case class Inflate()(data: ByteString) extends IntermediateState { - override def onPull(ctx: Context[ByteString]): SyncDirective = { - val buffer = new Array[Byte](maxBytesPerChunk) - val read = inflater.inflate(buffer) - if (read > 0) { - afterBytesRead(buffer, 0, read) - ctx.push(ByteString.fromArray(buffer, 0, read)) - } else { - val remaining = data.takeRight(inflater.getRemaining) - val next = - if (inflater.finished()) afterInflate - else StartInflate + reader.skip(reader.remainingSize - inflater.getRemaining) - becomeWithRemaining(next, remaining, ctx) + if (read > 0) { + afterBytesRead(buffer, 0, read) + val next = if (inflater.finished()) afterInflate else this + ParseResult(Some(ByteString.fromArray(buffer, 0, read)), next, noPostProcessing) + } else { + if (inflater.finished()) ParseResult(None, afterInflate, noPostProcessing) + else throw ByteStringParser.NeedMoreData + } } } - def onPush(elem: ByteString, ctx: Context[ByteString]): SyncDirective = - throw new IllegalStateException("Don't expect a new Element") - } - - def becomeWithRemaining(next: State, remaining: ByteString, ctx: Context[ByteString]) = { - become(next) - if (remaining.isEmpty) current.onPull(ctx) - else current.onPush(remaining, ctx) } } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala index 39bfc4d8a9..412881bc22 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.coding +import akka.NotUsed import akka.http.scaladsl.model._ import akka.http.impl.util.StreamUtils import akka.stream.stage.Stage @@ -26,7 +27,7 @@ trait Encoder { def encode(input: ByteString): ByteString = newCompressor.compressAndFinish(input) - def encoderFlow: Flow[ByteString, ByteString, Unit] = Flow[ByteString].transform(newEncodeTransformer) + def encoderFlow: Flow[ByteString, ByteString, NotUsed] = Flow[ByteString].transform(newEncodeTransformer) def newCompressor: Compressor diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/Gzip.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/Gzip.scala index 5f03dfd7a8..35724e3de6 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/Gzip.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/Gzip.scala @@ -1,17 +1,18 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.coding -import akka.util.ByteString -import akka.stream.stage._ - -import akka.http.impl.util.ByteReader -import java.util.zip.{ Inflater, CRC32, ZipException, Deflater } +import java.util.zip.{ CRC32, Deflater, Inflater, ZipException } +import akka.http.impl.engine.ws.{ ProtocolException, FrameEvent } import akka.http.scaladsl.model._ -import headers.HttpEncodings +import akka.http.scaladsl.model.headers.HttpEncodings +import akka.stream.Attributes +import akka.stream.io.ByteStringParser +import akka.stream.io.ByteStringParser.{ ParseResult, ParseStep } +import akka.util.ByteString class Gzip(val messageFilter: HttpMessage ⇒ Boolean) extends Coder with StreamDecoder { val encoding = HttpEncodings.gzip @@ -60,71 +61,55 @@ class GzipCompressor extends DeflateCompressor { } class GzipDecompressor(maxBytesPerChunk: Int = Decoder.MaxBytesPerChunkDefault) extends DeflateDecompressorBase(maxBytesPerChunk) { - protected def createInflater(): Inflater = new Inflater(true) + override def createLogic(attr: Attributes) = new DecompressorParsingLogic { + override val inflater: Inflater = new Inflater(true) + override def afterInflate: ParseStep[ByteString] = ReadTrailer + override def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit = + crc32.update(buffer, offset, length) - def initial: State = Initial + trait Step extends ParseStep[ByteString] { + override def onTruncation(): Unit = failStage(new ZipException("Truncated GZIP stream")) + } + override val inflateState = new Inflate(false) with Step + startWith(ReadHeaders) - /** No bytes were received yet */ - case object Initial extends State { - def onPush(data: ByteString, ctx: Context[ByteString]): SyncDirective = - if (data.isEmpty) ctx.pull() - else becomeWithRemaining(ReadHeaders, data, ctx) + /** Reading the header bytes */ + case object ReadHeaders extends Step { + override def parse(reader: ByteStringParser.ByteReader): ParseResult[ByteString] = { + import reader._ + if (readByte() != 0x1F || readByte() != 0x8B) fail("Not in GZIP format") // check magic header + if (readByte() != 8) fail("Unsupported GZIP compression method") // check compression method + val flags = readByte() + skip(6) // skip MTIME, XFL and OS fields + if ((flags & 4) > 0) skip(readShortLE()) // skip optional extra fields + if ((flags & 8) > 0) skipZeroTerminatedString() // skip optional file name + if ((flags & 16) > 0) skipZeroTerminatedString() // skip optional file comment + if ((flags & 2) > 0 && crc16(fromStartToHere) != readShortLE()) fail("Corrupt GZIP header") - override def onPull(ctx: 
Context[ByteString]): SyncDirective = - if (ctx.isFinishing) { - ctx.finish() - } else super.onPull(ctx) - } + inflater.reset() + crc32.reset() + ParseResult(None, inflateState, false) + } + } + var crc32: CRC32 = new CRC32 + private def fail(msg: String) = throw new ZipException(msg) - var crc32: CRC32 = new CRC32 - protected def afterInflate: State = ReadTrailer - - /** Reading the header bytes */ - case object ReadHeaders extends ByteReadingState { - def read(reader: ByteReader, ctx: Context[ByteString]): SyncDirective = { - import reader._ - - if (readByte() != 0x1F || readByte() != 0x8B) fail("Not in GZIP format") // check magic header - if (readByte() != 8) fail("Unsupported GZIP compression method") // check compression method - val flags = readByte() - skip(6) // skip MTIME, XFL and OS fields - if ((flags & 4) > 0) skip(readShortLE()) // skip optional extra fields - if ((flags & 8) > 0) skipZeroTerminatedString() // skip optional file name - if ((flags & 16) > 0) skipZeroTerminatedString() // skip optional file comment - if ((flags & 2) > 0 && crc16(fromStartToHere) != readShortLE()) fail("Corrupt GZIP header") - - inflater.reset() - crc32.reset() - becomeWithRemaining(StartInflate, remainingData, ctx) + /** Reading the trailer */ + case object ReadTrailer extends Step { + override def parse(reader: ByteStringParser.ByteReader): ParseResult[ByteString] = { + import reader._ + if (readIntLE() != crc32.getValue.toInt) fail("Corrupt data (CRC32 checksum error)") + if (readIntLE() != inflater.getBytesWritten.toInt /* truncated to 32bit */ ) + fail("Corrupt GZIP trailer ISIZE") + ParseResult(None, ReadHeaders, true) + } } } - - protected def afterBytesRead(buffer: Array[Byte], offset: Int, length: Int): Unit = - crc32.update(buffer, offset, length) - - /** Reading the trailer */ - case object ReadTrailer extends ByteReadingState { - def read(reader: ByteReader, ctx: Context[ByteString]): SyncDirective = { - import reader._ - - if (readIntLE() != crc32.getValue.toInt) fail("Corrupt data (CRC32 checksum error)") - if (readIntLE() != inflater.getBytesWritten.toInt /* truncated to 32bit */ ) fail("Corrupt GZIP trailer ISIZE") - - becomeWithRemaining(Initial, remainingData, ctx) - } - } - - override def onUpstreamFinish(ctx: Context[ByteString]): TerminationDirective = ctx.absorbTermination() - private def crc16(data: ByteString) = { val crc = new CRC32 crc.update(data.toArray) crc.getValue.toInt & 0xFFFF } - - override protected def onTruncation(ctx: Context[ByteString]): SyncDirective = ctx.fail(new ZipException("Truncated GZIP stream")) - - private def fail(msg: String) = throw new ZipException(msg) } /** INTERNAL API */ diff --git a/akka-http/src/main/scala/akka/http/scaladsl/coding/NoCoding.scala b/akka-http/src/main/scala/akka/http/scaladsl/coding/NoCoding.scala index ba59b93fd4..4a3d6046fe 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/coding/NoCoding.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/coding/NoCoding.scala @@ -1,12 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
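The rewritten `GzipDecompressor` above is now a `ByteStringParser` with explicit `ReadHeaders`, `Inflate` and `ReadTrailer` steps, but its user-facing surface stays the same: `encode` for strict compression and `decode` for stream-backed decompression. A small round trip, using only the coder API visible in this patch (system name and payload are placeholders):

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.coding.Gzip
import akka.stream.ActorMaterializer
import akka.util.ByteString

import scala.concurrent.Future

object CodingRoundTrip extends App {
  implicit val system = ActorSystem("coding-example")
  implicit val materializer = ActorMaterializer()

  val gzip = new Gzip(_ ⇒ true)
  val compressed: ByteString = gzip.encode(ByteString("hello")) // strict, in-memory
  val decompressed: Future[ByteString] = gzip.decode(compressed) // runs the decoder stream
}
```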
*/ package akka.http.scaladsl.coding import akka.http.scaladsl.model._ import akka.http.impl.util.StreamUtils -import akka.stream.stage.Stage +import akka.stream.FlowShape +import akka.stream.stage.{ GraphStage, Stage } import akka.util.ByteString import headers.HttpEncodings @@ -25,7 +26,7 @@ object NoCoding extends Coder with StreamDecoder { def newCompressor = NoCodingCompressor - def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ Stage[ByteString, ByteString] = + def newDecompressorStage(maxBytesPerChunk: Int): () ⇒ GraphStage[FlowShape[ByteString, ByteString]] = () ⇒ StreamUtils.limitByteChunksStage(maxBytesPerChunk) } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/common/NameReceptacle.scala b/akka-http/src/main/scala/akka/http/scaladsl/common/NameReceptacle.scala index 00854da023..2b32d22fd5 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/common/NameReceptacle.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/common/NameReceptacle.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.common diff --git a/akka-http/src/main/scala/akka/http/scaladsl/common/StrictForm.scala b/akka-http/src/main/scala/akka/http/scaladsl/common/StrictForm.scala index 209e0a011e..63df21cbe5 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/common/StrictForm.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/common/StrictForm.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.common diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ContentTypeOverrider.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ContentTypeOverrider.scala index 6a5842f01e..a792fdf1c7 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ContentTypeOverrider.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ContentTypeOverrider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/EmptyValue.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/EmptyValue.scala index 10d7c2d5c6..3b8d965b64 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/EmptyValue.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/EmptyValue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/GenericMarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/GenericMarshallers.scala index 2c2b70b21f..1db21ba02a 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/GenericMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/GenericMarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshal.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshal.scala index ea358902e3..9ae8f98b60 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshal.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshaller.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshaller.scala index f1312867f3..5d82d6ee41 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshaller.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/Marshaller.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/MultipartMarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/MultipartMarshallers.scala index 5d50eae789..9159774917 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/MultipartMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/MultipartMarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToEntityMarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToEntityMarshallers.scala index f8c80906ae..d710a5f5cd 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToEntityMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToEntityMarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToRequestMarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToRequestMarshallers.scala index 5b78aaa5a9..e4648b7c38 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToRequestMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToRequestMarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToResponseMarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToResponseMarshallers.scala index 9fb1628d75..3beffb37a2 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToResponseMarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/PredefinedToResponseMarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ToResponseMarshallable.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ToResponseMarshallable.scala index 40b4540bef..f4fb5ce756 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ToResponseMarshallable.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/ToResponseMarshallable.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.marshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/package.scala b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/package.scala index c218f07f28..da6c188605 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/marshalling/package.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/marshalling/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/ContentNegotation.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/ContentNegotation.scala index 589b515ce7..48bbf1dfd1 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/ContentNegotation.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/ContentNegotation.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala index 9e723a06ac..e95cab03db 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Directives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Directives.scala index 30a0c9affe..c2f66bbbe8 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Directives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Directives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -29,6 +29,6 @@ trait Directives extends RouteConcatenation with RouteDirectives with SchemeDirectives with SecurityDirectives - with WebsocketDirectives + with WebSocketDirectives object Directives extends Directives diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala index 278f173fa1..b900514219 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala @@ -1,10 +1,11 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server import scala.util.control.NonFatal +import akka.http.scaladsl.settings.RoutingSettings import akka.http.scaladsl.model._ import StatusCodes._ diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/PathMatcher.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/PathMatcher.scala index 48d486d53d..280aed80dd 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/PathMatcher.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/PathMatcher.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Rejection.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Rejection.scala index 3a09367347..b3278ffe69 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Rejection.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Rejection.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -166,13 +166,13 @@ case class MissingCookieRejection(cookieName: String) extends Rejection /** * Rejection created when a websocket request was expected but none was found. */ -case object ExpectedWebsocketRequestRejection extends Rejection +case object ExpectedWebSocketRequestRejection extends Rejection /** * Rejection created when a websocket request was not handled because none of the given subprotocols * was supported. */ -case class UnsupportedWebsocketSubprotocolRejection(supportedProtocol: String) extends Rejection +case class UnsupportedWebSocketSubprotocolRejection(supportedProtocol: String) extends Rejection /** * Rejection created by the `validation` directive as well as for `IllegalArgumentExceptions` diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RejectionHandler.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RejectionHandler.scala index 864694878e..b533f750b2 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RejectionHandler.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RejectionHandler.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -204,8 +204,8 @@ object RejectionHandler { val supported = rejections.map(_.supported.value).mkString(" or ") complete((BadRequest, "The request's Content-Encoding is not supported. Expected:\n" + supported)) } - .handle { case ExpectedWebsocketRequestRejection ⇒ complete((BadRequest, "Expected Websocket Upgrade request")) } - .handleAll[UnsupportedWebsocketSubprotocolRejection] { rejections ⇒ + .handle { case ExpectedWebSocketRequestRejection ⇒ complete((BadRequest, "Expected WebSocket Upgrade request")) } + .handleAll[UnsupportedWebSocketSubprotocolRejection] { rejections ⇒ val supported = rejections.map(_.supportedProtocol) complete(HttpResponse(BadRequest, entity = s"None of the websocket subprotocols offered in the request are supported. 
Supported are ${supported.map("'" + _ + "'").mkString(",")}.", diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala index a88fea5a70..77f23830ea 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContext.scala @@ -1,14 +1,15 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server -import scala.concurrent.{ Future, ExecutionContext } +import scala.concurrent.{ Future, ExecutionContextExecutor } import akka.stream.Materializer import akka.event.LoggingAdapter import akka.http.scaladsl.marshalling.ToResponseMarshallable import akka.http.scaladsl.model._ +import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } /** * Immutable object encapsulating the context of an [[akka.http.scaladsl.model.HttpRequest]] @@ -25,7 +26,7 @@ trait RequestContext { /** * The default ExecutionContext to be used for scheduling asynchronous logic related to this request. */ - implicit def executionContext: ExecutionContext + implicit def executionContext: ExecutionContextExecutor /** * The default Materializer. @@ -42,11 +43,16 @@ trait RequestContext { */ def settings: RoutingSettings + /** + * The default ParserSettings to be used for configuring directives. + */ + def parserSettings: ParserSettings + /** * Returns a copy of this context with the given fields updated. */ def reconfigure( - executionContext: ExecutionContext = executionContext, + executionContext: ExecutionContextExecutor = executionContext, materializer: Materializer = materializer, log: LoggingAdapter = log, settings: RoutingSettings = settings): RequestContext @@ -76,7 +82,7 @@ trait RequestContext { /** * Returns a copy of this context with the new HttpRequest. */ - def withExecutionContext(ec: ExecutionContext): RequestContext + def withExecutionContext(ec: ExecutionContextExecutor): RequestContext /** * Returns a copy of this context with the new HttpRequest. @@ -91,7 +97,12 @@ trait RequestContext { /** * Returns a copy of this context with the new RoutingSettings. */ - def withSettings(settings: RoutingSettings): RequestContext + def withRoutingSettings(settings: RoutingSettings): RequestContext + + /** + * Returns a copy of this context with the new [[ParserSettings]]. + */ + def withParserSettings(settings: ParserSettings): RequestContext /** * Returns a copy of this context with the HttpRequest transformed by the given function. diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala index 76aa562b35..16682045c3 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RequestContextImpl.scala @@ -1,12 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
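The `Rejection.scala` and `RejectionHandler` hunks above rename the websocket rejections to `ExpectedWebSocketRequestRejection` and `UnsupportedWebSocketSubprotocolRejection`. A hypothetical custom handler wiring the renamed types into a handler of one's own; the `newBuilder()`/`result()` entry points are the standard builder API and are assumed here rather than shown in this patch:

```scala
import akka.http.scaladsl.model.StatusCodes.BadRequest
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{ ExpectedWebSocketRequestRejection, RejectionHandler, UnsupportedWebSocketSubprotocolRejection }

object WebSocketRejections {
  val handler: RejectionHandler =
    RejectionHandler.newBuilder()
      .handle { case ExpectedWebSocketRequestRejection ⇒
        complete((BadRequest, "Expected WebSocket Upgrade request"))
      }
      .handleAll[UnsupportedWebSocketSubprotocolRejection] { rejections ⇒
        val supported = rejections.map(_.supportedProtocol).mkString(", ")
        complete((BadRequest, s"Supported WebSocket subprotocols: $supported"))
      }
      .result()
}
```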
*/ package akka.http.scaladsl.server -import scala.concurrent.{ Future, ExecutionContext } -import akka.stream.Materializer +import scala.concurrent.{ Future, ExecutionContextExecutor } +import akka.stream.{ ActorMaterializer, Materializer } import akka.event.LoggingAdapter +import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } import akka.http.scaladsl.marshalling.{ Marshal, ToResponseMarshallable } import akka.http.scaladsl.model._ import akka.http.scaladsl.util.FastFuture @@ -18,16 +19,20 @@ import akka.http.scaladsl.util.FastFuture._ private[http] class RequestContextImpl( val request: HttpRequest, val unmatchedPath: Uri.Path, - val executionContext: ExecutionContext, + val executionContext: ExecutionContextExecutor, val materializer: Materializer, val log: LoggingAdapter, - val settings: RoutingSettings) extends RequestContext { + val settings: RoutingSettings, + val parserSettings: ParserSettings) extends RequestContext { - def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings)(implicit ec: ExecutionContext, materializer: Materializer) = - this(request, request.uri.path, ec, materializer, log, settings) + def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings, parserSettings: ParserSettings)(implicit ec: ExecutionContextExecutor, materializer: Materializer) = + this(request, request.uri.path, ec, materializer, log, settings, parserSettings) - def reconfigure(executionContext: ExecutionContext, materializer: Materializer, log: LoggingAdapter, settings: RoutingSettings): RequestContext = - copy(executionContext = executionContext, materializer = materializer, log = log, settings = settings) + def this(request: HttpRequest, log: LoggingAdapter, settings: RoutingSettings)(implicit ec: ExecutionContextExecutor, materializer: Materializer) = + this(request, request.uri.path, ec, materializer, log, settings, ParserSettings(ActorMaterializer.downcast(materializer).system)) + + def reconfigure(executionContext: ExecutionContextExecutor, materializer: Materializer, log: LoggingAdapter, settings: RoutingSettings): RequestContext = + copy(executionContext = executionContext, materializer = materializer, log = log, routingSettings = settings) override def complete(trm: ToResponseMarshallable): Future[RouteResult] = trm(request)(executionContext) @@ -47,7 +52,7 @@ private[http] class RequestContextImpl( override def withRequest(request: HttpRequest): RequestContext = if (request != this.request) copy(request = request) else this - override def withExecutionContext(executionContext: ExecutionContext): RequestContext = + override def withExecutionContext(executionContext: ExecutionContextExecutor): RequestContext = if (executionContext != this.executionContext) copy(executionContext = executionContext) else this override def withMaterializer(materializer: Materializer): RequestContext = @@ -56,8 +61,11 @@ private[http] class RequestContextImpl( override def withLog(log: LoggingAdapter): RequestContext = if (log != this.log) copy(log = log) else this - override def withSettings(settings: RoutingSettings): RequestContext = - if (settings != this.settings) copy(settings = settings) else this + override def withRoutingSettings(routingSettings: RoutingSettings): RequestContext = + if (routingSettings != this.settings) copy(routingSettings = routingSettings) else this + + override def withParserSettings(parserSettings: ParserSettings): RequestContext = + if (parserSettings != this.parserSettings) copy(parserSettings = parserSettings) else 
this override def mapRequest(f: HttpRequest ⇒ HttpRequest): RequestContext = copy(request = f(request)) @@ -83,9 +91,10 @@ private[http] class RequestContextImpl( private def copy(request: HttpRequest = request, unmatchedPath: Uri.Path = unmatchedPath, - executionContext: ExecutionContext = executionContext, + executionContext: ExecutionContextExecutor = executionContext, materializer: Materializer = materializer, log: LoggingAdapter = log, - settings: RoutingSettings = settings) = - new RequestContextImpl(request, unmatchedPath, executionContext, materializer, log, settings) + routingSettings: RoutingSettings = settings, + parserSettings: ParserSettings = parserSettings) = + new RequestContextImpl(request, unmatchedPath, executionContext, materializer, log, routingSettings, parserSettings) } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala index 825584003b..7bb8ec9685 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/Route.scala @@ -1,12 +1,14 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server -import akka.stream.Materializer +import akka.NotUsed +import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } +import akka.stream.{ ActorMaterializer, Materializer } -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ ExecutionContextExecutor, Future } import akka.stream.scaladsl.Flow import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.http.scaladsl.util.FastFuture._ @@ -22,6 +24,7 @@ object Route { * "Seals" a route by wrapping it with exception handling and rejection conversion. */ def seal(route: Route)(implicit routingSettings: RoutingSettings, + parserSettings: ParserSettings = null, rejectionHandler: RejectionHandler = RejectionHandler.default, exceptionHandler: ExceptionHandler = null): Route = { import directives.ExecutionDirectives._ @@ -38,30 +41,33 @@ object Route { * This conversion is also implicitly available through [[RouteResult.route2HandlerFlow]]. */ def handlerFlow(route: Route)(implicit routingSettings: RoutingSettings, + parserSettings: ParserSettings, materializer: Materializer, routingLog: RoutingLog, - executionContext: ExecutionContext = null, + executionContext: ExecutionContextExecutor = null, rejectionHandler: RejectionHandler = RejectionHandler.default, - exceptionHandler: ExceptionHandler = null): Flow[HttpRequest, HttpResponse, Unit] = + exceptionHandler: ExceptionHandler = null): Flow[HttpRequest, HttpResponse, NotUsed] = Flow[HttpRequest].mapAsync(1)(asyncHandler(route)) /** * Turns a `Route` into an async handler function. 
*/ def asyncHandler(route: Route)(implicit routingSettings: RoutingSettings, + parserSettings: ParserSettings, materializer: Materializer, routingLog: RoutingLog, - executionContext: ExecutionContext = null, + executionContext: ExecutionContextExecutor = null, rejectionHandler: RejectionHandler = RejectionHandler.default, exceptionHandler: ExceptionHandler = null): HttpRequest ⇒ Future[HttpResponse] = { val effectiveEC = if (executionContext ne null) executionContext else materializer.executionContext { implicit val executionContext = effectiveEC // overrides parameter + val effectiveParserSettings = if (parserSettings ne null) parserSettings else ParserSettings(ActorMaterializer.downcast(materializer).system) val sealedRoute = seal(route) request ⇒ - sealedRoute(new RequestContextImpl(request, routingLog.requestLog(request), routingSettings)).fast + sealedRoute(new RequestContextImpl(request, routingLog.requestLog(request), routingSettings, effectiveParserSettings)).fast .map { case RouteResult.Complete(response) ⇒ response case RouteResult.Rejected(rejected) ⇒ throw new IllegalStateException(s"Unhandled rejections '$rejected', unsealed RejectionHandler?!") diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RouteConcatenation.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RouteConcatenation.scala index 925ad6d5dd..7947b6a717 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RouteConcatenation.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RouteConcatenation.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RouteResult.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RouteResult.scala index ec4af4849e..772a4c824f 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RouteResult.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RouteResult.scala @@ -1,11 +1,13 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server import scala.collection.immutable import scala.concurrent.ExecutionContext +import akka.NotUsed +import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } import akka.stream.Materializer import akka.stream.scaladsl.Flow import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } @@ -23,10 +25,11 @@ object RouteResult { final case class Rejected(rejections: immutable.Seq[Rejection]) extends RouteResult implicit def route2HandlerFlow(route: Route)(implicit routingSettings: RoutingSettings, + parserSettings: ParserSettings, materializer: Materializer, routingLog: RoutingLog, executionContext: ExecutionContext = null, rejectionHandler: RejectionHandler = RejectionHandler.default, - exceptionHandler: ExceptionHandler = null): Flow[HttpRequest, HttpResponse, Unit] = + exceptionHandler: ExceptionHandler = null): Flow[HttpRequest, HttpResponse, NotUsed] = Route.handlerFlow(route) } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingLog.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingLog.scala index d3950167eb..d872cdc0b4 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingLog.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/RoutingLog.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
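For reference, a minimal sketch of how a route might be bound once `Route.handlerFlow`/`asyncHandler` accept an implicit `ParserSettings` (the system name, port, and the `withMaxHeaderCount` tweak are illustrative assumptions, not part of this change; when no `ParserSettings` is in scope the defaults are derived from the `ActorSystem`, as the change above shows):

```
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.settings.ParserSettings
import akka.stream.ActorMaterializer

implicit val system = ActorSystem("sketch")
implicit val materializer = ActorMaterializer()

// An explicitly provided ParserSettings instance is picked up by the implicit route-to-flow conversion.
implicit val parserSettings: ParserSettings = ParserSettings(system).withMaxHeaderCount(128)

val route = path("ping") { complete("pong") }

Http().bindAndHandle(route, "localhost", 8080)
```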
*/ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/StandardRoute.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/StandardRoute.scala index 0f399d5a9e..9cc70168a1 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/StandardRoute.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/StandardRoute.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala index 5a91f6d688..15646befae 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/BasicDirectives.scala @@ -1,14 +1,15 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server package directives -import scala.concurrent.{ Future, ExecutionContext } +import scala.concurrent.{ Future, ExecutionContextExecutor } import scala.collection.immutable import akka.event.LoggingAdapter import akka.stream.Materializer +import akka.http.scaladsl.settings.{ RoutingSettings, ParserSettings } import akka.http.scaladsl.server.util.Tuple import akka.http.scaladsl.util.FastFuture import akka.http.scaladsl.model._ @@ -130,15 +131,15 @@ trait BasicDirectives { def extractUri: Directive1[Uri] = BasicDirectives._extractUri /** - * Runs its inner route with the given alternative [[ExecutionContext]]. + * Runs its inner route with the given alternative [[ExecutionContextExecutor]]. */ - def withExecutionContext(ec: ExecutionContext): Directive0 = + def withExecutionContext(ec: ExecutionContextExecutor): Directive0 = mapRequestContext(_ withExecutionContext ec) /** - * Extracts the [[ExecutionContext]] from the [[RequestContext]]. + * Extracts the [[ExecutionContextExecutor]] from the [[RequestContext]]. */ - def extractExecutionContext: Directive1[ExecutionContext] = BasicDirectives._extractExecutionContext + def extractExecutionContext: Directive1[ExecutionContextExecutor] = BasicDirectives._extractExecutionContext /** * Runs its inner route with the given alternative [[Materializer]]. @@ -167,13 +168,13 @@ trait BasicDirectives { * Runs its inner route with the given alternative [[RoutingSettings]]. */ def withSettings(settings: RoutingSettings): Directive0 = - mapRequestContext(_ withSettings settings) + mapRequestContext(_ withRoutingSettings settings) /** * Runs the inner route with settings mapped by the given function. */ def mapSettings(f: RoutingSettings ⇒ RoutingSettings): Directive0 = - mapRequestContext(ctx ⇒ ctx.withSettings(f(ctx.settings))) + mapRequestContext(ctx ⇒ ctx.withRoutingSettings(f(ctx.settings))) /** * Extracts the [[RoutingSettings]] from the [[RequestContext]]. @@ -181,6 +182,12 @@ trait BasicDirectives { def extractSettings: Directive1[RoutingSettings] = BasicDirectives._extractSettings + /** + * Extracts the [[akka.http.scaladsl.settings.ParserSettings]] from the [[RequestContext]]. + */ + def extractParserSettings: Directive1[ParserSettings] = + BasicDirectives._extractParserSettings + /** * Extracts the [[RequestContext]] itself. 
*/ @@ -191,9 +198,10 @@ object BasicDirectives extends BasicDirectives { private val _extractUnmatchedPath: Directive1[Uri.Path] = extract(_.unmatchedPath) private val _extractRequest: Directive1[HttpRequest] = extract(_.request) private val _extractUri: Directive1[Uri] = extract(_.request.uri) - private val _extractExecutionContext: Directive1[ExecutionContext] = extract(_.executionContext) + private val _extractExecutionContext: Directive1[ExecutionContextExecutor] = extract(_.executionContext) private val _extractMaterializer: Directive1[Materializer] = extract(_.materializer) private val _extractLog: Directive1[LoggingAdapter] = extract(_.log) private val _extractSettings: Directive1[RoutingSettings] = extract(_.settings) + private val _extractParserSettings: Directive1[ParserSettings] = extract(_.parserSettings) private val _extractRequestContext: Directive1[RequestContext] = extract(conforms) } diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CacheConditionDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CacheConditionDirectives.scala index e9330b2df3..aa96961c4d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CacheConditionDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CacheConditionDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CodingDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CodingDirectives.scala index bb1cbd2a46..4bb54588f6 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CodingDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CodingDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CookieDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CookieDirectives.scala index 4143c9f083..4a7a83570a 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CookieDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/CookieDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/DebuggingDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/DebuggingDirectives.scala index e94c687cc6..fe1d06dc09 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/DebuggingDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/DebuggingDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
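A small usage sketch of the `extractParserSettings` directive introduced above (the route body and the `maxHeaderCount` read are only illustrative):

```
import akka.http.scaladsl.server.Directives._

val route =
  extractParserSettings { parserSettings =>
    // The ParserSettings now travel with the RequestContext and can be inspected per request.
    complete(s"this route parses at most ${parserSettings.maxHeaderCount} headers")
  }
```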
*/ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala index ec356af339..5579212c9c 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectives.scala index cc4f13f064..464c428eb1 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileAndResourceDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileUploadDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileUploadDirectives.scala index 6a5e33d792..773d09065c 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileUploadDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FileUploadDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.http.scaladsl.server.directives @@ -76,7 +76,6 @@ trait FileUploadDirectives { case Some(tuple) ⇒ provide(tuple) case None ⇒ reject(MissingFormFieldRejection(fieldName)) } - } object FileUploadDirectives extends FileUploadDirectives diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala index e856ebbeca..2e8b5fff9b 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FormFieldDirectives.scala @@ -1,20 +1,39 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server package directives +import akka.http.impl.util._ +import akka.http.scaladsl.common._ +import akka.http.scaladsl.server.directives.RouteDirectives._ +import akka.http.scaladsl.unmarshalling.Unmarshaller.UnsupportedContentTypeException +import akka.http.scaladsl.util.FastFuture._ + +import scala.annotation.tailrec +import scala.collection.immutable import scala.concurrent.Future import scala.util.{ Failure, Success } -import akka.http.scaladsl.unmarshalling.Unmarshaller.UnsupportedContentTypeException -import akka.http.scaladsl.common._ -import akka.http.impl.util._ -import akka.http.scaladsl.util.FastFuture._ trait FormFieldDirectives extends ToNameReceptacleEnhancements { import FormFieldDirectives._ + /** + * Extracts HTTP form fields from the request as a ``Map[String, String]``. + */ + def formFieldMap: Directive1[Map[String, String]] = _formFieldMap + + /** + * Extracts HTTP form fields from the request as a ``Map[String, List[String]]``. 
+ */ + def formFieldMultiMap: Directive1[Map[String, List[String]]] = _formFieldMultiMap + + /** + * Extracts HTTP form fields from the request as a ``Seq[(String, String)]``. + */ + def formFieldSeq: Directive1[immutable.Seq[(String, String)]] = _formFieldSeq + /** * Extracts an HTTP form field from the request. * Rejects the request if the defined form field matcher(s) don't match. @@ -40,6 +59,50 @@ trait FormFieldDirectives extends ToNameReceptacleEnhancements { } object FormFieldDirectives extends FormFieldDirectives { + + private val _formFieldSeq: Directive1[immutable.Seq[(String, String)]] = { + import BasicDirectives._ + import FutureDirectives._ + import akka.http.scaladsl.unmarshalling._ + + extract { ctx ⇒ + import ctx.{ executionContext, materializer } + Unmarshal(ctx.request.entity).to[StrictForm].fast.flatMap { form ⇒ + val fields = form.fields.collect { + case (name, field) if name.nonEmpty ⇒ + Unmarshal(field).to[String].map(fieldString ⇒ (name, fieldString)) + } + Future.sequence(fields) + } + }.flatMap { sequenceF ⇒ + onComplete(sequenceF).flatMap { + case Success(x) ⇒ provide(x) + case Failure(x: UnsupportedContentTypeException) ⇒ reject(UnsupportedRequestContentTypeRejection(x.supported)) + case Failure(_) ⇒ reject // TODO Use correct rejections + } + } + } + + private val _formFieldMultiMap: Directive1[Map[String, List[String]]] = { + @tailrec def append( + map: Map[String, List[String]], + fields: immutable.Seq[(String, String)]): Map[String, List[String]] = { + if (fields.isEmpty) { + map + } else { + val (key, value) = fields.head + append(map.updated(key, value :: map.getOrElse(key, Nil)), fields.tail) + } + } + + _formFieldSeq.map { + case seq ⇒ + append(Map.empty, seq) + } + } + + private val _formFieldMap: Directive1[Map[String, String]] = _formFieldSeq.map(_.toMap) + sealed trait FieldMagnet { type Out def apply(): Out @@ -64,10 +127,10 @@ object FormFieldDirectives extends FormFieldDirectives { def apply(value: A) = f(value) } - import akka.http.scaladsl.unmarshalling.{ FromStrictFormFieldUnmarshaller ⇒ FSFFU, _ } import BasicDirectives._ - import RouteDirectives._ import FutureDirectives._ + import RouteDirectives._ + import akka.http.scaladsl.unmarshalling.{ FromStrictFormFieldUnmarshaller ⇒ FSFFU, _ } type SFU = FromEntityUnmarshaller[StrictForm] type FSFFOU[T] = Unmarshaller[Option[StrictForm.Field], T] @@ -82,8 +145,7 @@ object FormFieldDirectives extends FormFieldDirectives { //////////////////// "regular" formField extraction //////////////////// private def fieldOfForm[T](fieldName: String, fu: Unmarshaller[Option[StrictForm.Field], T])(implicit sfu: SFU): RequestContext ⇒ Future[T] = { ctx ⇒ - import ctx.executionContext - import ctx.materializer + import ctx.{ executionContext, materializer } sfu(ctx.request.entity).fast.flatMap(form ⇒ fu(form field fieldName)) } private def filter[T](fieldName: String, fu: FSFFOU[T])(implicit sfu: SFU): Directive1[T] = @@ -124,8 +186,7 @@ object FormFieldDirectives extends FormFieldDirectives { private def repeatedFilter[T](fieldName: String, fu: FSFFU[T])(implicit sfu: SFU): Directive1[Iterable[T]] = extract { ctx ⇒ - import ctx.executionContext - import ctx.materializer + import ctx.{ executionContext, materializer } sfu(ctx.request.entity).fast.flatMap(form ⇒ Future.sequence(form.fields.collect { case (`fieldName`, value) ⇒ fu(value) })) }.flatMap { result ⇒ handleFieldResult(fieldName, result) @@ -137,8 +198,8 @@ object FormFieldDirectives extends FormFieldDirectives { //////////////////// tuple support 
//////////////////// - import akka.http.scaladsl.server.util.TupleOps._ import akka.http.scaladsl.server.util.BinaryPolyFunc + import akka.http.scaladsl.server.util.TupleOps._ implicit def forTuple[T](implicit fold: FoldLeft[Directive0, T, ConvertFieldDefAndConcatenate.type]): FieldDefAux[T, fold.Out] = fieldDef[T, fold.Out](fold(pass, _)) diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FutureDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FutureDirectives.scala index d16de93fc0..eafbbb1a72 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FutureDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/FutureDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HeaderDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HeaderDirectives.scala index 35988246a5..d8e97ddc91 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HeaderDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HeaderDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HostDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HostDirectives.scala index 9d23e93e48..1da48db60d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HostDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/HostDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MarshallingDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MarshallingDirectives.scala index d5c6678fcf..fe44221e59 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MarshallingDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MarshallingDirectives.scala @@ -1,14 +1,19 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
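As a usage sketch for the `formFieldMap` / `formFieldMultiMap` / `formFieldSeq` directives added above (the HTTP methods and the `color` field name are assumptions for illustration only):

```
import akka.http.scaladsl.server.Directives._

val route =
  post {
    // All form fields as a Map[String, String]; for repeated fields only one value is kept.
    formFieldMap { fields =>
      complete(s"received fields: ${fields.keys.mkString(", ")}")
    }
  } ~
  put {
    // Repeated fields are collected into a List per key.
    formFieldMultiMap { fields =>
      complete(s"colors: ${fields.getOrElse("color", Nil).mkString(", ")}")
    }
  }
```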
*/ package akka.http.scaladsl.server package directives +import akka.actor.ActorSystem +import akka.http.scaladsl.settings.ParserSettings +import akka.http.scaladsl.model.Multipart +import akka.http.scaladsl.model.Multipart.ByteRanges + import scala.concurrent.Promise import scala.util.{ Failure, Success } import akka.http.scaladsl.marshalling.ToResponseMarshaller -import akka.http.scaladsl.unmarshalling.{ Unmarshaller, FromRequestUnmarshaller } +import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, MultipartUnmarshallers, Unmarshaller, FromRequestUnmarshaller } import akka.http.impl.util._ trait MarshallingDirectives { diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MethodDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MethodDirectives.scala index 2e552f1dac..3036ff5890 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MethodDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MethodDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MiscDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MiscDirectives.scala index 2b2689607e..5071a7ac41 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MiscDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/MiscDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ParameterDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ParameterDirectives.scala index 2880e4c951..51c7253bc4 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ParameterDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ParameterDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/PathDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/PathDirectives.scala index c233ff6204..6d53e7bf09 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/PathDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/PathDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RangeDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RangeDirectives.scala index c2dc41b51d..024605c244 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RangeDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RangeDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RouteDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RouteDirectives.scala index 53f889f20b..7fc6cea13c 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RouteDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/RouteDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SchemeDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SchemeDirectives.scala index 33b3c5b46e..e7bcc86303 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SchemeDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SchemeDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SecurityDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SecurityDirectives.scala index 6acd75eb69..bd8b45a9a9 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SecurityDirectives.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/SecurityDirectives.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server @@ -215,6 +215,8 @@ object Credentials { new Credentials.Provided(token) { def verify(secret: String): Boolean = secret secure_== token } + case Some(GenericHttpCredentials(scheme, token, params)) ⇒ + throw new UnsupportedOperationException("cannot verify generic HTTP credentials") case None ⇒ Credentials.Missing } } @@ -250,4 +252,4 @@ trait AuthenticationDirective[T] extends Directive1[T] { object AuthenticationDirective { implicit def apply[T](other: Directive1[T]): AuthenticationDirective[T] = new AuthenticationDirective[T] { def tapply(inner: Tuple1[T] ⇒ Route) = other.tapply(inner) } -} \ No newline at end of file +} diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebSocketDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebSocketDirectives.scala new file mode 100644 index 0000000000..c3382e6618 --- /dev/null +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebSocketDirectives.scala @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2009-2016 Typesafe Inc. + */ + +package akka.http.scaladsl.server +package directives + +import scala.collection.immutable + +import akka.http.scaladsl.model.ws.{ UpgradeToWebSocket, Message } +import akka.stream.scaladsl.Flow + +trait WebSocketDirectives { + import RouteDirectives._ + import HeaderDirectives._ + import BasicDirectives._ + + /** + * Extract the [[UpgradeToWebSocket]] header if existent. Rejects with an [[ExpectedWebSocketRequestRejection]], otherwise. + */ + def extractUpgradeToWebSocket: Directive1[UpgradeToWebSocket] = + optionalHeaderValueByType[UpgradeToWebSocket](()).flatMap { + case Some(upgrade) ⇒ provide(upgrade) + case None ⇒ reject(ExpectedWebSocketRequestRejection) + } + + /** + * Extract the list of WebSocket subprotocols as offered by the client in the [[Sec-WebSocket-Protocol]] header if + * this is a WebSocket request. 
Rejects with an [[ExpectedWebSocketRequestRejection]], otherwise. + */ + def extractOfferedWsProtocols: Directive1[immutable.Seq[String]] = extractUpgradeToWebSocket.map(_.requestedProtocols) + + /** + * Handles WebSocket requests with the given handler and rejects other requests with an + * [[ExpectedWebSocketRequestRejection]]. + */ + def handleWebSocketMessages(handler: Flow[Message, Message, Any]): Route = + handleWebSocketMessagesForOptionalProtocol(handler, None) + + /** + * Handles WebSocket requests with the given handler if the given subprotocol is offered in the request and + * rejects other requests with an [[ExpectedWebSocketRequestRejection]] or an [[UnsupportedWebSocketSubprotocolRejection]]. + */ + def handleWebSocketMessagesForProtocol(handler: Flow[Message, Message, Any], subprotocol: String): Route = + handleWebSocketMessagesForOptionalProtocol(handler, Some(subprotocol)) + + /** + * Handles WebSocket requests with the given handler and rejects other requests with an + * [[ExpectedWebSocketRequestRejection]]. + * + * If the `subprotocol` parameter is None any WebSocket request is accepted. If the `subprotocol` parameter is + * `Some(protocol)` a WebSocket request is only accepted if the list of subprotocols supported by the client (as + * announced in the WebSocket request) contains `protocol`. If the client did not offer the protocol in question + * the request is rejected with an [[UnsupportedWebSocketSubprotocolRejection]] rejection. + * + * To support several subprotocols you may chain several `handleWebSocketMessage` Routes. + */ + def handleWebSocketMessagesForOptionalProtocol(handler: Flow[Message, Message, Any], subprotocol: Option[String]): Route = + extractUpgradeToWebSocket { upgrade ⇒ + if (subprotocol.forall(sub ⇒ upgrade.requestedProtocols.exists(_ equalsIgnoreCase sub))) + complete(upgrade.handleMessages(handler, subprotocol)) + else + reject(UnsupportedWebSocketSubprotocolRejection(subprotocol.get)) // None.forall == true + } +} diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebsocketDirectives.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebsocketDirectives.scala deleted file mode 100644 index 5768c4ea25..0000000000 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/directives/WebsocketDirectives.scala +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (C) 2009-2015 Typesafe Inc. - */ - -package akka.http.scaladsl.server -package directives - -import scala.collection.immutable - -import akka.http.scaladsl.model.ws.{ UpgradeToWebsocket, Message } -import akka.stream.scaladsl.Flow - -trait WebsocketDirectives { - import RouteDirectives._ - import HeaderDirectives._ - import BasicDirectives._ - - /** - * Extract the [[UpgradeToWebsocket]] header if existent. Rejects with an [[ExpectedWebsocketRequestRejection]], otherwise. - */ - def extractUpgradeToWebsocket: Directive1[UpgradeToWebsocket] = - optionalHeaderValueByType[UpgradeToWebsocket](()).flatMap { - case Some(upgrade) ⇒ provide(upgrade) - case None ⇒ reject(ExpectedWebsocketRequestRejection) - } - - /** - * Extract the list of Websocket subprotocols as offered by the client in the [[Sec-Websocket-Protocol]] header if - * this is a Websocket request. Rejects with an [[ExpectedWebsocketRequestRejection]], otherwise. 
- */ - def extractOfferedWsProtocols: Directive1[immutable.Seq[String]] = extractUpgradeToWebsocket.map(_.requestedProtocols) - - /** - * Handles Websocket requests with the given handler and rejects other requests with an - * [[ExpectedWebsocketRequestRejection]]. - */ - def handleWebsocketMessages(handler: Flow[Message, Message, Any]): Route = - handleWebsocketMessagesForOptionalProtocol(handler, None) - - /** - * Handles Websocket requests with the given handler if the given subprotocol is offered in the request and - * rejects other requests with an [[ExpectedWebsocketRequestRejection]] or an [[UnsupportedWebsocketSubprotocolRejection]]. - */ - def handleWebsocketMessagesForProtocol(handler: Flow[Message, Message, Any], subprotocol: String): Route = - handleWebsocketMessagesForOptionalProtocol(handler, Some(subprotocol)) - - /** - * Handles Websocket requests with the given handler and rejects other requests with an - * [[ExpectedWebsocketRequestRejection]]. - * - * If the `subprotocol` parameter is None any Websocket request is accepted. If the `subprotocol` parameter is - * `Some(protocol)` a Websocket request is only accepted if the list of subprotocols supported by the client (as - * announced in the Websocket request) contains `protocol`. If the client did not offer the protocol in question - * the request is rejected with an [[UnsupportedWebsocketSubprotocolRejection]] rejection. - * - * To support several subprotocols you may chain several `handleWebsocketMessage` Routes. - */ - def handleWebsocketMessagesForOptionalProtocol(handler: Flow[Message, Message, Any], subprotocol: Option[String]): Route = - extractUpgradeToWebsocket { upgrade ⇒ - if (subprotocol.forall(sub ⇒ upgrade.requestedProtocols.exists(_ equalsIgnoreCase sub))) - complete(upgrade.handleMessages(handler, subprotocol)) - else - reject(UnsupportedWebsocketSubprotocolRejection(subprotocol.get)) // None.forall == true - } -} diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/package.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/package.scala index a28e0f9565..123df45b6d 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/package.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/package.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ApplyConverter.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ApplyConverter.scala index 36ae741324..c07f7e6ed9 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ApplyConverter.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ApplyConverter.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/BinaryPolyFunc.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/BinaryPolyFunc.scala index 851e85912d..2989d927e2 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/BinaryPolyFunc.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/BinaryPolyFunc.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
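A minimal sketch of the renamed WebSocket directives in use (the echo flow, paths, and the subprotocol name are illustrative assumptions):

```
import akka.http.scaladsl.model.ws.{ Message, TextMessage }
import akka.http.scaladsl.server.Directives._
import akka.stream.scaladsl.Flow

// Echo back strict text messages; streamed messages are simply dropped in this sketch.
val echo: Flow[Message, Message, Any] =
  Flow[Message].collect { case TextMessage.Strict(text) => TextMessage("echo: " + text) }

val route =
  path("ws-echo") {
    handleWebSocketMessages(echo)
  } ~
  path("ws-v1") {
    // Accepted only if the client offered this subprotocol; otherwise the request is
    // rejected with an UnsupportedWebSocketSubprotocolRejection.
    handleWebSocketMessagesForProtocol(echo, "v1.json")
  }
```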
*/ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ClassMagnet.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ClassMagnet.scala index 38d0140e47..950b5a371a 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ClassMagnet.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ClassMagnet.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ConstructFromTuple.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ConstructFromTuple.scala index 0e0f372dae..f47698a348 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/ConstructFromTuple.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/ConstructFromTuple.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tuple.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tuple.scala index 7ab96e8886..378c046ab9 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tuple.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tuple.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/TupleOps.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/TupleOps.scala index 1da0800a4a..ac89f86a4e 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/TupleOps.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/TupleOps.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tupler.scala b/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tupler.scala index 2ef444f300..7cddd2a690 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tupler.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/server/util/Tupler.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.server.util diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/GenericUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/GenericUnmarshallers.scala index 992a314daf..883f65ce4a 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/GenericUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/GenericUnmarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.http.scaladsl.unmarshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala index 8410bca780..7d522bcae7 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/MultipartUnmarshallers.scala @@ -1,15 +1,18 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.unmarshalling +import akka.actor.ActorSystem +import akka.http.scaladsl.settings.ParserSettings + import scala.collection.immutable import scala.collection.immutable.VectorBuilder import akka.util.ByteString import akka.event.{ NoLogging, LoggingAdapter } -import akka.stream.OverflowStrategy -import akka.stream.impl.fusing.{ GraphInterpreter, IteratorInterpreter } +import akka.stream.{ActorMaterializer, OverflowStrategy} +import akka.stream.impl.fusing.IteratorInterpreter import akka.stream.scaladsl._ import akka.http.impl.engine.parsing.BodyPartParser import akka.http.impl.util._ @@ -20,11 +23,16 @@ import MediaTypes._ import HttpCharsets._ import akka.stream.impl.fusing.SubSource +/** + * Provides [[Multipart]] marshallers. + * It is possible to configure the default parsing mode by providing an implicit [[ParserSettings]] instance. + */ trait MultipartUnmarshallers { - implicit def defaultMultipartGeneralUnmarshaller(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[Multipart.General] = + implicit def defaultMultipartGeneralUnmarshaller(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): FromEntityUnmarshaller[Multipart.General] = multipartGeneralUnmarshaller(`UTF-8`) - def multipartGeneralUnmarshaller(defaultCharset: HttpCharset)(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[Multipart.General] = + + def multipartGeneralUnmarshaller(defaultCharset: HttpCharset)(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): FromEntityUnmarshaller[Multipart.General] = multipartUnmarshaller[Multipart.General, Multipart.General.BodyPart, Multipart.General.BodyPart.Strict]( mediaRange = `multipart/*`, defaultContentType = MediaTypes.`text/plain` withCharset defaultCharset, @@ -33,7 +41,7 @@ trait MultipartUnmarshallers { createStrictBodyPart = Multipart.General.BodyPart.Strict, createStrict = Multipart.General.Strict) - implicit def multipartFormDataUnmarshaller(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[Multipart.FormData] = + implicit def multipartFormDataUnmarshaller(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): FromEntityUnmarshaller[Multipart.FormData] = multipartUnmarshaller[Multipart.FormData, Multipart.FormData.BodyPart, Multipart.FormData.BodyPart.Strict]( mediaRange = `multipart/form-data`, defaultContentType = ContentTypes.`application/octet-stream`, @@ -42,9 +50,10 @@ trait MultipartUnmarshallers { createStrictBodyPart = (entity, headers) ⇒ Multipart.General.BodyPart.Strict(entity, headers).toFormDataBodyPart.get, createStrict = (_, parts) ⇒ Multipart.FormData.Strict(parts)) - implicit def defaultMultipartByteRangesUnmarshaller(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[Multipart.ByteRanges] = + implicit def defaultMultipartByteRangesUnmarshaller(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): 
FromEntityUnmarshaller[Multipart.ByteRanges] = multipartByteRangesUnmarshaller(`UTF-8`) - def multipartByteRangesUnmarshaller(defaultCharset: HttpCharset)(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[Multipart.ByteRanges] = + + def multipartByteRangesUnmarshaller(defaultCharset: HttpCharset)(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): FromEntityUnmarshaller[Multipart.ByteRanges] = multipartUnmarshaller[Multipart.ByteRanges, Multipart.ByteRanges.BodyPart, Multipart.ByteRanges.BodyPart.Strict]( mediaRange = `multipart/byteranges`, defaultContentType = MediaTypes.`text/plain` withCharset defaultCharset, @@ -53,13 +62,14 @@ trait MultipartUnmarshallers { createStrictBodyPart = (entity, headers) ⇒ Multipart.General.BodyPart.Strict(entity, headers).toByteRangesBodyPart.get, createStrict = (_, parts) ⇒ Multipart.ByteRanges.Strict(parts)) - def multipartUnmarshaller[T <: Multipart, BP <: Multipart.BodyPart, BPS <: Multipart.BodyPart.Strict](mediaRange: MediaRange, - defaultContentType: ContentType, - createBodyPart: (BodyPartEntity, List[HttpHeader]) ⇒ BP, - createStreamed: (MediaType.Multipart, Source[BP, Any]) ⇒ T, - createStrictBodyPart: (HttpEntity.Strict, List[HttpHeader]) ⇒ BPS, - createStrict: (MediaType.Multipart, immutable.Seq[BPS]) ⇒ T)(implicit log: LoggingAdapter = NoLogging): FromEntityUnmarshaller[T] = - Unmarshaller { implicit ec ⇒ + def multipartUnmarshaller[T <: Multipart, BP <: Multipart.BodyPart, BPS <: Multipart.BodyPart.Strict]( + mediaRange: MediaRange, + defaultContentType: ContentType, + createBodyPart: (BodyPartEntity, List[HttpHeader]) ⇒ BP, + createStreamed: (MediaType.Multipart, Source[BP, Any]) ⇒ T, + createStrictBodyPart: (HttpEntity.Strict, List[HttpHeader]) ⇒ BPS, + createStrict: (MediaType.Multipart, immutable.Seq[BPS]) ⇒ T)(implicit log: LoggingAdapter = NoLogging, parserSettings: ParserSettings = null): FromEntityUnmarshaller[T] = + Unmarshaller.withMaterializer { implicit ec ⇒ mat => entity ⇒ if (entity.contentType.mediaType.isMultipart && mediaRange.matches(entity.contentType.mediaType)) { entity.contentType.mediaType.params.get("boundary") match { @@ -67,7 +77,8 @@ trait MultipartUnmarshallers { FastFuture.failed(new RuntimeException("Content-Type with a multipart media type must have a 'boundary' parameter")) case Some(boundary) ⇒ import BodyPartParser._ - val parser = new BodyPartParser(defaultContentType, boundary, log) + val effectiveParserSettings = Option(parserSettings).getOrElse(ParserSettings(ActorMaterializer.downcast(mat).system)) + val parser = new BodyPartParser(defaultContentType, boundary, log, effectiveParserSettings) FastFuture.successful { entity match { case HttpEntity.Strict(ContentType(mediaType: MediaType.Multipart, _), data) ⇒ diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromEntityUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromEntityUnmarshallers.scala index ffc64d564b..17a5b698d4 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromEntityUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromEntityUnmarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
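To illustrate the new implicit `ParserSettings` parameter on the multipart unmarshallers, a hedged sketch follows (the entity handling and settings value are assumptions; without an implicit instance in scope the unmarshaller falls back to the ActorSystem defaults via `ActorMaterializer.downcast`, as shown above):

```
import scala.concurrent.Future
import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ HttpEntity, Multipart }
import akka.http.scaladsl.settings.ParserSettings
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer

implicit val system = ActorSystem("sketch")
implicit val materializer = ActorMaterializer()
import system.dispatcher

// A ParserSettings instance in implicit scope is passed to multipartFormDataUnmarshaller.
implicit val parserSettings: ParserSettings = ParserSettings(system)

def parseForm(entity: HttpEntity): Future[Multipart.FormData] =
  Unmarshal(entity).to[Multipart.FormData]
```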
*/ package akka.http.scaladsl.unmarshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromStringUnmarshallers.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromStringUnmarshallers.scala index 5776ef36b9..0672a380d0 100755 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromStringUnmarshallers.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/PredefinedFromStringUnmarshallers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.unmarshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshal.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshal.scala index c577221926..5e079df805 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshal.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.unmarshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshaller.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshaller.scala index 1e9ca40175..dc86f5e126 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshaller.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/Unmarshaller.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl.unmarshalling diff --git a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/package.scala b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/package.scala index f0b56e89a7..e6a5cac0ae 100644 --- a/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/package.scala +++ b/akka-http/src/main/scala/akka/http/scaladsl/unmarshalling/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.http.scaladsl diff --git a/akka-kernel/src/main/scala/akka/kernel/Main.scala b/akka-kernel/src/main/scala/akka/kernel/Main.scala index efb63ab5a0..9419e47353 100644 --- a/akka-kernel/src/main/scala/akka/kernel/Main.scala +++ b/akka-kernel/src/main/scala/akka/kernel/Main.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.kernel diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala index 0e17bdee6c..b9398e61f2 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/DataTypes.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/DataTypes.scala index 5e1648f286..dff50ed0a0 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/DataTypes.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/DataTypes.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Extension.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Extension.scala index 173a2f4a29..c74f74eca0 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Extension.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Extension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala index fc9803afc9..c6678a77a3 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/RemoteConnection.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/RemoteConnection.scala index 3ee0a3ec55..bf52294616 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/RemoteConnection.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/RemoteConnection.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala index 9fc9e79b6b..c4db74b4ad 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testkit diff --git a/akka-osgi/build.sbt b/akka-osgi/build.sbt index e80269b856..785258c953 100644 --- a/akka-osgi/build.sbt +++ b/akka-osgi/build.sbt @@ -10,3 +10,5 @@ OSGi.osgi Dependencies.osgi parallelExecution in Test := false + +MimaKeys.previousArtifacts := akkaPreviousArtifacts("akka-osgi").value diff --git a/akka-osgi/src/main/scala/akka/osgi/ActorSystemActivator.scala b/akka-osgi/src/main/scala/akka/osgi/ActorSystemActivator.scala index 0a5ebbad16..4a4af39e4c 100644 --- a/akka-osgi/src/main/scala/akka/osgi/ActorSystemActivator.scala +++ b/akka-osgi/src/main/scala/akka/osgi/ActorSystemActivator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi diff --git a/akka-osgi/src/main/scala/akka/osgi/BundleDelegatingClassLoader.scala b/akka-osgi/src/main/scala/akka/osgi/BundleDelegatingClassLoader.scala index 92c7d1305b..2c2fc84a0a 100644 --- a/akka-osgi/src/main/scala/akka/osgi/BundleDelegatingClassLoader.scala +++ b/akka-osgi/src/main/scala/akka/osgi/BundleDelegatingClassLoader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.osgi diff --git a/akka-osgi/src/main/scala/akka/osgi/DefaultOSGiLogger.scala b/akka-osgi/src/main/scala/akka/osgi/DefaultOSGiLogger.scala index 68a380eeb9..31f4902515 100644 --- a/akka-osgi/src/main/scala/akka/osgi/DefaultOSGiLogger.scala +++ b/akka-osgi/src/main/scala/akka/osgi/DefaultOSGiLogger.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi diff --git a/akka-osgi/src/main/scala/akka/osgi/OsgiActorSystemFactory.scala b/akka-osgi/src/main/scala/akka/osgi/OsgiActorSystemFactory.scala index 57c8bc5749..681bd9549c 100644 --- a/akka-osgi/src/main/scala/akka/osgi/OsgiActorSystemFactory.scala +++ b/akka-osgi/src/main/scala/akka/osgi/OsgiActorSystemFactory.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi diff --git a/akka-osgi/src/test/scala/akka/osgi/ActorSystemActivatorTest.scala b/akka-osgi/src/test/scala/akka/osgi/ActorSystemActivatorTest.scala index c81738abe6..e828746cf0 100644 --- a/akka-osgi/src/test/scala/akka/osgi/ActorSystemActivatorTest.scala +++ b/akka-osgi/src/test/scala/akka/osgi/ActorSystemActivatorTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi diff --git a/akka-osgi/src/test/scala/akka/osgi/PojoSRTestSupport.scala b/akka-osgi/src/test/scala/akka/osgi/PojoSRTestSupport.scala index f8308fc321..5aaaa28eba 100644 --- a/akka-osgi/src/test/scala/akka/osgi/PojoSRTestSupport.scala +++ b/akka-osgi/src/test/scala/akka/osgi/PojoSRTestSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi diff --git a/akka-osgi/src/test/scala/akka/osgi/test/PingPong.scala b/akka-osgi/src/test/scala/akka/osgi/test/PingPong.scala index 9853836adc..e10dcc8ed7 100644 --- a/akka-osgi/src/test/scala/akka/osgi/test/PingPong.scala +++ b/akka-osgi/src/test/scala/akka/osgi/test/PingPong.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.osgi.test diff --git a/akka-osgi/src/test/scala/akka/osgi/test/TestActivators.scala b/akka-osgi/src/test/scala/akka/osgi/test/TestActivators.scala index 79dc2d97f8..c66ba276ef 100644 --- a/akka-osgi/src/test/scala/akka/osgi/test/TestActivators.scala +++ b/akka-osgi/src/test/scala/akka/osgi/test/TestActivators.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.osgi.test diff --git a/akka-parsing/build.sbt b/akka-parsing/build.sbt new file mode 100644 index 0000000000..b8251b13f2 --- /dev/null +++ b/akka-parsing/build.sbt @@ -0,0 +1,16 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +Formatting.docFormatSettings +site.settings +OSGi.parsing +Dependencies.parsing + +unmanagedSourceDirectories in ScalariformKeys.format in Test <<= unmanagedSourceDirectories in Test +scalacOptions += "-language:_" + +// ScalaDoc doesn't like the macros +sources in doc in Compile := List() + +enablePlugins(ScaladocNoVerificationOfDiagrams) diff --git a/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java b/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java index 5ec91e12f1..5d91450fc2 100644 --- a/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java +++ b/akka-parsing/src/main/java/akka/parboiled2/util/Base64.java @@ -73,7 +73,6 @@ package akka.parboiled2.util; import java.util.Arrays; -@SuppressWarnings({"UnnecessaryParentheses"}) public class Base64 { // -------- FIELDS ------------------------------------------------------------------------------------------------- diff --git a/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala b/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala index 3bc5971645..477790ab9c 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/CharPredicate.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala b/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala index fca40c5a45..36bba80960 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/CharUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala b/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala index ced8d94b4a..49683dd5cf 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ErrorFormatter.scala b/akka-parsing/src/main/scala/akka/parboiled2/ErrorFormatter.scala index 42941676f2..33ae7284e1 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/ErrorFormatter.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/ErrorFormatter.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala b/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala index b30c1734db..7c62c2f0d7 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/ParseError.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala b/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala index 9c2607ddad..5471a30fc9 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/Parser.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala b/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala index 02c2b531bb..82f2b73250 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/ParserInput.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala b/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala index 751310c758..50bcc68db8 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/Rule.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala index 545105ba10..5b4a1c9d79 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLActions.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala index 896a69df34..ec35ecdec0 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLBasics.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala index 7a7868f13c..6baf603a02 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/RuleDSLCombinators.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala b/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala index a9141bd0b5..feb9033f03 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/ValueStack.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/package.scala b/akka-parsing/src/main/scala/akka/parboiled2/package.scala index ef8a425308..85285d8462 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/package.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/package.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala index 6e968d398b..b468ae2cb3 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOps.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
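The `Rule`, `RuleDSL*` and `ValueStack` files above make up the parboiled2 parsing DSL that akka-parsing vendors for HTTP parsing. A minimal sketch of that DSL in use, assuming the vendored `akka.parboiled2` API matches upstream parboiled2; the parser name and grammar are illustrative only:

```
import akka.parboiled2._
import scala.util.{ Failure, Success }

// illustrative grammar: a comma-separated list of integers
class IntListParser(val input: ParserInput) extends Parser {
  def InputLine = rule { Numbers ~ EOI }
  def Numbers   = rule { oneOrMore(Number).separatedBy(",") }
  def Number    = rule { capture(oneOrMore(CharPredicate.Digit)) ~> (_.toInt) }
}

object IntListParserSketch extends App {
  new IntListParser("1,2,42").InputLine.run() match {
    case Success(values) => println(values)              // Vector(1, 2, 42)
    case Failure(error)  => println(s"parse failed: $error")
  }
}
```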
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOpsSupport.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOpsSupport.scala index 0577342ccb..f7261b5152 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOpsSupport.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/ActionOpsSupport.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala index e528549a63..237199dcb0 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/HListable.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala index 1e30e764b6..5278bd87da 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/Lifter.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala index 01c9e7d907..7c7b40eb6b 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/OpTreeContext.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala index 06e1f1b7be..8dabf6243e 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/RunResult.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala index 05fa9094c3..b4a61e8316 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/TailSwitch.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala index a16697fe43..068257cbc3 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/Unpack.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala b/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala index f00a0bb687..2599a5b3d3 100644 --- a/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala +++ b/akka-parsing/src/main/scala/akka/parboiled2/support/package.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev + * Copyright (C) 2009-2016 Mathias Doenitz, Alexander Myltsev * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/EventEnvelope.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/EventEnvelope.scala index fce035160f..4472089889 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/EventEnvelope.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/EventEnvelope.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/PersistenceQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/PersistenceQuery.scala index 51b31d2c96..ae747161d3 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/PersistenceQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/PersistenceQuery.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/ReadJournalProvider.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/ReadJournalProvider.scala index ee23f30353..f14756ef64 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/ReadJournalProvider.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/ReadJournalProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.persistence.query diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/AllPersistenceIdsQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/AllPersistenceIdsQuery.scala index 10bd575ff3..6e6083f6fc 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/AllPersistenceIdsQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/AllPersistenceIdsQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source /** @@ -19,6 +20,6 @@ trait AllPersistenceIdsQuery extends ReadJournal { * Corresponding query that is completed when it reaches the end of the currently * currently used `persistenceIds` is provided by [[CurrentPersistenceIdsQuery#currentPersistenceIds]]. */ - def allPersistenceIds(): Source[String, Unit] + def allPersistenceIds(): Source[String, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByPersistenceIdQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByPersistenceIdQuery.scala index fd53a9a56b..5cb92132b4 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByPersistenceIdQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByPersistenceIdQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source import akka.persistence.query.EventEnvelope @@ -18,6 +19,6 @@ trait CurrentEventsByPersistenceIdQuery extends ReadJournal { * not included in the event stream. */ def currentEventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] + toSequenceNr: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByTagQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByTagQuery.scala index 1cb2658359..a9dbc4744c 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByTagQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentEventsByTagQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source import akka.persistence.query.EventEnvelope @@ -16,7 +17,7 @@ trait CurrentEventsByTagQuery extends ReadJournal { * is completed immediately when it reaches the end of the "result set". Events that are * stored after the query is completed are not included in the event stream. 
*/ - def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] + def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentPersistenceIdsQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentPersistenceIdsQuery.scala index 107a55119a..1cd9bda342 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentPersistenceIdsQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/CurrentPersistenceIdsQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source /** @@ -15,6 +16,6 @@ trait CurrentPersistenceIdsQuery extends ReadJournal { * is completed immediately when it reaches the end of the "result set". Persistent * actors that are created after the query is completed are not included in the stream. */ - def currentPersistenceIds(): Source[String, Unit] + def currentPersistenceIds(): Source[String, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByPersistenceIdQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByPersistenceIdQuery.scala index 553b7d5312..3be77f627c 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByPersistenceIdQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByPersistenceIdQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source import akka.persistence.query.EventEnvelope @@ -25,6 +26,6 @@ trait EventsByPersistenceIdQuery extends ReadJournal { * stored events is provided by [[CurrentEventsByPersistenceIdQuery#currentEventsByPersistenceId]]. */ def eventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] + toSequenceNr: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByTagQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByTagQuery.scala index f011d572bf..6e86562798 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByTagQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/EventsByTagQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.javadsl +import akka.NotUsed import akka.stream.javadsl.Source import akka.persistence.query.EventEnvelope @@ -34,6 +35,6 @@ trait EventsByTagQuery extends ReadJournal { * Corresponding query that is completed when it reaches the end of the currently * stored events is provided by [[CurrentEventsByTagQuery#currentEventsByTag]]. 
*/ - def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] + def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/ReadJournal.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/ReadJournal.scala index b2edab6e07..5582957140 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/ReadJournal.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/javadsl/ReadJournal.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query.javadsl diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsPublisher.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsPublisher.scala index f48b9ef5db..e11812deba 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsPublisher.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/DeliveryBuffer.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/DeliveryBuffer.scala index 47f0c79897..a88ba4b1cd 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/DeliveryBuffer.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/DeliveryBuffer.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala index db504616dc..75ab10f5ad 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala index 22518fbbb4..4f07f848a6 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
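The javadsl query traits above now return `Source[..., NotUsed]` instead of `Source[..., Unit]`. Implementations typically wrap a scaladsl journal and convert with `asJava`, as the javadsl `LeveldbReadJournal` below does; a hedged sketch of that pattern, with an illustrative class name:

```
import akka.NotUsed
import akka.persistence.query.{ EventEnvelope, javadsl, scaladsl }
import akka.stream.javadsl.{ Source => JSource }

// delegates to a scaladsl implementation and exposes the new NotUsed materialized value
class ExampleJavadslReadJournal(delegate: scaladsl.EventsByTagQuery)
  extends javadsl.ReadJournal with javadsl.EventsByTagQuery {

  override def eventsByTag(tag: String, offset: Long): JSource[EventEnvelope, NotUsed] =
    delegate.eventsByTag(tag, offset).asJava
}
```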
*/ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/LeveldbReadJournalProvider.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/LeveldbReadJournalProvider.scala index 9d9ff96708..24e6900a34 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/LeveldbReadJournalProvider.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/LeveldbReadJournalProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/javadsl/LeveldbReadJournal.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/javadsl/LeveldbReadJournal.scala index e6a7e80a08..f8dd97654a 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/javadsl/LeveldbReadJournal.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/javadsl/LeveldbReadJournal.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb.javadsl +import akka.NotUsed + import scala.concurrent.duration._ import akka.persistence.query.EventEnvelope @@ -52,7 +54,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * The stream is completed with failure if there is a failure in executing the query in the * backend journal. */ - override def allPersistenceIds(): Source[String, Unit] = + override def allPersistenceIds(): Source[String, NotUsed] = scaladslReadJournal.allPersistenceIds().asJava /** @@ -60,7 +62,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * is completed immediately when it reaches the end of the "result set". Persistent * actors that are created after the query is completed are not included in the stream. */ - override def currentPersistenceIds(): Source[String, Unit] = + override def currentPersistenceIds(): Source[String, NotUsed] = scaladslReadJournal.currentPersistenceIds().asJava /** @@ -90,7 +92,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * backend journal. */ override def eventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] = + toSequenceNr: Long): Source[EventEnvelope, NotUsed] = scaladslReadJournal.eventsByPersistenceId(persistenceId, fromSequenceNr, toSequenceNr).asJava /** @@ -99,7 +101,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * stored after the query is completed are not included in the event stream. */ override def currentEventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] = + toSequenceNr: Long): Source[EventEnvelope, NotUsed] = scaladslReadJournal.currentEventsByPersistenceId(persistenceId, fromSequenceNr, toSequenceNr).asJava /** @@ -137,7 +139,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * The stream is completed with failure if there is a failure in executing the query in the * backend journal. 
*/ - override def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] = + override def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] = scaladslReadJournal.eventsByTag(tag, offset).asJava /** @@ -145,7 +147,7 @@ class LeveldbReadJournal(scaladslReadJournal: akka.persistence.query.journal.lev * is completed immediately when it reaches the end of the "result set". Events that are * stored after the query is completed are not included in the event stream. */ - override def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] = + override def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] = scaladslReadJournal.currentEventsByTag(tag, offset).asJava } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/scaladsl/LeveldbReadJournal.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/scaladsl/LeveldbReadJournal.scala index 8fe119d1ef..79cf037f52 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/scaladsl/LeveldbReadJournal.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/scaladsl/LeveldbReadJournal.scala @@ -1,10 +1,12 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb.scaladsl import java.net.URLEncoder +import akka.NotUsed + import scala.concurrent.duration._ import akka.actor.ExtendedActorSystem @@ -64,11 +66,11 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * The stream is completed with failure if there is a failure in executing the query in the * backend journal. */ - override def allPersistenceIds(): Source[String, Unit] = { + override def allPersistenceIds(): Source[String, NotUsed] = { // no polling for this query, the write journal will push all changes, i.e. // no refreshInterval Source.actorPublisher[String](AllPersistenceIdsPublisher.props(liveQuery = true, maxBufSize, writeJournalPluginId)) - .mapMaterializedValue(_ ⇒ ()) + .mapMaterializedValue(_ ⇒ NotUsed) .named("allPersistenceIds") } @@ -77,9 +79,9 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * is completed immediately when it reaches the end of the "result set". Persistent * actors that are created after the query is completed are not included in the stream. */ - override def currentPersistenceIds(): Source[String, Unit] = { + override def currentPersistenceIds(): Source[String, NotUsed] = { Source.actorPublisher[String](AllPersistenceIdsPublisher.props(liveQuery = false, maxBufSize, writeJournalPluginId)) - .mapMaterializedValue(_ ⇒ ()) + .mapMaterializedValue(_ ⇒ NotUsed) .named("currentPersistenceIds") } @@ -110,9 +112,9 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * backend journal. 
*/ override def eventsByPersistenceId(persistenceId: String, fromSequenceNr: Long = 0L, - toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, Unit] = { + toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, NotUsed] = { Source.actorPublisher[EventEnvelope](EventsByPersistenceIdPublisher.props(persistenceId, fromSequenceNr, toSequenceNr, - refreshInterval, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ ()) + refreshInterval, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ NotUsed) .named("eventsByPersistenceId-" + persistenceId) } @@ -122,9 +124,9 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * stored after the query is completed are not included in the event stream. */ override def currentEventsByPersistenceId(persistenceId: String, fromSequenceNr: Long = 0L, - toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, Unit] = { + toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, NotUsed] = { Source.actorPublisher[EventEnvelope](EventsByPersistenceIdPublisher.props(persistenceId, fromSequenceNr, toSequenceNr, - None, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ ()) + None, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ NotUsed) .named("currentEventsByPersistenceId-" + persistenceId) } @@ -163,9 +165,9 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * The stream is completed with failure if there is a failure in executing the query in the * backend journal. */ - override def eventsByTag(tag: String, offset: Long = 0L): Source[EventEnvelope, Unit] = { + override def eventsByTag(tag: String, offset: Long = 0L): Source[EventEnvelope, NotUsed] = { Source.actorPublisher[EventEnvelope](EventsByTagPublisher.props(tag, offset, Long.MaxValue, - refreshInterval, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ ()) + refreshInterval, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ NotUsed) .named("eventsByTag-" + URLEncoder.encode(tag, ByteString.UTF_8)) } @@ -174,9 +176,9 @@ class LeveldbReadJournal(system: ExtendedActorSystem, config: Config) extends Re * is completed immediately when it reaches the end of the "result set". Events that are * stored after the query is completed are not included in the event stream. */ - override def currentEventsByTag(tag: String, offset: Long = 0L): Source[EventEnvelope, Unit] = { + override def currentEventsByTag(tag: String, offset: Long = 0L): Source[EventEnvelope, NotUsed] = { Source.actorPublisher[EventEnvelope](EventsByTagPublisher.props(tag, offset, Long.MaxValue, - None, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ ()) + None, maxBufSize, writeJournalPluginId)).mapMaterializedValue(_ ⇒ NotUsed) .named("currentEventsByTag-" + URLEncoder.encode(tag, ByteString.UTF_8)) } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/AllPersistenceIdsQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/AllPersistenceIdsQuery.scala index cc4e72bfb5..997b4266d1 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/AllPersistenceIdsQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/AllPersistenceIdsQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
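The scaladsl `LeveldbReadJournal` changes above swap `mapMaterializedValue(_ ⇒ ())` for `mapMaterializedValue(_ ⇒ NotUsed)`, so every query source now materializes `NotUsed`. A small usage sketch against the updated signatures; the actor system name and persistence id are illustrative, and it assumes a LevelDB journal is configured:

```
import akka.NotUsed
import akka.actor.ActorSystem
import akka.persistence.query.{ EventEnvelope, PersistenceQuery }
import akka.persistence.query.journal.leveldb.scaladsl.LeveldbReadJournal
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source

object ReadJournalUsageSketch extends App {
  implicit val system = ActorSystem("query-example")
  implicit val materializer = ActorMaterializer()

  val queries = PersistenceQuery(system)
    .readJournalFor[LeveldbReadJournal](LeveldbReadJournal.Identifier)

  // the materialized value is now NotUsed rather than Unit
  val events: Source[EventEnvelope, NotUsed] =
    queries.eventsByPersistenceId("sample-persistence-id", 0L, Long.MaxValue)

  events.runForeach(env => println(s"seqNr=${env.sequenceNr} event=${env.event}"))
}
```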
*/ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source /** @@ -19,6 +20,6 @@ trait AllPersistenceIdsQuery extends ReadJournal { * Corresponding query that is completed when it reaches the end of the currently * currently used `persistenceIds` is provided by [[CurrentPersistenceIdsQuery#currentPersistenceIds]]. */ - def allPersistenceIds(): Source[String, Unit] + def allPersistenceIds(): Source[String, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByPersistenceIdQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByPersistenceIdQuery.scala index 3f35391aab..fbee08865c 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByPersistenceIdQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByPersistenceIdQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source import akka.persistence.query.EventEnvelope @@ -18,6 +19,6 @@ trait CurrentEventsByPersistenceIdQuery extends ReadJournal { * not included in the event stream. */ def currentEventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] + toSequenceNr: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByTagQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByTagQuery.scala index ce825bf7cd..c91e556d3e 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByTagQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentEventsByTagQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source import akka.persistence.query.EventEnvelope @@ -16,7 +17,7 @@ trait CurrentEventsByTagQuery extends ReadJournal { * is completed immediately when it reaches the end of the "result set". Events that are * stored after the query is completed are not included in the event stream. */ - def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] + def currentEventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentPersistenceIdsQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentPersistenceIdsQuery.scala index f355b3aa06..6bc802043a 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentPersistenceIdsQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/CurrentPersistenceIdsQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source /** @@ -15,6 +16,6 @@ trait CurrentPersistenceIdsQuery extends ReadJournal { * is completed immediately when it reaches the end of the "result set". Persistent * actors that are created after the query is completed are not included in the stream. 
*/ - def currentPersistenceIds(): Source[String, Unit] + def currentPersistenceIds(): Source[String, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByPersistenceIdQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByPersistenceIdQuery.scala index 1c2e473bbc..1762cd145c 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByPersistenceIdQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByPersistenceIdQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source import akka.persistence.query.EventEnvelope @@ -25,6 +26,6 @@ trait EventsByPersistenceIdQuery extends ReadJournal { * stored events is provided by [[CurrentEventsByPersistenceIdQuery#currentEventsByPersistenceId]]. */ def eventsByPersistenceId(persistenceId: String, fromSequenceNr: Long, - toSequenceNr: Long): Source[EventEnvelope, Unit] + toSequenceNr: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByTagQuery.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByTagQuery.scala index a15e676b20..004480e481 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByTagQuery.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/EventsByTagQuery.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl +import akka.NotUsed import akka.stream.scaladsl.Source import akka.persistence.query.EventEnvelope @@ -34,7 +35,7 @@ trait EventsByTagQuery extends ReadJournal { * Corresponding query that is completed when it reaches the end of the currently * stored events is provided by [[CurrentEventsByTagQuery#currentEventsByTag]]. */ - def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, Unit] + def eventsByTag(tag: String, offset: Long): Source[EventEnvelope, NotUsed] } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/ReadJournal.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/ReadJournal.scala index 395454cbdf..897ff2ade6 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/ReadJournal.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/scaladsl/ReadJournal.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query.scaladsl diff --git a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournal.java b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournal.java index fcab48ffd7..bfcc277860 100644 --- a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournal.java +++ b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournal.java @@ -1,13 +1,12 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
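The scaladsl query traits above follow the same capability pattern: a read journal opts into each query by mixing in the corresponding trait and returning a `Source[..., NotUsed]`. A minimal sketch of a finite, in-memory implementation; the class name and data are illustrative:

```
import akka.NotUsed
import akka.persistence.query.scaladsl.{ CurrentPersistenceIdsQuery, ReadJournal }
import akka.stream.scaladsl.Source

// illustrative journal that only supports the "current" persistence ids query
class InMemoryIdsReadJournal(knownIds: List[String]) extends ReadJournal with CurrentPersistenceIdsQuery {

  // a finite stream: completes once the currently known ids have been emitted
  override def currentPersistenceIds(): Source[String, NotUsed] =
    Source(knownIds)
}
```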
*/ package akka.persistence.query; -import scala.runtime.BoxedUnit; - import java.util.Iterator; +import akka.NotUsed; import akka.persistence.query.javadsl.AllPersistenceIdsQuery; import akka.persistence.query.javadsl.ReadJournal; import akka.stream.javadsl.Source; @@ -21,7 +20,7 @@ public class DummyJavaReadJournal implements ReadJournal, AllPersistenceIdsQuery @Override - public Source allPersistenceIds() { + public Source allPersistenceIds() { return Source.fromIterator(() -> new Iterator() { private int i = 0; @Override public boolean hasNext() { return true; } diff --git a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalForScala.java b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalForScala.java index 2bfa9c66d4..329e20ab01 100644 --- a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalForScala.java +++ b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalForScala.java @@ -1,11 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query; -import scala.runtime.BoxedUnit; - +import akka.NotUsed; /** * Use for tests only! @@ -23,7 +22,7 @@ public class DummyJavaReadJournalForScala implements akka.persistence.query.scal } @Override - public akka.stream.scaladsl.Source allPersistenceIds() { + public akka.stream.scaladsl.Source allPersistenceIds() { return readJournal.allPersistenceIds().asScala(); } diff --git a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalProvider.java b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalProvider.java index 6bce25aa3a..d19fa5131e 100644 --- a/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalProvider.java +++ b/akka-persistence-query/src/test/java/akka/persistence/query/DummyJavaReadJournalProvider.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query; diff --git a/akka-persistence-query/src/test/java/akka/persistence/query/PersistenceQueryTest.java b/akka-persistence-query/src/test/java/akka/persistence/query/PersistenceQueryTest.java index 2da3884560..319e8602f3 100644 --- a/akka-persistence-query/src/test/java/akka/persistence/query/PersistenceQueryTest.java +++ b/akka-persistence-query/src/test/java/akka/persistence/query/PersistenceQueryTest.java @@ -1,13 +1,14 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence.query; +import akka.NotUsed; import akka.actor.ActorSystem; import akka.testkit.AkkaJUnitActorSystemResource; import org.junit.ClassRule; -import scala.runtime.BoxedUnit; + public class PersistenceQueryTest { @@ -22,6 +23,6 @@ public class PersistenceQueryTest { public void shouldExposeJavaDSLFriendlyQueryJournal() throws Exception { final DummyJavaReadJournal readJournal = PersistenceQuery.get(system).getReadJournalFor(DummyJavaReadJournal.class, "noop-journal"); - final akka.stream.javadsl.Source ids = readJournal.allPersistenceIds(); + final akka.stream.javadsl.Source ids = readJournal.allPersistenceIds(); } } diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/DummyReadJournal.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/DummyReadJournal.scala index 6879530545..5be4da327b 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/DummyReadJournal.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/DummyReadJournal.scala @@ -1,9 +1,10 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query +import akka.NotUsed import akka.stream.scaladsl.Source import com.typesafe.config.{ Config, ConfigFactory } import scala.runtime.BoxedUnit @@ -13,7 +14,7 @@ import scala.runtime.BoxedUnit * Emits infinite stream of strings (representing queried for events). */ class DummyReadJournal extends scaladsl.ReadJournal with scaladsl.AllPersistenceIdsQuery { - override def allPersistenceIds(): Source[String, Unit] = + override def allPersistenceIds(): Source[String, NotUsed] = Source.fromIterator(() ⇒ Iterator.from(0)).map(_.toString) } @@ -22,7 +23,7 @@ object DummyReadJournal { } class DummyReadJournalForJava(readJournal: DummyReadJournal) extends javadsl.ReadJournal with javadsl.AllPersistenceIdsQuery { - override def allPersistenceIds(): akka.stream.javadsl.Source[String, Unit] = + override def allPersistenceIds(): akka.stream.javadsl.Source[String, NotUsed] = readJournal.allPersistenceIds().asJava } diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/PersistenceQuerySpec.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/PersistenceQuerySpec.scala index be0714963e..6bc99e70c5 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/PersistenceQuerySpec.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/PersistenceQuerySpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.query diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsSpec.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsSpec.scala index 15af8e2b79..5a53d79115 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsSpec.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
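The `Dummy*ReadJournal` test classes above are wired up through a `ReadJournalProvider` that exposes a scaladsl journal and its javadsl counterpart, and is looked up by configuration. A hedged sketch of that wiring for a hypothetical plugin; all names and the config path are illustrative:

```
import akka.NotUsed
import akka.actor.ExtendedActorSystem
import akka.persistence.query.{ ReadJournalProvider, javadsl, scaladsl }
import akka.stream.scaladsl.Source
import com.typesafe.config.Config

// illustrative scaladsl journal plus its javadsl wrapper
class ExampleReadJournal(system: ExtendedActorSystem, config: Config)
  extends scaladsl.ReadJournal with scaladsl.AllPersistenceIdsQuery {
  override def allPersistenceIds(): Source[String, NotUsed] =
    Source(List("sample-id-1", "sample-id-2"))
}

class ExampleJavaReadJournal(delegate: ExampleReadJournal)
  extends javadsl.ReadJournal with javadsl.AllPersistenceIdsQuery {
  override def allPersistenceIds(): akka.stream.javadsl.Source[String, NotUsed] =
    delegate.allPersistenceIds().asJava
}

// the provider is what the journal's `class` setting points at, e.g.
// example-read-journal { class = "com.example.ExampleReadJournalProvider" }
class ExampleReadJournalProvider(system: ExtendedActorSystem, config: Config) extends ReadJournalProvider {
  private val journal = new ExampleReadJournal(system, config)
  override val scaladslReadJournal: scaladsl.ReadJournal = journal
  override val javadslReadJournal: javadsl.ReadJournal = new ExampleJavaReadJournal(journal)
}
```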
*/ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/Cleanup.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/Cleanup.scala index c19ec4702a..a44a98f04c 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/Cleanup.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/Cleanup.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdSpec.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdSpec.scala index 580f9c25ce..1d4b401e44 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdSpec.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByTagSpec.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByTagSpec.scala index 278785c6b9..ad27996dbf 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByTagSpec.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/EventsByTagSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/TestActor.scala b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/TestActor.scala index 5eec6123aa..90400fbbf7 100644 --- a/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/TestActor.scala +++ b/akka-persistence-query/src/test/scala/akka/persistence/query/journal/leveldb/TestActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.query.journal.leveldb diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/CapabilityFlags.scala b/akka-persistence-tck/src/main/scala/akka/persistence/CapabilityFlags.scala index 44bfae0209..bbaf113418 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/CapabilityFlags.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/CapabilityFlags.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/PluginSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/PluginSpec.scala index cbca14fc65..1c87cf7937 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/PluginSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/PluginSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalPerfSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalPerfSpec.scala index 2536442df9..6f96704f42 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalPerfSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.japi.journal diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalSpec.scala index 7f400109f4..e1b77da97e 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/japi/journal/JavaJournalSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.japi.journal diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala index 3ebe07be19..0d750062b4 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.japi.snapshot diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala index a830bf48d1..85574184fb 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala index 68f1917b21..91edb93763 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/MayVerb.scala b/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/MayVerb.scala index ffceeff042..da8ca45fb2 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/MayVerb.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/MayVerb.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.persistence.scalatest diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/OptionalTests.scala b/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/OptionalTests.scala index a1d7ab4f3d..6f1d7a4053 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/OptionalTests.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/scalatest/OptionalTests.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.scalatest diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala index 4bd7d3674d..b981213a87 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.snapshot diff --git a/akka-persistence-tck/src/test/scala/akka/persistence/japi/JavaJournalSpecSpec.scala b/akka-persistence-tck/src/test/scala/akka/persistence/japi/JavaJournalSpecSpec.scala index febf085bf5..b65760dd88 100644 --- a/akka-persistence-tck/src/test/scala/akka/persistence/japi/JavaJournalSpecSpec.scala +++ b/akka-persistence-tck/src/test/scala/akka/persistence/japi/JavaJournalSpecSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.japi diff --git a/akka-persistence-tck/src/test/scala/akka/persistence/journal/leveldb/LeveldbJournalNativePerfSpec.scala b/akka-persistence-tck/src/test/scala/akka/persistence/journal/leveldb/LeveldbJournalNativePerfSpec.scala index db112f9e64..fe6fa4ce10 100644 --- a/akka-persistence-tck/src/test/scala/akka/persistence/journal/leveldb/LeveldbJournalNativePerfSpec.scala +++ b/akka-persistence-tck/src/test/scala/akka/persistence/journal/leveldb/LeveldbJournalNativePerfSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStateFunctionBuilder.java b/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStateFunctionBuilder.java index b3e3b94e4f..518535ff7f 100644 --- a/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStateFunctionBuilder.java +++ b/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStateFunctionBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.fsm.japi.pf; diff --git a/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStopBuilder.java b/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStopBuilder.java index d5a8cd1dae..4c4be75066 100644 --- a/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStopBuilder.java +++ b/akka-persistence/src/main/java/akka/persistence/fsm/japi/pf/FSMStopBuilder.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence.fsm.japi.pf; diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java index 553dd47af2..c78478cb23 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.japi; diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java index 8e1c4822b9..e378d480c5 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.japi; diff --git a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java index ec7d8895ae..9e89527f36 100644 --- a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.snapshot.japi; diff --git a/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala b/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala index 5ab6032f01..b1f19164bd 100644 --- a/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala index 1e165c4ddb..754f7ef578 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala index 6f2b12b7af..8145b7afde 100644 --- a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala +++ b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala index b88c5ef320..e4a9f01f56 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala index d7e1d39cdb..82b51b09ec 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala index 337fe0c40a..b7d78d4921 100644 --- a/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala +++ b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala index b0d48ea3b2..75ed99c3cc 100644 --- a/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala +++ b/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/Protocol.scala b/akka-persistence/src/main/scala/akka/persistence/Protocol.scala index 4baa08f772..2895900394 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Protocol.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Protocol.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/SnapshotProtocol.scala b/akka-persistence/src/main/scala/akka/persistence/SnapshotProtocol.scala index a2682610e6..fe627ed221 100644 --- a/akka-persistence/src/main/scala/akka/persistence/SnapshotProtocol.scala +++ b/akka-persistence/src/main/scala/akka/persistence/SnapshotProtocol.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/Snapshotter.scala b/akka-persistence/src/main/scala/akka/persistence/Snapshotter.scala index 31b9312723..93339d4e80 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Snapshotter.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Snapshotter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala index 5c3fa7d747..b6914e5cc1 100644 --- a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala +++ b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence.fsm diff --git a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSMBase.scala b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSMBase.scala index 2bb7df4b04..05653d979a 100644 --- a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSMBase.scala +++ b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSMBase.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.fsm diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala index fa4d4d7f77..9648f7b6e5 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala index 4b76d8acb7..745cae10cf 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.journal @@ -47,7 +47,6 @@ trait AsyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { private def isReplayFilterEnabled: Boolean = replayFilterMode != ReplayFilter.Disabled private val replayFilterWindowSize: Int = config.getInt("replay-filter.window-size") private val replayFilterMaxOldWriters: Int = config.getInt("replay-filter.max-old-writers") - private val replayDebugEnabled: Boolean = config.getBoolean("replay-filter.debug") private val resequencer = context.actorOf(Props[Resequencer]()) private var resequencerCounter = 1L @@ -55,109 +54,114 @@ trait AsyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { final def receive = receiveWriteJournal.orElse[Any, Unit](receivePluginInternal) final val receiveWriteJournal: Actor.Receive = { - case WriteMessages(messages, persistentActor, actorInstanceId) ⇒ - val cctr = resequencerCounter - resequencerCounter += messages.foldLeft(0)((acc, m) ⇒ acc + m.size) + 1 + // cannot be a val in the trait due to binary compatibility + val replayDebugEnabled: Boolean = config.getBoolean("replay-filter.debug") - val atomicWriteCount = messages.count(_.isInstanceOf[AtomicWrite]) - val prepared = Try(preparePersistentBatch(messages)) - val writeResult = (prepared match { - case Success(prep) ⇒ - // try in case the asyncWriteMessages throws - try breaker.withCircuitBreaker(asyncWriteMessages(prep)) - catch { case NonFatal(e) ⇒ Future.failed(e) } - case f @ Failure(_) ⇒ - // exception from preparePersistentBatch => rejected - Future.successful(messages.collect { case a: AtomicWrite ⇒ f }) - }).map { results ⇒ - if (results.nonEmpty && results.size != atomicWriteCount) - throw new IllegalStateException("asyncWriteMessages returned invalid number of results. 
" + - s"Expected [${prepared.get.size}], but got [${results.size}]") - results - } + { + case WriteMessages(messages, persistentActor, actorInstanceId) ⇒ + val cctr = resequencerCounter + resequencerCounter += messages.foldLeft(0)((acc, m) ⇒ acc + m.size) + 1 - writeResult.onComplete { - case Success(results) ⇒ - resequencer ! Desequenced(WriteMessagesSuccessful, cctr, persistentActor, self) - - val resultsIter = - if (results.isEmpty) Iterator.fill(atomicWriteCount)(AsyncWriteJournal.successUnit) - else results.iterator - var n = cctr + 1 - messages.foreach { - case a: AtomicWrite ⇒ - resultsIter.next() match { - case Success(_) ⇒ - a.payload.foreach { p ⇒ - resequencer ! Desequenced(WriteMessageSuccess(p, actorInstanceId), n, persistentActor, p.sender) - n += 1 - } - case Failure(e) ⇒ - a.payload.foreach { p ⇒ - resequencer ! Desequenced(WriteMessageRejected(p, e, actorInstanceId), n, persistentActor, p.sender) - n += 1 - } - } - - case r: NonPersistentRepr ⇒ - resequencer ! Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), n, persistentActor, r.sender) - n += 1 - } - - case Failure(e) ⇒ - resequencer ! Desequenced(WriteMessagesFailed(e), cctr, persistentActor, self) - var n = cctr + 1 - messages.foreach { - case a: AtomicWrite ⇒ - a.payload.foreach { p ⇒ - resequencer ! Desequenced(WriteMessageFailure(p, e, actorInstanceId), n, persistentActor, p.sender) - n += 1 - } - case r: NonPersistentRepr ⇒ - resequencer ! Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), n, persistentActor, r.sender) - n += 1 - } - } - - case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, persistentActor) ⇒ - val replyTo = - if (isReplayFilterEnabled) context.actorOf(ReplayFilter.props(persistentActor, replayFilterMode, - replayFilterWindowSize, replayFilterMaxOldWriters, replayDebugEnabled)) - else persistentActor - - val readHighestSequenceNrFrom = math.max(0L, fromSequenceNr - 1) - breaker.withCircuitBreaker(asyncReadHighestSequenceNr(persistenceId, readHighestSequenceNrFrom)) - .flatMap { highSeqNr ⇒ - val toSeqNr = math.min(toSequenceNr, highSeqNr) - if (highSeqNr == 0L || fromSequenceNr > toSeqNr) - Future.successful(highSeqNr) - else { - // Send replayed messages and replay result to persistentActor directly. No need - // to resequence replayed messages relative to written and looped messages. 
- // not possible to use circuit breaker here - asyncReplayMessages(persistenceId, fromSequenceNr, toSeqNr, max) { p ⇒ - if (!p.deleted) // old records from 2.3 may still have the deleted flag - adaptFromJournal(p).foreach { adaptedPersistentRepr ⇒ - replyTo.tell(ReplayedMessage(adaptedPersistentRepr), Actor.noSender) - } - }.map(_ ⇒ highSeqNr) - } - }.map { - highSeqNr ⇒ RecoverySuccess(highSeqNr) - }.recover { - case e ⇒ ReplayMessagesFailure(e) - }.pipeTo(replyTo).onSuccess { - case _ ⇒ if (publish) context.system.eventStream.publish(r) + val atomicWriteCount = messages.count(_.isInstanceOf[AtomicWrite]) + val prepared = Try(preparePersistentBatch(messages)) + val writeResult = (prepared match { + case Success(prep) ⇒ + // try in case the asyncWriteMessages throws + try breaker.withCircuitBreaker(asyncWriteMessages(prep)) + catch { case NonFatal(e) ⇒ Future.failed(e) } + case f @ Failure(_) ⇒ + // exception from preparePersistentBatch => rejected + Future.successful(messages.collect { case a: AtomicWrite ⇒ f }) + }).map { results ⇒ + if (results.nonEmpty && results.size != atomicWriteCount) + throw new IllegalStateException("asyncWriteMessages returned invalid number of results. " + + s"Expected [${prepared.get.size}], but got [${results.size}]") + results } - case d @ DeleteMessagesTo(persistenceId, toSequenceNr, persistentActor) ⇒ - breaker.withCircuitBreaker(asyncDeleteMessagesTo(persistenceId, toSequenceNr)) map { - case _ ⇒ DeleteMessagesSuccess(toSequenceNr) - } recover { - case e ⇒ DeleteMessagesFailure(e, toSequenceNr) - } pipeTo persistentActor onComplete { - case _ ⇒ if (publish) context.system.eventStream.publish(d) - } + writeResult.onComplete { + case Success(results) ⇒ + resequencer ! Desequenced(WriteMessagesSuccessful, cctr, persistentActor, self) + + val resultsIter = + if (results.isEmpty) Iterator.fill(atomicWriteCount)(AsyncWriteJournal.successUnit) + else results.iterator + var n = cctr + 1 + messages.foreach { + case a: AtomicWrite ⇒ + resultsIter.next() match { + case Success(_) ⇒ + a.payload.foreach { p ⇒ + resequencer ! Desequenced(WriteMessageSuccess(p, actorInstanceId), n, persistentActor, p.sender) + n += 1 + } + case Failure(e) ⇒ + a.payload.foreach { p ⇒ + resequencer ! Desequenced(WriteMessageRejected(p, e, actorInstanceId), n, persistentActor, p.sender) + n += 1 + } + } + + case r: NonPersistentRepr ⇒ + resequencer ! Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), n, persistentActor, r.sender) + n += 1 + } + + case Failure(e) ⇒ + resequencer ! Desequenced(WriteMessagesFailed(e), cctr, persistentActor, self) + var n = cctr + 1 + messages.foreach { + case a: AtomicWrite ⇒ + a.payload.foreach { p ⇒ + resequencer ! Desequenced(WriteMessageFailure(p, e, actorInstanceId), n, persistentActor, p.sender) + n += 1 + } + case r: NonPersistentRepr ⇒ + resequencer ! 
Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), n, persistentActor, r.sender) + n += 1 + } + } + + case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, persistentActor) ⇒ + val replyTo = + if (isReplayFilterEnabled) context.actorOf(ReplayFilter.props(persistentActor, replayFilterMode, + replayFilterWindowSize, replayFilterMaxOldWriters, replayDebugEnabled)) + else persistentActor + + val readHighestSequenceNrFrom = math.max(0L, fromSequenceNr - 1) + breaker.withCircuitBreaker(asyncReadHighestSequenceNr(persistenceId, readHighestSequenceNrFrom)) + .flatMap { highSeqNr ⇒ + val toSeqNr = math.min(toSequenceNr, highSeqNr) + if (highSeqNr == 0L || fromSequenceNr > toSeqNr) + Future.successful(highSeqNr) + else { + // Send replayed messages and replay result to persistentActor directly. No need + // to resequence replayed messages relative to written and looped messages. + // not possible to use circuit breaker here + asyncReplayMessages(persistenceId, fromSequenceNr, toSeqNr, max) { p ⇒ + if (!p.deleted) // old records from 2.3 may still have the deleted flag + adaptFromJournal(p).foreach { adaptedPersistentRepr ⇒ + replyTo.tell(ReplayedMessage(adaptedPersistentRepr), Actor.noSender) + } + }.map(_ ⇒ highSeqNr) + } + }.map { + highSeqNr ⇒ RecoverySuccess(highSeqNr) + }.recover { + case e ⇒ ReplayMessagesFailure(e) + }.pipeTo(replyTo).onSuccess { + case _ ⇒ if (publish) context.system.eventStream.publish(r) + } + + case d @ DeleteMessagesTo(persistenceId, toSequenceNr, persistentActor) ⇒ + breaker.withCircuitBreaker(asyncDeleteMessagesTo(persistenceId, toSequenceNr)) map { + case _ ⇒ DeleteMessagesSuccess(toSequenceNr) + } recover { + case e ⇒ DeleteMessagesFailure(e, toSequenceNr) + } pipeTo persistentActor onComplete { + case _ ⇒ if (publish) context.system.eventStream.publish(d) + } + } } //#journal-plugin-api @@ -274,4 +278,3 @@ private[persistence] object AsyncWriteJournal { } } } - diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala index c0e8232278..3f1f2731ad 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapter.scala b/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapter.scala index bdbf36457c..15044fe8ef 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapter.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapter.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapters.scala b/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapters.scala index 3d5183d160..5289ca9cb3 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapters.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/EventAdapters.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
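
The `AsyncWriteJournal` hunk above moves `replayDebugEnabled` out of the trait body and into the block that builds `receiveWriteJournal`; as its comment notes, it "cannot be a val in the trait due to binary compatibility", since a new field would change the compiled shape of an already-released trait. A minimal sketch of that pattern follows; `ExampleJournal` and the `example.debug` setting are illustrative names, not part of Akka:

```
import akka.actor.Actor
import com.typesafe.config.Config

// Illustrative only: a new setting is read as a local inside the block that
// produces the Receive, so no new field is added to the compiled trait.
trait ExampleJournal extends Actor {
  def config: Config

  final val receiveExample: Actor.Receive = {
    // local val, captured by the partial function below
    val debugEnabled: Boolean = config.getBoolean("example.debug")

    {
      case "status" ⇒ sender() ! (if (debugEnabled) "debug" else "normal")
    }
  }
}
```

The partial-function literal closes over the local, so plugins compiled against the previous release of the trait keep linking unchanged.
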
*/ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala b/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala index bba14ccf7b..eb85dda15d 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/ReplayFilter.scala b/akka-persistence/src/main/scala/akka/persistence/journal/ReplayFilter.scala index b32e990f31..f316a9e4d9 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/ReplayFilter.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/ReplayFilter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.journal @@ -30,6 +30,13 @@ private[akka] object ReplayFilter { Props(new ReplayFilter(persistentActor, mode, windowSize, maxOldWriters, debugEnabled)) } + // for binary compatibility + def props( + persistentActor: ActorRef, + mode: Mode, + windowSize: Int, + maxOldWriters: Int): Props = props(persistentActor, mode, windowSize, maxOldWriters, debugEnabled = false) + sealed trait Mode case object Fail extends Mode case object Warn extends Mode @@ -46,6 +53,10 @@ private[akka] class ReplayFilter(persistentActor: ActorRef, mode: ReplayFilter.M import JournalProtocol._ import ReplayFilter.{ Warn, Fail, RepairByDiscardOld, Disabled } + // for binary compatibility + def this(persistentActor: ActorRef, mode: ReplayFilter.Mode, + windowSize: Int, maxOldWriters: Int) = this(persistentActor, mode, windowSize, maxOldWriters, debugEnabled = false) + val buffer = new LinkedList[ReplayedMessage]() val oldWriters = LinkedHashSet.empty[String] var writerUuid = "" diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/Tagged.scala b/akka-persistence/src/main/scala/akka/persistence/journal/Tagged.scala index 951cb427d0..0cc31b032f 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/Tagged.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/Tagged.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala b/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala index acc132995e..2daeba6305 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala index 3339af6052..47f54cb4cf 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
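
The `ReplayFilter` hunk above preserves binary compatibility in the other direction: the old four-argument `props` factory and constructor are added back, each delegating to the new five-argument form with `debugEnabled = false`. A small sketch of the same idiom, using made-up names (`Throttler`, `windowSize`) and nothing beyond plain Akka actors:

```
import akka.actor.{ Actor, ActorRef, Props }

// Made-up actor illustrating the "keep the old arity" idiom from ReplayFilter.
class Throttler(target: ActorRef, windowSize: Int, debugEnabled: Boolean) extends Actor {
  // old constructor signature retained for binary compatibility
  def this(target: ActorRef, windowSize: Int) =
    this(target, windowSize, debugEnabled = false)

  def receive = { case msg ⇒ target forward msg }
}

object Throttler {
  def props(target: ActorRef, windowSize: Int, debugEnabled: Boolean): Props =
    Props(new Throttler(target, windowSize, debugEnabled))

  // old factory signature delegates to the new one with a safe default
  def props(target: ActorRef, windowSize: Int): Props =
    props(target, windowSize, debugEnabled = false)
}
```

Callers compiled against the previous arity keep resolving, while new code can opt into the added flag.
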
*/ package akka.persistence.journal.inmem diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncRecovery.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncRecovery.scala index 6ac0428bc2..0c45c03479 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncRecovery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncRecovery.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.japi diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala index f0507fab0f..ad90cc7d7e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.japi diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala index fd6237ac2a..95fb027e68 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala index 46322d48d3..f811d17c99 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala index 1f46851d95..7f3011fde3 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala index f7e85259fd..12871fc09e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. 
*/ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala index 5d0fd4244e..83d1e148b2 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/SharedLeveldbStore.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/SharedLeveldbStore.scala index ba8fc72cb3..edba10e7d8 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/SharedLeveldbStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/SharedLeveldbStore.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala index e43c075373..d4fd546b02 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.serialization diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala index ffda65e83f..6bc2755e1e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.serialization diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala index 6a55f682b5..e563a8eefc 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/NoSnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/NoSnapshotStore.scala index efba7d08ce..bb21fd8ffe 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/NoSnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/NoSnapshotStore.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence.snapshot diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala index 53a3bd5afe..082364d755 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.snapshot diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala index 534423ca84..cd12c09a5e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.snapshot.japi diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala index 59b1eaa2fd..d0a00fb51d 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala @@ -1,6 +1,6 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. - * Copyright (C) 2012-2013 Eligotech BV. + * Copyright (C) 2009-2016 Typesafe Inc. + * Copyright (C) 2012-2016 Eligotech BV. */ package akka.persistence.snapshot.local diff --git a/akka-persistence/src/test/java/akka/persistence/fsm/AbstractPersistentFSMTest.java b/akka-persistence/src/test/java/akka/persistence/fsm/AbstractPersistentFSMTest.java index d660cc79ab..598edda8bd 100644 --- a/akka-persistence/src/test/java/akka/persistence/fsm/AbstractPersistentFSMTest.java +++ b/akka-persistence/src/test/java/akka/persistence/fsm/AbstractPersistentFSMTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.fsm; diff --git a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala index 668e7a1a0a..84f2f2e0ec 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala index 79c699486a..0334897e51 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/AtomicWriteSpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtomicWriteSpec.scala index d13b49adf6..0705d30cc0 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtomicWriteSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/AtomicWriteSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala b/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala index 7b9d0f64e6..bd39db7a3b 100644 --- a/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala b/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala index 9595461d9d..ed737d86d1 100644 --- a/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/LoadPluginSpec.scala b/akka-persistence/src/test/scala/akka/persistence/LoadPluginSpec.scala index 34e6e2fc58..b772b75feb 100644 --- a/akka-persistence/src/test/scala/akka/persistence/LoadPluginSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/LoadPluginSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/OptionalSnapshotStoreSpec.scala b/akka-persistence/src/test/scala/akka/persistence/OptionalSnapshotStoreSpec.scala index 776525d00e..6b701c0b94 100644 --- a/akka-persistence/src/test/scala/akka/persistence/OptionalSnapshotStoreSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/OptionalSnapshotStoreSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala index b927a4193a..c27a90b22e 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala index fe6641daa8..1efbe4be0d 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorDeleteFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorDeleteFailureSpec.scala index c4088ea80e..10328429c1 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorDeleteFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorDeleteFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala index 0411fb7436..4319464fc6 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala index 384c1966e2..cc5663816d 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorStashingSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorStashingSpec.scala index b2a3a24793..7429e4e10d 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorStashingSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorStashingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala index a9bf5d90a0..23e7cf12a5 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala index acc9f07899..53a8f71564 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala index 4856e8de0b..5fbfcdfdee 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala index 01d9a644ea..01b26a0c61 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala index b6b07c0574..e65150896e 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence diff --git a/akka-persistence/src/test/scala/akka/persistence/fsm/PersistentFSMSpec.scala b/akka-persistence/src/test/scala/akka/persistence/fsm/PersistentFSMSpec.scala index e315752949..7d8a96d39f 100644 --- a/akka-persistence/src/test/scala/akka/persistence/fsm/PersistentFSMSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/fsm/PersistentFSMSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.fsm diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/InmemEventAdaptersSpec.scala b/akka-persistence/src/test/scala/akka/persistence/journal/InmemEventAdaptersSpec.scala index 8e8e0d78b4..2b2e755e92 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/InmemEventAdaptersSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/InmemEventAdaptersSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala b/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala index 00354a009c..ced8e776b8 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.persistence.journal diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala b/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala index 09c3e4fb9d..edfcb73d44 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala b/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala index 5a28a3b1d7..e75a0fe72d 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.chaos diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/PersistencePluginProxySpec.scala b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/PersistencePluginProxySpec.scala index 66bfa8b96d..bb2c066de7 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/PersistencePluginProxySpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/PersistencePluginProxySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala index e270eba6e7..bd3af988bf 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.persistence.journal.leveldb diff --git a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala index 0d4a61dcd1..c89020e9f3 100644 --- a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.persistence.serialization diff --git a/akka-protobuf/build.sbt b/akka-protobuf/build.sbt index 0447129b55..d304a329c0 100644 --- a/akka-protobuf/build.sbt +++ b/akka-protobuf/build.sbt @@ -1,4 +1,5 @@ import akka.{ AkkaBuild, Formatting, OSGi, Unidoc, Dependencies } + import com.typesafe.tools.mima.plugin.MimaKeys AkkaBuild.defaultSettings diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/AttemptSysMsgRedeliverySpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/AttemptSysMsgRedeliverySpec.scala index 97e42951d4..74f54da9cc 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/AttemptSysMsgRedeliverySpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/AttemptSysMsgRedeliverySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala index b712c1e0e7..5749d516b1 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala index 662ac1da53..561d4d5fd3 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote @@ -52,7 +52,7 @@ class NewRemoteActorSpec extends MultiNodeSpec(NewRemoteActorMultiJvmSpec) def initialParticipants = roles.size - // ensure that system shutdown is successful + // ensure that system.terminate is successful override def verifySystemShutdown = true "A new remote actor" must { @@ -116,7 +116,7 @@ class NewRemoteActorSpec extends MultiNodeSpec(NewRemoteActorMultiJvmSpec) enterBarrier("deployed") // master system is supposed to be shutdown after slave - // this should be triggered by slave system shutdown + // this should be triggered by slave system.terminate expectMsgPF() { case Terminated(`actor`) ⇒ true } } @@ -126,7 +126,7 @@ class NewRemoteActorSpec extends MultiNodeSpec(NewRemoteActorMultiJvmSpec) // Important that this is the last test. // It should not be any barriers here. - // verifySystemShutdown = true will ensure that system shutdown is successful + // verifySystemShutdown = true will ensure that system.terminate is successful } } } diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeliverySpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeliverySpec.scala index 3e0f9521fe..b7fe745c4b 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeliverySpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeliverySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeploymentDeathWatchSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeploymentDeathWatchSpec.scala index b148cdaf12..2515594af7 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeploymentDeathWatchSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteDeploymentDeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteGatePiercingSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteGatePiercingSpec.scala index f30b6c9c74..8d4362ed35 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteGatePiercingSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteGatePiercingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeDeathWatchSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeDeathWatchSpec.scala index e613ec9533..507c4836e4 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeDeathWatchSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeDeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartDeathWatchSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartDeathWatchSpec.scala index a990e3b48a..f6d3106a47 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartDeathWatchSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartDeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartGateSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartGateSpec.scala index 90f96e2501..092fadc74c 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartGateSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeRestartGateSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeShutdownAndComesBackSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeShutdownAndComesBackSpec.scala index 7717735d31..badb5ce9ab 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeShutdownAndComesBackSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteNodeShutdownAndComesBackSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala index ff8d4d11e1..c44e879a44 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteReDeploymentSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteReDeploymentSpec.scala index 975d653ec0..1ac2f7c04e 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteReDeploymentSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteReDeploymentSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala index b3523e8650..b94f300db3 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote @@ -45,7 +45,7 @@ object RemoteRestartedQuarantinedSpec extends MultiNodeConfig { class Subject extends Actor { def receive = { - case "shutdown" ⇒ context.system.shutdown() + case "shutdown" ⇒ context.system.terminate() case "identify" ⇒ sender() ! (AddressUidExtension(context.system).addressUid, self) } } @@ -121,7 +121,7 @@ abstract class RemoteRestartedQuarantinedSpec enterBarrier("still-quarantined") - system.awaitTermination(10.seconds) + Await.result(system.whenTerminated, 10.seconds) val freshSystem = ActorSystem(system.name, ConfigFactory.parseString(s""" akka.remote.retry-gate-closed-for = 0.5 s diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/Ticket15109Spec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/Ticket15109Spec.scala index ee3217a3a2..be103b0857 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/Ticket15109Spec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/Ticket15109Spec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRandomSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRandomSpec.scala index ff7018fd45..8751a654f4 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRandomSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRandomSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
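
Several of the remote specs above are migrated off the deprecated `ActorSystem` lifecycle calls: `system.shutdown()` becomes `system.terminate()`, and `system.awaitTermination(timeout)` becomes `Await.result(system.whenTerminated, timeout)`. A minimal standalone sketch of that usage (the system name is arbitrary):

```
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.actor.ActorSystem

object TerminateExample extends App {
  val system = ActorSystem("example")

  // was: system.shutdown()
  system.terminate()

  // was: system.awaitTermination(10.seconds)
  Await.result(system.whenTerminated, 10.seconds)
}
```

`terminate()` itself returns a `Future[Terminated]`, so the `Await` could equally be applied to its result when the shutdown is triggered and awaited in the same place.
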
*/ package akka.remote.routing diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRoundRobinSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRoundRobinSpec.scala index 25d3ab6621..b2711ebd5b 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRoundRobinSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteRoundRobinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.routing diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteScatterGatherSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteScatterGatherSpec.scala index be2b20cf2c..aafaf161b0 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteScatterGatherSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/routing/RemoteScatterGatherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.routing diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala index 695ef13466..f0c2fa388d 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala index 5e9172de9e..6465d59186 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testkit diff --git a/akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala b/akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala index a5dea79182..8ef398f1da 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package org.scalatest.extra diff --git a/akka-remote-tests/src/test/scala/akka/remote/SerializationChecksSpec.scala b/akka-remote-tests/src/test/scala/akka/remote/SerializationChecksSpec.scala index c68dee44dd..0f4358db22 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/SerializationChecksSpec.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/SerializationChecksSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala b/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala index 1cd83db478..6f884b0d1e 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-remote-tests/src/test/scala/akka/remote/testconductor/ControllerSpec.scala b/akka-remote-tests/src/test/scala/akka/remote/testconductor/ControllerSpec.scala index a13ad67f6d..31f836a33e 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testconductor/ControllerSpec.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/testconductor/ControllerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testconductor diff --git a/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala b/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala index 567d6df790..3c7bfcefce 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testkit diff --git a/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala b/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala index 2329501d8b..0099bf23c3 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.testkit diff --git a/akka-remote/src/main/scala/akka/remote/AckedDelivery.scala b/akka-remote/src/main/scala/akka/remote/AckedDelivery.scala index 7a0de8f2af..10bcd1ec5e 100644 --- a/akka-remote/src/main/scala/akka/remote/AckedDelivery.scala +++ b/akka-remote/src/main/scala/akka/remote/AckedDelivery.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/AddressUidExtension.scala b/akka-remote/src/main/scala/akka/remote/AddressUidExtension.scala index b175380ad1..ecd87d21ef 100644 --- a/akka-remote/src/main/scala/akka/remote/AddressUidExtension.scala +++ b/akka-remote/src/main/scala/akka/remote/AddressUidExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala b/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala index bc34a5994e..48fd90bec1 100644 --- a/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala +++ b/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/DeadlineFailureDetector.scala b/akka-remote/src/main/scala/akka/remote/DeadlineFailureDetector.scala index a8e2c83bf5..bb25d137de 100644 --- a/akka-remote/src/main/scala/akka/remote/DeadlineFailureDetector.scala +++ b/akka-remote/src/main/scala/akka/remote/DeadlineFailureDetector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/DefaultFailureDetectorRegistry.scala b/akka-remote/src/main/scala/akka/remote/DefaultFailureDetectorRegistry.scala index df92913d2a..0df2738e0d 100644 --- a/akka-remote/src/main/scala/akka/remote/DefaultFailureDetectorRegistry.scala +++ b/akka-remote/src/main/scala/akka/remote/DefaultFailureDetectorRegistry.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/Endpoint.scala b/akka-remote/src/main/scala/akka/remote/Endpoint.scala index 58eb726372..a36f8f3a9b 100644 --- a/akka-remote/src/main/scala/akka/remote/Endpoint.scala +++ b/akka-remote/src/main/scala/akka/remote/Endpoint.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/FailureDetector.scala b/akka-remote/src/main/scala/akka/remote/FailureDetector.scala index af57b9b220..85b4a7cc01 100644 --- a/akka-remote/src/main/scala/akka/remote/FailureDetector.scala +++ b/akka-remote/src/main/scala/akka/remote/FailureDetector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/FailureDetectorRegistry.scala b/akka-remote/src/main/scala/akka/remote/FailureDetectorRegistry.scala index 7cedd56775..8f214ff317 100644 --- a/akka-remote/src/main/scala/akka/remote/FailureDetectorRegistry.scala +++ b/akka-remote/src/main/scala/akka/remote/FailureDetectorRegistry.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala index 251fc1876b..52f915f255 100644 --- a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/PhiAccrualFailureDetector.scala b/akka-remote/src/main/scala/akka/remote/PhiAccrualFailureDetector.scala index 4d124b3b7d..fede54f546 100644 --- a/akka-remote/src/main/scala/akka/remote/PhiAccrualFailureDetector.scala +++ b/akka-remote/src/main/scala/akka/remote/PhiAccrualFailureDetector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 7d65bcc1b1..2e450f2252 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteDaemon.scala b/akka-remote/src/main/scala/akka/remote/RemoteDaemon.scala index f7e1e2a1d1..62a1e5d7c4 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteDaemon.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteDaemon.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala b/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala index a5f1d1faf7..a5d34b2f1f 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteDeploymentWatcher.scala b/akka-remote/src/main/scala/akka/remote/RemoteDeploymentWatcher.scala index bc8c184dc3..8b4daf2bc5 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteDeploymentWatcher.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteDeploymentWatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteMetricsExtension.scala b/akka-remote/src/main/scala/akka/remote/RemoteMetricsExtension.scala index 9482f88cf5..cdd9de21b2 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteMetricsExtension.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteMetricsExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteSettings.scala b/akka-remote/src/main/scala/akka/remote/RemoteSettings.scala index a020a800cc..b174aa3503 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteSettings.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteSettings.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteTransport.scala b/akka-remote/src/main/scala/akka/remote/RemoteTransport.scala index ae95ffded1..47aa58f1f4 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteTransport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala b/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala index ca9f8a3ee8..461688deef 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/Remoting.scala b/akka-remote/src/main/scala/akka/remote/Remoting.scala index 9144283f8f..d75b7043f3 100644 --- a/akka-remote/src/main/scala/akka/remote/Remoting.scala +++ b/akka-remote/src/main/scala/akka/remote/Remoting.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala b/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala index c1b7334ee8..23b2378f76 100644 --- a/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala +++ b/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala b/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala index 58a63bb84a..7e520d67bb 100644 --- a/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala +++ b/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.routing diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala index 0307c1ade1..16f3bd2d28 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala index 9d1cfd1e06..9aa7a5ef1b 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala index 126073085f..dad017d36c 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala index 556516d8d9..327854e2f8 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala index f93ce05e00..3b53d6395a 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala b/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala index 96b2818e25..a63f2dc21f 100644 --- a/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.security.provider diff --git a/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala index b3961fd697..fba03dbb0c 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.serialization diff --git a/akka-remote/src/main/scala/akka/remote/serialization/MessageContainerSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/MessageContainerSerializer.scala index 387ecdd98e..563527fdf2 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/MessageContainerSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/MessageContainerSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.serialization diff --git a/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala index 741c4438b1..c098c22d80 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.serialization diff --git a/akka-remote/src/main/scala/akka/remote/transport/AbstractTransportAdapter.scala b/akka-remote/src/main/scala/akka/remote/transport/AbstractTransportAdapter.scala index 6de6cab3d5..549a4538f2 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/AbstractTransportAdapter.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/AbstractTransportAdapter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala b/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala index 343639e183..ef119d708e 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala index 97fe0410be..1a69c6f1b2 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/FailureInjectorTransportAdapter.scala b/akka-remote/src/main/scala/akka/remote/transport/FailureInjectorTransportAdapter.scala index 6eb918f9ad..82db539bcd 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/FailureInjectorTransportAdapter.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/FailureInjectorTransportAdapter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala index 4bcb31b97b..945a55d839 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/TestTransport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala b/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala index 31c567056a..757eeca0d8 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/Transport.scala b/akka-remote/src/main/scala/akka/remote/transport/Transport.scala index ba8b48c9dd..5bf120101b 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/Transport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/Transport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyHelpers.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyHelpers.scala index dfe706caf2..ebae479275 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyHelpers.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyHelpers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote.transport.netty diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/NettySSLSupport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/NettySSLSupport.scala index 8bd0208d7f..f3e27dfcf3 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/NettySSLSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/NettySSLSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport.netty diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala index 2257c64c7e..ef27fc69da 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport.netty @@ -240,7 +240,7 @@ private[transport] object NettyTransport { def addressFromSocketAddress(addr: SocketAddress, schemeIdentifier: String, systemName: String, hostName: Option[String], port: Option[Int]): Option[Address] = addr match { case sa: InetSocketAddress ⇒ Some(Address(schemeIdentifier, systemName, - hostName.getOrElse(sa.getAddress.getHostAddress), port.getOrElse(sa.getPort))) // perhaps use getHostString in jdk 1.7 + hostName.getOrElse(sa.getHostString), port.getOrElse(sa.getPort))) case _ ⇒ None } diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/TcpSupport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/TcpSupport.scala index 3ea97d2857..84a2b747f3 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/TcpSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/TcpSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport.netty diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/UdpSupport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/UdpSupport.scala index c9473017b4..e0b06f7019 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/UdpSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/UdpSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport.netty diff --git a/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala b/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala index 331ea26401..9e7cad1f98 100644 --- a/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/AckedDeliverySpec.scala b/akka-remote/src/test/scala/akka/remote/AckedDeliverySpec.scala index 116f4a2b24..c22d3af5ba 100644 --- a/akka-remote/src/test/scala/akka/remote/AckedDeliverySpec.scala +++ b/akka-remote/src/test/scala/akka/remote/AckedDeliverySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
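The NettyTransport hunk above swaps `sa.getAddress.getHostAddress` for `sa.getHostString`, the JDK 7+ accessor that the removed `// perhaps use getHostString in jdk 1.7` comment was pointing at: it returns the host name (or the literal address) without triggering a reverse DNS lookup, and it also works on unresolved addresses. A standalone sketch contrasting the two accessors (addresses and ports are illustrative, not from the patch):

```scala
import java.net.InetSocketAddress

// Minimal sketch: getHostString never performs a reverse DNS lookup and is
// safe on unresolved addresses, whereas getAddress is null when unresolved.
object HostStringSketch extends App {
  val resolved   = new InetSocketAddress("127.0.0.1", 2552)
  val unresolved = InetSocketAddress.createUnresolved("example.com", 2552)

  println(resolved.getHostString)             // "127.0.0.1"
  println(resolved.getAddress.getHostAddress) // "127.0.0.1"

  println(unresolved.getHostString)           // "example.com"
  println(unresolved.getAddress)              // null -> .getHostAddress would NPE
}
```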
*/ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala b/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala index dfb798677a..151e29b986 100644 --- a/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/DaemonicSpec.scala b/akka-remote/src/test/scala/akka/remote/DaemonicSpec.scala index 13d572f275..2671740651 100644 --- a/akka-remote/src/test/scala/akka/remote/DaemonicSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/DaemonicSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/DeadlineFailureDetectorSpec.scala b/akka-remote/src/test/scala/akka/remote/DeadlineFailureDetectorSpec.scala index 81d2567bfe..ac0a347b11 100644 --- a/akka-remote/src/test/scala/akka/remote/DeadlineFailureDetectorSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/DeadlineFailureDetectorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/LogSourceSpec.scala b/akka-remote/src/test/scala/akka/remote/LogSourceSpec.scala index 56d3c44f1a..bf1ea67f3a 100644 --- a/akka-remote/src/test/scala/akka/remote/LogSourceSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/LogSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/NetworkFailureSpec.scala b/akka-remote/src/test/scala/akka/remote/NetworkFailureSpec.scala index 638aff5d2d..81fa7da3e2 100644 --- a/akka-remote/src/test/scala/akka/remote/NetworkFailureSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/NetworkFailureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala index 4d42a22290..a036cfac5b 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala index 2c0a32c9fe..e7465fce6b 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteConsistentHashingRouterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala index 47912880d3..81681bb539 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteDeployerSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteDeployerSpec.scala index 386e51e3ed..8bf2a5b263 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteDeployerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteDeployerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteInitErrorSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteInitErrorSpec.scala index b40242800e..40553225bf 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteInitErrorSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteInitErrorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala index 5e27577fb0..49ac30b46a 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemoteWatcherSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteWatcherSpec.scala index be68af3a1a..c71c0a1bfa 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteWatcherSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteWatcherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/RemotingSpec.scala b/akka-remote/src/test/scala/akka/remote/RemotingSpec.scala index 84f4d07b67..d3af1e0ff1 100644 --- a/akka-remote/src/test/scala/akka/remote/RemotingSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemotingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/SerializationChecksPlainRemotingSpec.scala b/akka-remote/src/test/scala/akka/remote/SerializationChecksPlainRemotingSpec.scala index 0c7b232fc8..d28e37cc49 100644 --- a/akka-remote/src/test/scala/akka/remote/SerializationChecksPlainRemotingSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/SerializationChecksPlainRemotingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/SerializeCreatorsVerificationSpec.scala b/akka-remote/src/test/scala/akka/remote/SerializeCreatorsVerificationSpec.scala index 58489233d9..956c69d399 100644 --- a/akka-remote/src/test/scala/akka/remote/SerializeCreatorsVerificationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/SerializeCreatorsVerificationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala b/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala index 2cb1bfcb93..46c2cb2be1 100644 --- a/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/TypedActorRemoteDeploySpec.scala b/akka-remote/src/test/scala/akka/remote/TypedActorRemoteDeploySpec.scala index 057933cf45..1abde86e30 100644 --- a/akka-remote/src/test/scala/akka/remote/TypedActorRemoteDeploySpec.scala +++ b/akka-remote/src/test/scala/akka/remote/TypedActorRemoteDeploySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/UntrustedSpec.scala b/akka-remote/src/test/scala/akka/remote/UntrustedSpec.scala index 4e650ab2b2..7bd35e255d 100644 --- a/akka-remote/src/test/scala/akka/remote/UntrustedSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/UntrustedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote diff --git a/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala index 1a081fc353..c5d213c28d 100644 --- a/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.serialization diff --git a/akka-remote/src/test/scala/akka/remote/serialization/MessageContainerSerializerSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/MessageContainerSerializerSpec.scala index 9fe63d6663..29dc946771 100644 --- a/akka-remote/src/test/scala/akka/remote/serialization/MessageContainerSerializerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/MessageContainerSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.serialization diff --git a/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala index bb60d7c678..eab06488af 100644 --- a/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.remote.serialization diff --git a/akka-remote/src/test/scala/akka/remote/transport/SystemMessageDeliveryStressTest.scala b/akka-remote/src/test/scala/akka/remote/transport/SystemMessageDeliveryStressTest.scala index 8eaddbced9..999b072889 100644 --- a/akka-remote/src/test/scala/akka/remote/transport/SystemMessageDeliveryStressTest.scala +++ b/akka-remote/src/test/scala/akka/remote/transport/SystemMessageDeliveryStressTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.remote.transport diff --git a/akka-samples/akka-sample-fsm-scala/src/main/scala/sample/redelivery/FsmSimpleRedelivery.scala b/akka-samples/akka-sample-fsm-scala/src/main/scala/sample/redelivery/FsmSimpleRedelivery.scala index 406d7d9057..b72e0bc001 100644 --- a/akka-samples/akka-sample-fsm-scala/src/main/scala/sample/redelivery/FsmSimpleRedelivery.scala +++ b/akka-samples/akka-sample-fsm-scala/src/main/scala/sample/redelivery/FsmSimpleRedelivery.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package sample.redelivery @@ -136,7 +136,7 @@ object Receiver { class Receiver extends Actor { /** - * Simulate loosing 75% of all messages on the receiving end. We want to see the redelivery in action! + * Simulate losing 75% of all messages on the receiving end. We want to see the redelivery in action! */ def shouldSendAck = ThreadLocalRandom.current.nextDouble() < 0.25 diff --git a/akka-samples/akka-sample-main-java-lambda/build.sbt b/akka-samples/akka-sample-main-java-lambda/build.sbt index bd0906ce3f..33c2db7cca 100644 --- a/akka-samples/akka-sample-main-java-lambda/build.sbt +++ b/akka-samples/akka-sample-main-java-lambda/build.sbt @@ -1,10 +1,9 @@ name := "akka-sample-main-java-lambda" -version := "1.0" +version := "2.4-SNAPSHOT" scalaVersion := "2.11.7" libraryDependencies ++= Seq( "com.typesafe.akka" %% "akka-actor" % "2.4-SNAPSHOT" ) - diff --git a/akka-samples/akka-sample-main-java-lambda/pom.xml b/akka-samples/akka-sample-main-java-lambda/pom.xml index eae724100a..8a84daaa01 100644 --- a/akka-samples/akka-sample-main-java-lambda/pom.xml +++ b/akka-samples/akka-sample-main-java-lambda/pom.xml @@ -7,7 +7,7 @@ akka-sample-main-java-lambda com.typesafe.akka.samples Akka Main in Java - 1.0 + 2.4-SNAPSHOT UTF-8 diff --git a/akka-samples/akka-sample-persistence-java-lambda/build.sbt b/akka-samples/akka-sample-persistence-java-lambda/build.sbt index ec375a371e..6d51987855 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/build.sbt +++ b/akka-samples/akka-sample-persistence-java-lambda/build.sbt @@ -1,6 +1,6 @@ name := "akka-sample-persistence-java-lambda" -version := "1.0" +version := "2.4-SNAPSHOT" scalaVersion := "2.11.7" @@ -13,4 +13,3 @@ libraryDependencies ++= Seq( "org.iq80.leveldb" % "leveldb" % "0.7", "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" ) - diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java index a8c555acbc..76b1d5e1c8 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package sample.persistence; diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java index c67f458c54..e7b2f3fee8 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package sample.persistence; diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java index b1df88953c..3a98fd59cc 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package sample.persistence; diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/ViewExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/ViewExample.java index 2ccdd00ae8..b4c194a197 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/ViewExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/ViewExample.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package sample.persistence; diff --git a/akka-samples/akka-sample-supervision-java-lambda/build.sbt b/akka-samples/akka-sample-supervision-java-lambda/build.sbt index e9eb2697d8..fb4563bbac 100644 --- a/akka-samples/akka-sample-supervision-java-lambda/build.sbt +++ b/akka-samples/akka-sample-supervision-java-lambda/build.sbt @@ -1,6 +1,6 @@ name := "akka-supervision-java-lambda" -version := "1.0" +version := "2.4-SNAPSHOT" scalaVersion := "2.11.7" diff --git a/akka-samples/akka-sample-supervision-java-lambda/pom.xml b/akka-samples/akka-sample-supervision-java-lambda/pom.xml index 639f639b32..0f69d0d122 100644 --- a/akka-samples/akka-sample-supervision-java-lambda/pom.xml +++ b/akka-samples/akka-sample-supervision-java-lambda/pom.xml @@ -11,7 +11,7 @@ sample akka-supervision-java-lambda jar - 1.0 + 2.4-SNAPSHOT diff --git a/akka-slf4j/src/main/scala/akka/event/slf4j/Slf4jLogger.scala b/akka-slf4j/src/main/scala/akka/event/slf4j/Slf4jLogger.scala index f80938bda0..461da02a45 100644 --- a/akka-slf4j/src/main/scala/akka/event/slf4j/Slf4jLogger.scala +++ b/akka-slf4j/src/main/scala/akka/event/slf4j/Slf4jLogger.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event.slf4j diff --git a/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggerSpec.scala b/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggerSpec.scala index a6324fe734..079717529c 100644 --- a/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggerSpec.scala +++ b/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event.slf4j diff --git a/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggingFilterSpec.scala b/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggingFilterSpec.scala index 99b5bc2a28..91d9528add 100644 --- a/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggingFilterSpec.scala +++ b/akka-slf4j/src/test/scala/akka/event/slf4j/Slf4jLoggingFilterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.event.slf4j diff --git a/akka-stream-testkit/build.sbt b/akka-stream-testkit/build.sbt new file mode 100644 index 0000000000..1f9446d4f4 --- /dev/null +++ b/akka-stream-testkit/build.sbt @@ -0,0 +1,8 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +Formatting.formatSettings +OSGi.streamTestkit +Dependencies.streamTestkit +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-stream-testkit").value diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala index 4bb2dc4e41..5c7246603c 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.testkit @@ -123,7 +123,7 @@ object TestPublisher { probe.receiveWhile(max, idle, messages)(f.asInstanceOf[PartialFunction[AnyRef, T]]) def expectEventPF[T](f: PartialFunction[PublisherEvent, T]): T = - probe.expectMsgPF[T](probe.remaining)(f.asInstanceOf[PartialFunction[Any, T]]) + probe.expectMsgPF[T]()(f.asInstanceOf[PartialFunction[Any, T]]) def getPublisher: Publisher[I] = this } @@ -183,7 +183,7 @@ object TestSubscriber { trait SubscriberEvent extends DeadLetterSuppression with NoSerializationVerificationNeeded final case class OnSubscribe(subscription: Subscription) extends SubscriberEvent final case class OnNext[I](element: I) extends SubscriberEvent - final case object OnComplete extends SubscriberEvent + case object OnComplete extends SubscriberEvent final case class OnError(cause: Throwable) extends SubscriberEvent { override def toString: String = { val str = new StringWriter @@ -251,9 +251,13 @@ object TestSubscriber { /** * Expect and return a stream element. */ - def expectNext(): I = probe.receiveOne(probe.remaining) match { - case OnNext(elem) ⇒ elem.asInstanceOf[I] - case other ⇒ throw new AssertionError("expected OnNext, found " + other) + def expectNext(): I = { + val t = probe.remainingOr(probe.testKitSettings.SingleExpectDefaultTimeout.dilated) + probe.receiveOne(t) match { + case null ⇒ throw new AssertionError(s"Expected OnNext(_), yet no element signaled during $t") + case OnNext(elem) ⇒ elem.asInstanceOf[I] + case other ⇒ throw new AssertionError("expected OnNext, found " + other) + } } /** @@ -393,7 +397,7 @@ object TestSubscriber { * See also [[#expectSubscriptionAndComplete(Throwable, Boolean)]] if no demand should be signalled. 
*/ def expectSubscriptionAndError(cause: Throwable): Self = - expectSubscriptionAndError(cause, true) + expectSubscriptionAndError(cause, signalDemand = true) /** * Fluent DSL @@ -522,7 +526,7 @@ object TestSubscriber { } def expectEventPF[T](f: PartialFunction[SubscriberEvent, T]): T = - probe.expectMsgPF[T](probe.remaining)(f.asInstanceOf[PartialFunction[Any, T]]) + probe.expectMsgPF[T]()(f.asInstanceOf[PartialFunction[Any, T]]) /** * Receive messages for a given duration or until one does not match a given partial function. @@ -551,8 +555,7 @@ object TestSubscriber { @tailrec def drain(): immutable.Seq[I] = self.expectEvent(deadline.timeLeft) match { case OnError(ex) ⇒ - // TODO once on JDK7+ this could be made an AssertionError, since it can carry ex in its cause param - throw new AssertionError(s"toStrict received OnError(${ex.getMessage}) while draining stream! Accumulated elements: ${b.result()}") + throw new AssertionError(s"toStrict received OnError while draining stream! Accumulated elements: ${b.result()}", ex) case OnComplete ⇒ b.result() case OnNext(i: I @unchecked) ⇒ diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSink.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSink.scala index 3a941e6705..ed35074f5e 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSink.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSink.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.testkit.javadsl diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSource.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSource.scala index 5fe07a7a5b..ee104d7c5e 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSource.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/javadsl/TestSource.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.testkit.javadsl diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSink.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSink.scala index 495b4ae58d..8ec46a3cfb 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSink.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSink.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.testkit.scaladsl diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSource.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSource.scala index 55c21439fd..ef6dc16ce3 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSource.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/scaladsl/TestSource.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
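The StreamTestKit.scala hunks above change `expectNext()` so that, outside a `within` block, it falls back to the test kit's dilated single-expect default timeout instead of relying on `probe.remaining` (which throws when no enclosing deadline is set), and so that it fails with an explicit message when no element arrives in time. A brief, standalone usage sketch of the probe API being touched (system name and values are illustrative, not from the patch):

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink

// Illustrative use of TestSubscriber.Probe: bare expectNext() calls like these
// now work in plain tests, without wrapping them in a `within` block.
object ProbeSketch extends App {
  implicit val system = ActorSystem("probe-sketch")
  implicit val mat    = ActorMaterializer()

  Source(1 to 3)
    .runWith(TestSink.probe[Int])
    .request(3)
    .expectNext(1, 2, 3)
    .expectComplete()

  system.terminate()
}
```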
*/ package akka.stream.testkit.scaladsl diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/AkkaSpec.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/AkkaSpec.scala index 636b40e2ac..fe75c944c2 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/AkkaSpec.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/AkkaSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.testkit diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/BaseTwoStreamsSetup.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/BaseTwoStreamsSetup.scala index 21df5cd02e..4b9b08829a 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/BaseTwoStreamsSetup.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/BaseTwoStreamsSetup.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.testkit diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/ChainSetup.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/ChainSetup.scala index 344583b0f9..17df41f66f 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/ChainSetup.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/ChainSetup.scala @@ -1,22 +1,22 @@ package akka.stream.testkit +import akka.NotUsed import akka.actor.{ ActorRefFactory, ActorSystem } import akka.stream.ActorMaterializerSettings import akka.stream.scaladsl._ import org.reactivestreams.Publisher import akka.stream.ActorMaterializer -import akka.stream.Attributes class ChainSetup[In, Out, M]( - stream: Flow[In, In, Unit] ⇒ Flow[In, Out, M], + stream: Flow[In, In, NotUsed] ⇒ Flow[In, Out, M], val settings: ActorMaterializerSettings, materializer: ActorMaterializer, toPublisher: (Source[Out, _], ActorMaterializer) ⇒ Publisher[Out])(implicit val system: ActorSystem) { - def this(stream: Flow[In, In, Unit] ⇒ Flow[In, Out, M], settings: ActorMaterializerSettings, toPublisher: (Source[Out, _], ActorMaterializer) ⇒ Publisher[Out])(implicit system: ActorSystem) = + def this(stream: Flow[In, In, NotUsed] ⇒ Flow[In, Out, M], settings: ActorMaterializerSettings, toPublisher: (Source[Out, _], ActorMaterializer) ⇒ Publisher[Out])(implicit system: ActorSystem) = this(stream, settings, ActorMaterializer(settings)(system), toPublisher)(system) - def this(stream: Flow[In, In, Unit] ⇒ Flow[In, Out, M], + def this(stream: Flow[In, In, NotUsed] ⇒ Flow[In, Out, M], settings: ActorMaterializerSettings, materializerCreator: (ActorMaterializerSettings, ActorRefFactory) ⇒ ActorMaterializer, toPublisher: (Source[Out, _], ActorMaterializer) ⇒ Publisher[Out])(implicit system: ActorSystem) = diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala index c9ec2529d9..20808b4318 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.testkit diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/ScriptedTest.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/ScriptedTest.scala index 73c6402c80..98dc4a120f 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/ScriptedTest.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/ScriptedTest.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.testkit +import akka.NotUsed import akka.actor.ActorSystem import akka.stream.testkit.TestPublisher._ import akka.stream.testkit.TestSubscriber._ @@ -87,7 +88,7 @@ trait ScriptedTest extends Matchers { } class ScriptRunner[In, Out, M]( - op: Flow[In, In, Unit] ⇒ Flow[In, Out, M], + op: Flow[In, In, NotUsed] ⇒ Flow[In, Out, M], settings: ActorMaterializerSettings, script: Script[In, Out], maximumOverrun: Int, @@ -197,7 +198,7 @@ trait ScriptedTest extends Matchers { } def runScript[In, Out, M](script: Script[In, Out], settings: ActorMaterializerSettings, maximumOverrun: Int = 3, maximumRequest: Int = 3, maximumBuffer: Int = 3)( - op: Flow[In, In, Unit] ⇒ Flow[In, Out, M])(implicit system: ActorSystem): Unit = { + op: Flow[In, In, NotUsed] ⇒ Flow[In, Out, M])(implicit system: ActorSystem): Unit = { new ScriptRunner(op, settings, script, maximumOverrun, maximumRequest, maximumBuffer).run() } diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/StreamTestKitSpec.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/StreamTestKitSpec.scala index 9bdc027770..3d071347a4 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/StreamTestKitSpec.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/StreamTestKitSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.testkit @@ -22,7 +22,7 @@ class StreamTestKitSpec extends AkkaSpec { } "#toStrict with failing source" in { - val msg = intercept[AssertionError] { + val error = intercept[AssertionError] { Source.fromIterator(() ⇒ new Iterator[Int] { var i = 0 override def hasNext: Boolean = true @@ -35,10 +35,10 @@ class StreamTestKitSpec extends AkkaSpec { } }).runWith(TestSink.probe) .toStrict(300.millis) - }.getMessage + } - msg should include("Boom!") - msg should include("List(1, 2)") + error.getCause.getMessage should include("Boom!") + error.getMessage should include("List(1, 2)") } "#toStrict when subscription was already obtained" in { diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestPublisherSubscriberSpec.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestPublisherSubscriberSpec.scala index 83230f3596..216b0f20ca 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestPublisherSubscriberSpec.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestPublisherSubscriberSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
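The ChainSetup and ScriptedTest helpers above move from `Flow[In, In, Unit]` to `Flow[In, In, NotUsed]`, following the Akka 2.4 convention of using `akka.NotUsed` as the materialized value of stages that materialize nothing. A minimal sketch of what the new signatures look like at a call site (object and value names are illustrative):

```scala
import akka.NotUsed
import akka.stream.scaladsl.{ Flow, Source }

// Flows and sources that materialize no value are now typed with NotUsed.
object NotUsedSketch {
  val doubler: Flow[Int, Int, NotUsed] = Flow[Int].map(_ * 2)
  val numbers: Source[Int, NotUsed]    = Source(1 to 10)

  // Composing them keeps the source's materialized value (still NotUsed).
  val doubled: Source[Int, NotUsed] = numbers.via(doubler)
}
```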
*/ package akka.stream.testkit @@ -38,8 +38,8 @@ class TestPublisherSubscriberSpec extends AkkaSpec { upstreamSubscription.sendComplete() downstream.expectEventPF { - case c @ OnComplete ⇒ - case _ ⇒ fail() + case OnComplete ⇒ + case _ ⇒ fail() } } diff --git a/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestUtils.scala b/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestUtils.scala index b052280203..148fccd4aa 100644 --- a/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestUtils.scala +++ b/akka-stream-testkit/src/test/scala/akka/stream/testkit/TestUtils.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.testkit diff --git a/akka-stream-tests-tck/build.sbt b/akka-stream-tests-tck/build.sbt new file mode 100644 index 0000000000..497bb32918 --- /dev/null +++ b/akka-stream-tests-tck/build.sbt @@ -0,0 +1,7 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +Dependencies.streamTestsTck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala similarity index 95% rename from akka-stream-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala index 50ec8eff52..30e6e5319a 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorPublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala similarity index 92% rename from akka-stream-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala index 4e7f1d351d..9e8d5e6da6 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSubscriberTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala similarity index 92% rename from akka-stream-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala index 9bbf0a805b..a80e76ee73 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ActorSystemLifecycle.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
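The `toStrict` failure handling rewritten in the StreamTestKit hunks above now attaches the original `OnError` exception as the cause of the `AssertionError` (and the spec asserts on `getCause` accordingly), which the old TODO could not do before JDK 7 introduced the two-argument constructor. A tiny standalone sketch of that constructor (names are made up for illustration):

```scala
// AssertionError(String, Throwable) exists since JDK 7: the original failure
// is preserved as the cause instead of being flattened into the message.
object AssertionCauseSketch extends App {
  val boom  = new RuntimeException("Boom!")
  val error = new AssertionError("toStrict failed while draining stream", boom)
  assert(error.getCause.getMessage == "Boom!")
}
```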
*/ package akka.stream.tck @@ -31,7 +31,7 @@ trait ActorSystemLifecycle { @AfterClass def shutdownActorSystem(): Unit = { try { - system.shutdown() + system.terminate() system.awaitTermination(shutdownTimeout) } catch { case _: TimeoutException ⇒ diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala similarity index 96% rename from akka-stream-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala index 43b4ea4756..ff115c4cb4 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaIdentityProcessorVerification.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala similarity index 95% rename from akka-stream-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala index 86bd21aaa2..1f60b3e2fe 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaPublisherVerification.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala similarity index 95% rename from akka-stream-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala index 17175de7fa..eb1907bbd7 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/AkkaSubscriberVerification.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/ConcatTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ConcatTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/ConcatTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/ConcatTest.scala index 138a0be139..2744f5077e 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/ConcatTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ConcatTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
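The ActorSystemLifecycle hunk above replaces the deprecated `system.shutdown()` with `system.terminate()`. The hunk keeps the blocking `awaitTermination`, but `terminate()` also returns a `Future[Terminated]` that can be awaited directly; a minimal standalone sketch of that idiom (system name and timeout are illustrative):

```scala
import akka.actor.ActorSystem
import scala.concurrent.Await
import scala.concurrent.duration._

// terminate() returns Future[Terminated]; awaiting it replaces awaitTermination.
object TerminateSketch extends App {
  val system     = ActorSystem("terminate-sketch")
  val terminated = system.terminate()
  Await.result(terminated, 10.seconds)
}
```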
*/ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala similarity index 86% rename from akka-stream-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala index bdacc09bbc..d9fdb56e88 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/EmptyPublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala similarity index 88% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala index 2a71a2f650..e55c526eee 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FanoutPublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala similarity index 95% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala index f36813b48c..a853e00762 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FilePublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FlattenTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FlattenTest.scala similarity index 88% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FlattenTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FlattenTest.scala index 5e81535058..a1035a522a 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FlattenTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FlattenTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala index c4cd4c4476..e65eb0fbc5 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FoldSinkSubscriberTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala index 7a6a9722fa..347c0ab3bb 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/ForeachSinkSubscriberTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala index 447a731320..1cb1569fde 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FusableProcessorTest.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck +import akka.NotUsed import akka.stream.impl.Stages import akka.stream._ import akka.stream.impl.fusing.GraphStages @@ -18,7 +19,7 @@ class FusableProcessorTest extends AkkaIdentityProcessorVerification[Int] { implicit val materializer = ActorMaterializer(settings)(system) // withAttributes "wraps" the underlying identity and protects it from automatic removal - Flow[Int].via(GraphStages.Identity.asInstanceOf[Graph[FlowShape[Int, Int], Unit]]).named("identity").toProcessor.run() + Flow[Int].via(GraphStages.Identity.asInstanceOf[Graph[FlowShape[Int, Int], NotUsed]]).named("identity").toProcessor.run() } override def createElement(element: Int): Int = element diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala similarity index 87% rename from akka-stream-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala index c22553f6fd..21ef5aa288 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/FuturePublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/GroupByTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/GroupByTest.scala similarity index 91% rename from akka-stream-tck/src/test/scala/akka/stream/tck/GroupByTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/GroupByTest.scala index a8aafc44a8..d264cb8f0f 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/GroupByTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/GroupByTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala similarity index 90% rename from akka-stream-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala index 4edda9a96b..3c175a5808 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/InputStreamSourceTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala index 756ef13b70..45d438a059 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/IterablePublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/MapTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/MapTest.scala similarity index 88% rename from akka-stream-tck/src/test/scala/akka/stream/tck/MapTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/MapTest.scala index fdf4cfd394..d99c1e3809 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/MapTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/MapTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala similarity index 87% rename from akka-stream-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala index b7b08edfa9..712c197d5d 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/MaybeSourceTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala similarity index 89% rename from akka-stream-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala index d7a9a3e623..d9cd0d0f40 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/PrefixAndTailTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala similarity index 84% rename from akka-stream-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala index d2701cf474..cc36009d91 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SingleElementSourceTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala similarity index 90% rename from akka-stream-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala index cb08df6172..54774e12c9 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SinkholeSubscriberTest.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck +import akka.Done import akka.stream.impl.SinkholeSubscriber import org.reactivestreams.tck.{ TestEnvironment, SubscriberWhiteboxVerification } import org.reactivestreams.tck.SubscriberWhiteboxVerification.{ SubscriberPuppet, WhiteboxSubscriberProbe } @@ -14,7 +15,7 @@ import org.reactivestreams.{ Subscription, Subscriber } class SinkholeSubscriberTest extends SubscriberWhiteboxVerification[JInt](new TestEnvironment()) with TestNGSuiteLike { override def createSubscriber(probe: WhiteboxSubscriberProbe[JInt]): Subscriber[JInt] = { new Subscriber[JInt] { - val hole = new SinkholeSubscriber[JInt](Promise[Unit]()) + val hole = new SinkholeSubscriber[JInt](Promise[Done]()) override def onError(t: Throwable): Unit = { hole.onError(t) diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala similarity index 91% rename from akka-stream-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala index 1d1c44489b..067b9eb6c0 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/SplitWhenTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/Timeouts.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/Timeouts.scala similarity index 54% rename from akka-stream-tck/src/test/scala/akka/stream/tck/Timeouts.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/Timeouts.scala index 33be0fbf05..b66f9ac667 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/Timeouts.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/Timeouts.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.tck @@ -12,8 +12,8 @@ import akka.testkit._ */ object Timeouts { - def publisherShutdownTimeoutMillis: Int = 1000 + def publisherShutdownTimeoutMillis: Int = 3000 - def defaultTimeoutMillis: Int = 500 + def defaultTimeoutMillis: Int = 800 } diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala similarity index 93% rename from akka-stream-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala index d550551f69..ee35fd0ed2 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/TransformProcessorTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala similarity index 93% rename from akka-stream-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala rename to akka-stream-tests-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala index c0b005c7de..0706ef23b5 100644 --- a/akka-stream-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala +++ b/akka-stream-tests-tck/src/test/scala/akka/stream/tck/VirtualPublisherTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.tck diff --git a/akka-stream-tests/build.sbt b/akka-stream-tests/build.sbt new file mode 100644 index 0000000000..a8ad627a33 --- /dev/null +++ b/akka-stream-tests/build.sbt @@ -0,0 +1,8 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys + +AkkaBuild.defaultSettings +AkkaBuild.dontPublishSettings +AkkaBuild.experimentalSettings +Formatting.formatSettings +Dependencies.streamTests diff --git a/akka-stream-tests/src/test/java/akka/stream/StreamTest.java b/akka-stream-tests/src/test/java/akka/stream/StreamTest.java index fe9233120c..00bd07331d 100644 --- a/akka-stream-tests/src/test/java/akka/stream/StreamTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/StreamTest.java @@ -1,13 +1,15 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream; +import org.scalatest.junit.JUnitSuite; + import akka.actor.ActorSystem; import akka.stream.javadsl.AkkaJUnitActorSystemResource; -public abstract class StreamTest { +public abstract class StreamTest extends JUnitSuite { final protected ActorSystem system; final protected ActorMaterializer materializer; diff --git a/akka-stream-tests/src/test/java/akka/stream/actor/ActorPublisherTest.java b/akka-stream-tests/src/test/java/akka/stream/actor/ActorPublisherTest.java index d5672555d6..e59f4eb1bc 100644 --- a/akka-stream-tests/src/test/java/akka/stream/actor/ActorPublisherTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/actor/ActorPublisherTest.java @@ -44,6 +44,7 @@ public class ActorPublisherTest extends StreamTest { final Publisher publisher = UntypedActorPublisher.create(ref); Source.fromPublisher(publisher) .runForeach(new akka.japi.function.Procedure() { + private static final long serialVersionUID = 1L; @Override public void apply(Integer elem) throws Exception { probe.getRef().tell(elem, ActorRef.noSender()); diff --git a/akka-stream-tests/src/test/java/akka/stream/io/InputStreamSinkTest.java b/akka-stream-tests/src/test/java/akka/stream/io/InputStreamSinkTest.java index bc8f3ea867..4bc6f5bf5a 100644 --- a/akka-stream-tests/src/test/java/akka/stream/io/InputStreamSinkTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/io/InputStreamSinkTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.io; diff --git a/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java new file mode 100644 index 0000000000..ed78e513a4 --- /dev/null +++ b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSinkTest.java @@ -0,0 +1,52 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package akka.stream.io; + +import akka.stream.StreamTest; +import akka.stream.javadsl.AkkaJUnitActorSystemResource; +import akka.stream.javadsl.Source; +import akka.stream.javadsl.StreamConverters; +import akka.stream.testkit.Utils; +import akka.util.ByteString; +import org.junit.ClassRule; +import org.junit.Test; +import scala.concurrent.Await; +import scala.concurrent.Future; +import scala.concurrent.duration.FiniteDuration; + +import java.io.OutputStream; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + +public class OutputStreamSinkTest extends StreamTest { + public OutputStreamSinkTest() { + super(actorSystemResource); + } + + @ClassRule + public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource("OutputStreamSink", + Utils.UnboundedMailboxConfig()); + @Test + public void mustSignalFailureViaIoResult() throws Exception { + + final OutputStream os = new OutputStream() { + volatile int left = 3; + public void write(int data) { + if (left == 0) { + throw new RuntimeException("Can't accept more data."); + } + left -= 1; + } + }; + final CompletionStage resultFuture = Source.single(ByteString.fromString("123456")).runWith(StreamConverters.fromOutputStream(() -> os), materializer); + final IOResult result = resultFuture.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); + + assertFalse(result.wasSuccessful()); + assertTrue(result.getError().getMessage().equals("Can't accept more data.")); + } + +} diff --git a/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSourceTest.java b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSourceTest.java index 927d3f492d..d9d6b431f7 100644 --- a/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSourceTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/io/OutputStreamSourceTest.java @@ -1,28 +1,27 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.io; -import akka.actor.ActorRef; -import akka.japi.Pair; -import akka.japi.function.Procedure; -import akka.stream.StreamTest; -import akka.stream.javadsl.*; -import akka.stream.testkit.AkkaSpec; -import akka.stream.testkit.Utils; -import akka.testkit.JavaTestKit; -import akka.util.ByteString; -import com.typesafe.config.ConfigFactory; -import org.junit.ClassRule; -import org.junit.Test; -import scala.concurrent.Future; -import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; +import static org.junit.Assert.assertEquals; import java.io.OutputStream; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; +import org.junit.ClassRule; +import org.junit.Test; + +import akka.actor.ActorRef; +import akka.japi.function.Procedure; +import akka.stream.StreamTest; +import akka.stream.javadsl.AkkaJUnitActorSystemResource; +import akka.stream.javadsl.Sink; +import akka.stream.javadsl.Source; +import akka.stream.javadsl.StreamConverters; +import akka.stream.testkit.Utils; +import akka.testkit.JavaTestKit; +import akka.util.ByteString; +import scala.concurrent.duration.FiniteDuration; public class OutputStreamSourceTest extends StreamTest { public OutputStreamSourceTest() { @@ -39,6 +38,7 @@ public class OutputStreamSourceTest extends StreamTest { final Source source = StreamConverters.asOutputStream(timeout); final OutputStream s = source.to(Sink.foreach(new Procedure() { + private static final long serialVersionUID = 1L; public void apply(ByteString elem) { probe.getRef().tell(elem, ActorRef.noSender()); } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/AkkaJUnitActorSystemResource.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/AkkaJUnitActorSystemResource.java index 009a9f0f87..eedeb7f672 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/AkkaJUnitActorSystemResource.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/AkkaJUnitActorSystemResource.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.javadsl; diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/AttributesTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/AttributesTest.java index caaf023fef..b298dd6deb 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/AttributesTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/AttributesTest.java @@ -1,17 +1,29 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.javadsl; import static org.junit.Assert.assertEquals; + import java.util.Arrays; import java.util.Collections; +import org.junit.ClassRule; import org.junit.Test; import akka.stream.Attributes; +import akka.stream.StreamTest; +import akka.stream.testkit.AkkaSpec; -public class AttributesTest { +public class AttributesTest extends StreamTest { + + public AttributesTest() { + super(actorSystemResource); + } + + @ClassRule + public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource("AttributesTest", + AkkaSpec.testConf()); final Attributes attributes = Attributes.name("a") diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java index d20f398a34..718dcd75b6 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/BidiFlowTest.java @@ -1,13 +1,15 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.javadsl; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; +import akka.NotUsed; import org.junit.ClassRule; import org.junit.Test; @@ -15,7 +17,6 @@ import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import akka.japi.Pair; import akka.stream.*; import akka.stream.testkit.AkkaSpec; @@ -34,11 +35,11 @@ public class BidiFlowTest extends StreamTest { public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource( "FlowTest", AkkaSpec.testConf()); - private final BidiFlow bidi = BidiFlow + private final BidiFlow bidi = BidiFlow .fromGraph(GraphDSL.create( - new Function, BidiShape>() { + new Function, BidiShape>() { @Override - public BidiShape apply(Builder b) + public BidiShape apply(Builder b) throws Exception { final FlowShape top = b.add(Flow .of(Integer.class).map(new Function() { @@ -59,12 +60,12 @@ public class BidiFlowTest extends StreamTest { } })); - private final BidiFlow inverse = BidiFlow + private final BidiFlow inverse = BidiFlow .fromGraph( GraphDSL.create( - new Function, BidiShape>() { + new Function, BidiShape>() { @Override - public BidiShape apply(Builder b) + public BidiShape apply(Builder b) throws Exception { final FlowShape top = b.add(Flow.of(Long.class) .map(new Function() { @@ -85,33 +86,20 @@ public class BidiFlowTest extends StreamTest { } })); - private final BidiFlow> bidiMat = + private final BidiFlow> bidiMat = BidiFlow.fromGraph( GraphDSL.create( Sink.head(), - new Function2>, SinkShape, BidiShape>() { - @Override - public BidiShape apply(Builder> b, SinkShape sink) - throws Exception { + (b, sink) -> { b.from(b.add(Source.single(42))).to(sink); final FlowShape top = b.add(Flow - .of(Integer.class).map(new Function() { - @Override - public Long apply(Integer arg) { - return (long) ((int) arg) + 2; - } - })); + .of(Integer.class).map(i -> (long)(i + 2))); final FlowShape bottom = b.add(Flow - .of(ByteString.class).map(new Function() { - @Override - public String apply(ByteString arg) { - return arg.decodeString("UTF-8"); - } - })); + .of(ByteString.class).map(bytes -> bytes.decodeString("UTF-8"))); return new BidiShape(top .in(), top.out(), bottom.in(), bottom.out()); } - })); + )); private final 
String str = "Hello World"; private final ByteString bytes = ByteString.fromString(str); @@ -125,14 +113,11 @@ public class BidiFlowTest extends StreamTest { @Test public void mustWorkInIsolation() throws Exception { - final Pair, Future> p = + final Pair, CompletionStage> p = RunnableGraph.fromGraph(GraphDSL .create(Sink. head(), Sink. head(), - Keep., Future> both(), - new Function3, Future>>, SinkShape, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder, Future>> b, SinkShape st, - SinkShape sb) throws Exception { + Keep.both(), + (b, st, sb) -> { final BidiShape s = b.add(bidi); b.from(b.add(Source.single(1))).toInlet(s.in1()); @@ -140,11 +125,10 @@ public class BidiFlowTest extends StreamTest { b.from(b.add(Source.single(bytes))).toInlet(s.in2()); b.from(s.out2()).to(sb); return ClosedShape.getInstance(); - } })).run(materializer); - final Long rt = Await.result(p.first(), oneSec); - final String rb = Await.result(p.second(), oneSec); + final Long rt = p.first().toCompletableFuture().get(1, TimeUnit.SECONDS); + final String rb = p.second().toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals((Long) 3L, rt); assertEquals(str, rb); @@ -152,130 +136,99 @@ public class BidiFlowTest extends StreamTest { @Test public void mustWorkAsAFlowThatIsOpenOnTheLeft() throws Exception { - final Flow f = bidi.join(Flow.of(Long.class).map( + final Flow f = bidi.join(Flow.of(Long.class).map( new Function() { @Override public ByteString apply(Long arg) { return ByteString.fromString("Hello " + arg); } })); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("Hello 3", "Hello 4", "Hello 5"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList("Hello 3", "Hello 4", "Hello 5"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustWorkAsAFlowThatIsOpenOnTheRight() throws Exception { - final Flow f = Flow.of(String.class).map( + final Flow f = Flow.of(String.class).map( new Function() { @Override public Integer apply(String arg) { return Integer.valueOf(arg); } }).join(bidi); final List inputs = Arrays.asList(ByteString.fromString("1"), ByteString.fromString("2")); - final Future> result = Source.from(inputs).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList(3L, 4L), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(inputs).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList(3L, 4L), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustWorkWhenAtopItsInverse() throws Exception { - final Flow f = bidi.atop(inverse).join(Flow.of(Integer.class).map( + final Flow f = bidi.atop(inverse).join(Flow.of(Integer.class).map( new Function() { @Override public String apply(Integer arg) { return arg.toString(); } })); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("5", "6", "7"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList("5", "6", "7"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustWorkWhenReversed() throws Exception { - final Flow f = Flow.of(Integer.class).map( + final Flow f = Flow.of(Integer.class).map( new 
Function() { @Override public String apply(Integer arg) { return arg.toString(); } }).join(inverse.reversed()).join(bidi.reversed()); - final Future> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); - assertEquals(Arrays.asList("5", "6", "7"), Await.result(result, oneSec)); + final CompletionStage> result = Source.from(list).via(f).grouped(10).runWith(Sink.> head(), materializer); + assertEquals(Arrays.asList("5", "6", "7"), result.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustMaterializeToItsValue() throws Exception { - final Future f = RunnableGraph.fromGraph( - GraphDSL.create(bidiMat, - new Function2 >, BidiShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, - BidiShape shape) throws Exception { - final FlowShape left = b.add(Flow.of(String.class).map( - new Function() { - @Override - public Integer apply(String arg) { - return Integer.valueOf(arg); - } - })); - final FlowShape right = b.add(Flow.of(Long.class).map( - new Function() { - @Override - public ByteString apply(Long arg) { - return ByteString.fromString("Hello " + arg); - } - })); + final CompletionStage f = RunnableGraph.fromGraph( + GraphDSL.create(bidiMat, (b, shape) -> { + final FlowShape left = b.add(Flow.of(String.class).map(Integer::valueOf)); + final FlowShape right = b.add(Flow.of(Long.class).map(s -> ByteString.fromString("Hello " + s))); b.from(shape.out2()).via(left).toInlet(shape.in1()) .from(shape.out1()).via(right).toInlet(shape.in2()); return ClosedShape.getInstance(); - } })).run(materializer); - assertEquals((Integer) 42, Await.result(f, oneSec)); + assertEquals((Integer) 42, f.toCompletableFuture().get(1, TimeUnit.SECONDS)); } @Test public void mustCombineMaterializationValues() throws Exception { - final Flow> left = Flow.fromGraph(GraphDSL.create( - Sink.head(), new Function2>, SinkShape, FlowShape>() { - @Override - public FlowShape apply(Builder> b, - SinkShape sink) throws Exception { + final Flow> left = Flow.fromGraph(GraphDSL.create( + Sink.head(), (b, sink) -> { final UniformFanOutShape bcast = b.add(Broadcast.create(2)); final UniformFanInShape merge = b.add(Merge.create(2)); - final FlowShape flow = b.add(Flow.of(String.class).map( - new Function() { - @Override - public Integer apply(String arg) { - return Integer.valueOf(arg); - } - })); + final FlowShape flow = b.add(Flow.of(String.class).map(Integer::valueOf)); b.from(bcast).to(sink) .from(b.add(Source.single(1))).viaFanOut(bcast).toFanIn(merge) .from(flow).toFanIn(merge); return new FlowShape(flow.in(), merge.out()); - } })); - final Flow>> right = Flow.fromGraph(GraphDSL.create( - Sink.>head(), new Function2>>, SinkShape>, FlowShape>() { - @Override - public FlowShape apply(Builder>> b, - SinkShape> sink) throws Exception { + final Flow>> right = Flow.fromGraph(GraphDSL.create( + Sink.>head(), (b, sink) -> { final FlowShape> flow = b.add(Flow.of(Long.class).grouped(10)); b.from(flow).to(sink); return new FlowShape(flow.in(), b.add(Source.single(ByteString.fromString("10"))).out()); - } })); - final Pair, Future>, Future>> result = - left.joinMat(bidiMat, Keep., Future> both()).joinMat(right, Keep., Future>, Future>> both()).run(materializer); - final Future l = result.first().first(); - final Future m = result.first().second(); - final Future> r = result.second(); - assertEquals((Integer) 1, Await.result(l, oneSec)); - assertEquals((Integer) 42, Await.result(m, oneSec)); - final Long[] rr = Await.result(r, oneSec).toArray(new Long[2]); + final Pair, 
CompletionStage>, CompletionStage>> result = + left.joinMat(bidiMat, Keep.both()).joinMat(right, Keep.both()).run(materializer); + final CompletionStage l = result.first().first(); + final CompletionStage m = result.first().second(); + final CompletionStage> r = result.second(); + assertEquals((Integer) 1, l.toCompletableFuture().get(1, TimeUnit.SECONDS)); + assertEquals((Integer) 42, m.toCompletableFuture().get(1, TimeUnit.SECONDS)); + final Long[] rr = r.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(new Long[2]); Arrays.sort(rr); assertArrayEquals(new Long[] { 3L, 12L }, rr); } public void mustSuitablyOverrideAttributeHandlingMethods() { @SuppressWarnings("unused") - final BidiFlow b = + final BidiFlow b = bidi.withAttributes(Attributes.name("")).addAttributes(Attributes.asyncBoundary()).named(""); } } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java index f20564ae2b..898b3877e4 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowGraphTest.java @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.javadsl; +import akka.NotUsed; import akka.japi.Pair; -import akka.pattern.Patterns; +import akka.pattern.PatternsCS; import akka.japi.tuple.Tuple4; import akka.stream.*; import akka.stream.javadsl.GraphDSL.Builder; @@ -20,8 +21,9 @@ import org.reactivestreams.Publisher; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.Duration; -import scala.runtime.BoxedUnit; + import java.util.*; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; @@ -56,23 +58,23 @@ public class FlowGraphTest extends StreamTest { @Test public void mustBeAbleToUseMerge() throws Exception { - final Flow f1 = + final Flow f1 = Flow.of(String.class).transform(FlowGraphTest.this. op()).named("f1"); - final Flow f2 = + final Flow f2 = Flow.of(String.class).transform(FlowGraphTest.this. op()).named("f2"); @SuppressWarnings("unused") - final Flow f3 = + final Flow f3 = Flow.of(String.class).transform(FlowGraphTest.this. 
op()).named("f3"); - final Source in1 = Source.from(Arrays.asList("a", "b", "c")); - final Source in2 = Source.from(Arrays.asList("d", "e", "f")); + final Source in1 = Source.from(Arrays.asList("a", "b", "c")); + final Source in2 = Source.from(Arrays.asList("d", "e", "f")); - final Sink> publisher = Sink.asPublisher(false); + final Sink> publisher = Sink.asPublisher(AsPublisher.WITHOUT_FANOUT); - final Source source = Source.fromGraph( - GraphDSL.create(new Function, SourceShape>() { + final Source source = Source.fromGraph( + GraphDSL.create(new Function, SourceShape>() { @Override - public SourceShape apply(Builder b) throws Exception { + public SourceShape apply(Builder b) throws Exception { final UniformFanInShape merge = b.add(Merge.create(2)); b.from(b.add(in1)).via(b.add(f1)).toInlet(merge.in(0)); b.from(b.add(in2)).via(b.add(f2)).toInlet(merge.in(1)); @@ -82,9 +84,9 @@ public class FlowGraphTest extends StreamTest { // collecting final Publisher pub = source.runWith(publisher, materializer); - final Future> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); + final CompletionStage> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); - final List result = Await.result(all, Duration.apply(200, TimeUnit.MILLISECONDS)); + final List result = all.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); assertEquals(new HashSet(Arrays.asList("a", "b", "c", "d", "e", "f")), new HashSet(result)); } @@ -95,13 +97,13 @@ public class FlowGraphTest extends StreamTest { final Iterable input2 = Arrays.asList(1, 2, 3); RunnableGraph.fromGraph( GraphDSL.create( - new Function,ClosedShape>() { + new Function,ClosedShape>() { @Override - public ClosedShape apply(final Builder b) throws Exception { - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); + public ClosedShape apply(final Builder b) throws Exception { + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); final FanInShape2> zip = b.add(Zip.create()); - final Sink, BoxedUnit> out = createSink(probe); + final Sink, NotUsed> out = createSink(probe); b.from(b.add(in1)).toInlet(zip.in0()); b.from(b.add(in2)).toInlet(zip.in1()); @@ -130,9 +132,9 @@ public class FlowGraphTest extends StreamTest { final Iterable expected2 = Arrays.asList(1, 2, 3); RunnableGraph.fromGraph(GraphDSL.create( - new Function, ClosedShape>() { + new Function, ClosedShape>() { @Override - public ClosedShape apply(final Builder b) throws Exception { + public ClosedShape apply(final Builder b) throws Exception { final SourceShape> in = b.add(Source.from(input)); final FanOutShape2, String, Integer> unzip = b.add(Unzip.create()); @@ -152,7 +154,7 @@ public class FlowGraphTest extends StreamTest { assertEquals(expected2, output2); } - private static Sink createSink(final JavaTestKit probe){ + private static Sink createSink(final JavaTestKit probe){ return Sink.actorRef(probe.getRef(), "onComplete"); } @@ -162,10 +164,10 @@ public class FlowGraphTest extends StreamTest { final JavaTestKit probe2 = new JavaTestKit(system); RunnableGraph.fromGraph(GraphDSL.create( - new Function, ClosedShape>() { + new Function, ClosedShape>() { @Override - public ClosedShape apply(final Builder b) throws Exception { - final Source in = Source.single(1); + public ClosedShape apply(final Builder b) throws Exception { + final Source in = Source.single(1); final FanOutShape2 unzip = b.add(UnzipWith.create( new Function>() { @@ -205,10 +207,10 @@ public class FlowGraphTest extends 
StreamTest { final JavaTestKit probe4 = new JavaTestKit(system); RunnableGraph.fromGraph(GraphDSL.create( - new Function, ClosedShape>() { + new Function, ClosedShape>() { @Override - public ClosedShape apply(final Builder b) throws Exception { - final Source in = Source.single(1); + public ClosedShape apply(final Builder b) throws Exception { + final Source in = Source.single(1); final FanOutShape4 unzip = b.add(UnzipWith.create4( new Function>() { @@ -248,51 +250,45 @@ public class FlowGraphTest extends StreamTest { @Test public void mustBeAbleToUseZipWith() throws Exception { - final Source in1 = Source.single(1); - final Source in2 = Source.single(10); + final Source in1 = Source.single(1); + final Source in2 = Source.single(10); - final Graph, BoxedUnit> sumZip = ZipWith.create( + final Graph, NotUsed> sumZip = ZipWith.create( new Function2() { @Override public Integer apply(Integer l, Integer r) throws Exception { return l + r; } }); - final Future future = RunnableGraph.fromGraph(GraphDSL.create(Sink.head(), - new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph(GraphDSL.create(Sink.head(), + (b, out) -> { final FanInShape2 zip = b.add(sumZip); b.from(b.add(in1)).toInlet(zip.in0()); b.from(b.add(in2)).toInlet(zip.in1()); b.from(zip.out()).to(out); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(11, (int) result); } @Test public void mustBeAbleToUseZip4With() throws Exception { - final Source in1 = Source.single(1); - final Source in2 = Source.single(10); - final Source in3 = Source.single(100); - final Source in4 = Source.single(1000); + final Source in1 = Source.single(1); + final Source in2 = Source.single(10); + final Source in3 = Source.single(100); + final Source in4 = Source.single(1000); - final Graph, BoxedUnit> sumZip = ZipWith.create4( + final Graph, NotUsed> sumZip = ZipWith.create4( new Function4() { @Override public Integer apply(Integer i1, Integer i2, Integer i3, Integer i4) throws Exception { return i1 + i2 + i3 + i4; } }); - final Future future = RunnableGraph.fromGraph( - GraphDSL.create(Sink.head(), - new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph( + GraphDSL.create(Sink.head(), (b, out) -> { final FanInShape4 zip = b.add(sumZip); b.from(b.add(in1)).toInlet(zip.in0()); b.from(b.add(in2)).toInlet(zip.in1()); @@ -300,34 +296,26 @@ public class FlowGraphTest extends StreamTest { b.from(b.add(in4)).toInlet(zip.in3()); b.from(zip.out()).to(out); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(1111, (int) result); } @Test public void mustBeAbleToUseMatValue() throws Exception { @SuppressWarnings("unused") - final Source in1 = Source.single(1); + final Source in1 = Source.single(1); final TestProbe probe = TestProbe.apply(system); - final Future future = RunnableGraph.fromGraph( - GraphDSL.create(Sink. 
head(), new Function2>, SinkShape, ClosedShape>() { - @Override - public ClosedShape apply(Builder> b, SinkShape out) throws Exception { + final CompletionStage future = RunnableGraph.fromGraph( + GraphDSL.create(Sink. head(), (b, out) -> { b.from(b.add(Source.single(1))).to(out); - b.from(b.materializedValue()).to(b.add(Sink.foreach(new Procedure>(){ - public void apply(Future mat) throws Exception { - Patterns.pipe(mat, system.dispatcher()).to(probe.ref()); - } - }))); + b.from(b.materializedValue()).to(b.add(Sink.foreach(mat -> PatternsCS.pipe(mat, system.dispatcher()).to(probe.ref())))); return ClosedShape.getInstance(); - } })).run(materializer); - final Integer result = Await.result(future, Duration.create(300, TimeUnit.MILLISECONDS)); + final Integer result = future.toCompletableFuture().get(300, TimeUnit.MILLISECONDS); assertEquals(1, (int) result); probe.expectMsg(1); diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java index bc1d87864e..13e89fce07 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/FlowTest.java @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.javadsl; +import akka.Done; +import akka.NotUsed; import akka.actor.ActorRef; import akka.dispatch.Foreach; import akka.dispatch.Futures; @@ -24,11 +26,14 @@ import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.stream.Stream; import static akka.stream.testkit.StreamTestKit.PublisherProbeSubscription; import static org.junit.Assert.*; @@ -48,8 +53,8 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final String[] lookup = { "a", "b", "c", "d", "e", "f" }; final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); - final Source ints = Source.from(input); - final Flow flow1 = Flow.of(Integer.class).drop(2).take(3 + final Source ints = Source.from(input); + final Flow flow1 = Flow.of(Integer.class).drop(2).take(3 ).takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS )).map(new Function() { public String apply(Integer elem) { @@ -60,7 +65,7 @@ public class FlowTest extends StreamTest { return !elem.equals("c"); } }); - final Flow flow2 = Flow.of(String.class).grouped(2 + final Flow flow2 = Flow.of(String.class).grouped(2 ).mapConcat(new Function, java.lang.Iterable>() { public java.util.List apply(java.util.List elem) { return elem; @@ -72,16 +77,9 @@ public class FlowTest extends StreamTest { } }); - ints.via(flow1.via(flow2)).runFold("", new Function2() { - public String apply(String acc, String elem) { - return acc + elem; - } - }, materializer - ).foreach(new Foreach() { // Scala Future - public void each(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, system.dispatcher()); + ints.via(flow1.via(flow2)) + .runFold("", (acc, elem) -> acc + elem, materializer) + .thenAccept(elem -> probe.getRef().tell(elem, ActorRef.noSender())); probe.expectMsgEquals("de"); } @@ -89,36 +87,26 @@ public class FlowTest 
extends StreamTest { @Test public void mustBeAbleToUseDropWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); - final Flow flow = Flow.of(Integer.class).dropWhile - (new Predicate() { - public boolean test(Integer elem) { - return elem < 2; - } - }); + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); + final Flow flow = + Flow.of(Integer.class).dropWhile(elem -> elem < 2); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(2); probe.expectMsgEquals(3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseIntersperse() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); - final Flow flow = Flow.of(String.class).intersperse("[", ",", "]"); + final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); + final Flow flow = Flow.of(String.class).intersperse("[", ",", "]"); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals("["); probe.expectMsgEquals("0"); @@ -129,20 +117,17 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals(","); probe.expectMsgEquals("3"); probe.expectMsgEquals("]"); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseIntersperseAndConcat() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); - final Flow flow = Flow.of(String.class).intersperse(","); + final Source source = Source.from(Arrays.asList("0", "1", "2", "3")); + final Flow flow = Flow.of(String.class).intersperse(","); - final Future future = Source.single(">> ").concat(source.via(flow)).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + Source.single(">> ").concat(source.via(flow)).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(">> "); probe.expectMsgEquals("0"); @@ -152,25 +137,22 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals("2"); probe.expectMsgEquals(","); probe.expectMsgEquals("3"); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseTakeWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); - final Flow flow = Flow.of(Integer.class).takeWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)); + final Flow 
flow = Flow.of(Integer.class).takeWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(0); probe.expectMsgEquals(1); @@ -178,7 +160,7 @@ public class FlowTest extends StreamTest { FiniteDuration duration = Duration.apply(200, TimeUnit.MILLISECONDS); probe.expectNoMsg(duration); - Await.ready(future, duration); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @@ -187,7 +169,7 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7); // duplicate each element, stop after 4 elements, and emit sum to the end - final Flow flow = Flow.of(Integer.class).transform(new Creator>() { + final Flow flow = Flow.of(Integer.class).transform(new Creator>() { @Override public PushPullStage create() throws Exception { return new StatefulStage() { @@ -240,7 +222,7 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToUseGroupBy() throws Exception { final Iterable input = Arrays.asList("Aaa", "Abb", "Bcc", "Cdd", "Cee"); - final Flow, BoxedUnit> flow = Flow + final Flow, NotUsed> flow = Flow .of(String.class) .groupBy(3, new Function() { public String apply(String elem) { @@ -250,9 +232,9 @@ public class FlowTest extends StreamTest { .grouped(10) .mergeSubstreams(); - final Future>> future = + final CompletionStage>> future = Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final Object[] result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)).toArray(); + final Object[] result = future.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(); Arrays.sort(result, (Comparator)(Object) new Comparator>() { @Override public int compare(List o1, List o2) { @@ -266,7 +248,7 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToUseSplitWhen() throws Exception { final Iterable input = Arrays.asList("A", "B", "C", ".", "D", ".", "E", "F"); - final Flow, BoxedUnit> flow = Flow + final Flow, NotUsed> flow = Flow .of(String.class) .splitWhen(new Predicate() { public boolean test(String elem) { @@ -276,9 +258,9 @@ public class FlowTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList(".", "D"), Arrays.asList(".", "E", "F")), result); } @@ -286,7 +268,7 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToUseSplitAfter() throws Exception { final Iterable input = Arrays.asList("A", "B", "C", ".", "D", ".", "E", "F"); - final Flow, BoxedUnit> flow = Flow + final Flow, NotUsed> flow = Flow .of(String.class) .splitAfter(new Predicate() { public boolean test(String elem) { @@ -296,9 +278,9 @@ public class FlowTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = 
Source.from(input).via(flow).grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C", "."), Arrays.asList("D", "."), Arrays.asList("E", "F")), result); } @@ -324,23 +306,23 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToUseMerge() throws Exception { - final Flow f1 = + final Flow f1 = Flow.of(String.class).transform(FlowTest.this. op()).named("f1"); - final Flow f2 = + final Flow f2 = Flow.of(String.class).transform(FlowTest.this. op()).named("f2"); @SuppressWarnings("unused") - final Flow f3 = + final Flow f3 = Flow.of(String.class).transform(FlowTest.this. op()).named("f3"); - final Source in1 = Source.from(Arrays.asList("a", "b", "c")); - final Source in2 = Source.from(Arrays.asList("d", "e", "f")); + final Source in1 = Source.from(Arrays.asList("a", "b", "c")); + final Source in2 = Source.from(Arrays.asList("d", "e", "f")); - final Sink> publisher = Sink.asPublisher(false); + final Sink> publisher = Sink.asPublisher(AsPublisher.WITHOUT_FANOUT); - final Source source = Source.fromGraph( - GraphDSL.create(new Function, SourceShape>() { + final Source source = Source.fromGraph( + GraphDSL.create(new Function, SourceShape>() { @Override - public SourceShape apply(Builder b) throws Exception { + public SourceShape apply(Builder b) throws Exception { final UniformFanInShape merge = b.add(Merge.create(2)); b.from(b.add(in1)).via(b.add(f1)).toInlet(merge.in(0)); b.from(b.add(in2)).via(b.add(f2)).toInlet(merge.in(1)); @@ -350,9 +332,9 @@ public class FlowTest extends StreamTest { // collecting final Publisher pub = source.runWith(publisher, materializer); - final Future> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); + final CompletionStage> all = Source.fromPublisher(pub).grouped(100).runWith(Sink.>head(), materializer); - final List result = Await.result(all, Duration.apply(200, TimeUnit.MILLISECONDS)); + final List result = all.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); assertEquals(new HashSet(Arrays.asList("a", "b", "c", "d", "e", "f")), new HashSet(result)); } @@ -362,8 +344,8 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList(1, 2, 3); - RunnableGraph.fromGraph(GraphDSL.create(new Function, ClosedShape>(){ - public ClosedShape apply(Builder b) { + RunnableGraph.fromGraph(GraphDSL.create(new Function, ClosedShape>(){ + public ClosedShape apply(Builder b) { final Outlet in1 = b.add(Source.from(input1)).out(); final Outlet in2 = b.add(Source.from(input2)).out(); final FanInShape2> zip = b.add(Zip.create()); @@ -383,7 +365,6 @@ public class FlowTest extends StreamTest { })).run(materializer); List output = Arrays.asList(probe.receiveN(3)); - @SuppressWarnings("unchecked") List> expected = Arrays.asList(new Pair("A", 1), new Pair( "B", 2), new Pair("C", 3)); assertEquals(expected, output); @@ -395,9 +376,9 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); - final Flow flow = Flow.of(String.class); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); + final Flow flow = Flow.of(String.class); 
in1.via(flow.concat(in2)).runForeach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -414,9 +395,9 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); - final Flow flow = Flow.of(String.class); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); + final Flow flow = Flow.of(String.class); in2.via(flow.prepend(in1)).runForeach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -431,15 +412,14 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUsePrefixAndTail() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(1, 2, 3, 4, 5, 6); - final Flow, Source>, ?> flow = Flow.of(Integer.class).prefixAndTail(3); - Future, Source>> future = - Source.from(input).via(flow).runWith(Sink., Source>>head(), materializer); - Pair, Source> result = Await.result(future, - probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + final Flow, Source>, NotUsed> flow = Flow.of(Integer.class).prefixAndTail(3); + CompletionStage, Source>> future = + Source.from(input).via(flow).runWith(Sink., Source>>head(), materializer); + Pair, Source> result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3), result.first()); - Future> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); - List tailResult = Await.result(tailFuture, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); + List tailResult = tailFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(4, 5, 6), tailResult); } @@ -449,16 +429,16 @@ public class FlowTest extends StreamTest { final Iterable input1 = Arrays.asList(1, 2, 3); final Iterable input2 = Arrays.asList(4, 5); - final List> mainInputs = new ArrayList>(); + final List> mainInputs = new ArrayList>(); mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); - final Flow, List, ?> flow = Flow.>create(). - flatMapConcat(ConstantFun.>javaIdentityFunction()).grouped(6); - Future> future = Source.from(mainInputs).via(flow) + final Flow, List, NotUsed> flow = Flow.>create(). + flatMapConcat(ConstantFun.>javaIdentityFunction()).grouped(6); + CompletionStage> future = Source.from(mainInputs).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3, 4, 5), result); } @@ -471,18 +451,18 @@ public class FlowTest extends StreamTest { final Iterable input3 = Arrays.asList(20, 21, 22, 23, 24, 25, 26, 27, 28, 29); final Iterable input4 = Arrays.asList(30, 31, 32, 33, 34, 35, 36, 37, 38, 39); - final List> mainInputs = new ArrayList>(); + final List> mainInputs = new ArrayList>(); mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); mainInputs.add(Source.from(input3)); mainInputs.add(Source.from(input4)); - final Flow, List, ?> flow = Flow.>create(). 
- flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60); - Future> future = Source.from(mainInputs).via(flow) + final Flow, List, NotUsed> flow = Flow.>create(). + flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60); + CompletionStage> future = Source.from(mainInputs).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); final Set set = new HashSet(); for (Integer i: result) { set.add(i); @@ -499,19 +479,29 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUseBuffer() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - final Flow, BoxedUnit> flow = Flow.of(String.class).buffer(2, OverflowStrategy.backpressure()).grouped(4); - Future> future = Source.from(input).via(flow) + final Flow, NotUsed> flow = Flow.of(String.class).buffer(2, OverflowStrategy.backpressure()).grouped(4); + final CompletionStage> future = Source.from(input).via(flow) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(input, result); } + @Test + public void mustBeAbleToUseWatchTermination() throws Exception { + final List input = Arrays.asList("A", "B", "C"); + CompletionStage future = Source.from(input) + .watchTermination(Keep.right()) + .to(Sink.ignore()).run(materializer); + + assertEquals(Done.getInstance(), future.toCompletableFuture().get(3, TimeUnit.SECONDS)); + } + @Test public void mustBeAbleToUseConflate() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - final Flow flow = Flow.of(String.class).conflate(new Function() { + final Flow flow = Flow.of(String.class).conflate(new Function() { @Override public String apply(String s) throws Exception { return s; @@ -522,13 +512,53 @@ public class FlowTest extends StreamTest { return aggr + in; } }); - Future future = Source.from(input).via(flow).runFold("", new Function2() { + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals("ABC", result); + } + + @Test + public void mustBeAbleToUseBatch() throws Exception { + final JavaTestKit probe = new JavaTestKit(system); + final List input = Arrays.asList("A", "B", "C"); + final Flow flow = Flow.of(String.class).batch(3L, new Function() { + @Override + public String apply(String s) throws Exception { + return s; + } + }, new Function2() { @Override public String apply(String aggr, String in) throws Exception { return aggr + in; } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + }); + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); + assertEquals("ABC", result); + } + + @Test + public void mustBeAbleToUseBatchWeighted() throws Exception { + final JavaTestKit probe = new JavaTestKit(system); + final List input = Arrays.asList("A", "B", "C"); + final Flow flow = Flow.of(String.class).batchWeighted(3L, new Function() { + @Override + public Object apply(String s) throws Exception { + 
return 1L; + } + }, new Function() { + @Override + public String apply(String s) throws Exception { + return s; + } + }, new Function2() { + @Override + public String apply(String aggr, String in) throws Exception { + return aggr + in; + } + }); + CompletionStage future = Source.from(input).via(flow).runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -536,20 +566,10 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUseExpand() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - final Flow flow = Flow.of(String.class).expand(new Function() { - @Override - public String apply(String in) throws Exception { - return in; - } - }, new Function>() { - @Override - public Pair apply(String in) throws Exception { - return new Pair(in, in); - } - }); - final Sink> sink = Sink.head(); - Future future = Source.from(input).via(flow).runWith(sink, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + final Flow flow = Flow.of(String.class).expand(in -> Stream.iterate(in, i -> i).iterator()); + final Sink> sink = Sink.head(); + CompletionStage future = Source.from(input).via(flow).runWith(sink, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ -557,11 +577,7 @@ public class FlowTest extends StreamTest { public void mustBeAbleToUseMapAsync() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("a", "b", "c"); - final Flow flow = Flow.of(String.class).mapAsync(4, new Function>() { - public Future apply(String elem) { - return Futures.successful(elem.toUpperCase()); - } - }); + final Flow flow = Flow.of(String.class).mapAsync(4, elem -> CompletableFuture.completedFuture(elem.toUpperCase())); Source.from(input).via(flow).runForeach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -577,8 +593,8 @@ public class FlowTest extends StreamTest { final TestPublisher.ManualProbe publisherProbe = TestPublisher.manualProbe(true,system); final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.fromPublisher(publisherProbe); - final Flow flow = Flow.of(Integer.class).map( + final Source source = Source.fromPublisher(publisherProbe); + final Flow flow = Flow.of(Integer.class).map( new Function() { public Integer apply(Integer elem) { if (elem == 2) throw new RuntimeException("ex"); @@ -592,11 +608,8 @@ public class FlowTest extends StreamTest { } }); - final Future future = source.via(flow).runWith(Sink.foreach(new Procedure() { - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = + source.via(flow).runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); final PublisherProbeSubscription s = publisherProbe.expectSubscription(); @@ -606,7 +619,7 @@ public class FlowTest extends StreamTest { probe.expectMsgEquals(1); s.sendNext(2); probe.expectMsgEquals(0); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -614,9 +627,9 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", 
"C"); - Flow otherFlow = Flow.of(String.class); + Flow otherFlow = Flow.of(String.class); - Flow myFlow = Flow.of(String.class).via(otherFlow); + Flow myFlow = Flow.of(String.class).via(otherFlow); Source.from(input).via(myFlow).runWith(Sink.foreach(new Procedure() { // Scala Future public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -630,9 +643,9 @@ public class FlowTest extends StreamTest { public void mustBeAbleToMaterializeIdentityToJavaSink() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Flow otherFlow = Flow.of(String.class); + Flow otherFlow = Flow.of(String.class); - Sink sink = Flow.of(String.class).to(otherFlow.to(Sink.foreach(new Procedure() { // Scala Future + Sink sink = Flow.of(String.class).to(otherFlow.to(Sink.foreach(new Procedure() { // Scala Future public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); } @@ -644,10 +657,10 @@ public class FlowTest extends StreamTest { @Test public void mustBeAbleToBroadcastEagerCancel() throws Exception { - final Sink sink = Sink.fromGraph( - GraphDSL.create(new Function, SinkShape>() { + final Sink sink = Sink.fromGraph( + GraphDSL.create(new Function, SinkShape>() { @Override - public SinkShape apply(Builder b) throws Exception { + public SinkShape apply(Builder b) throws Exception { final UniformFanOutShape broadcast = b.add(Broadcast.create(2, true)); final SinkShape out1 = b.add(Sink.cancelled()); final SinkShape out2 = b.add(Sink.ignore()); @@ -659,7 +672,7 @@ public class FlowTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); Source source = Source.actorRef(1, OverflowStrategy.dropNew()); - final ActorRef actor = source.toMat(sink, Keep.left()).run(materializer); + final ActorRef actor = source.toMat(sink, Keep.left()).run(materializer); probe.watch(actor); probe.expectTerminated(actor); } @@ -721,54 +734,54 @@ public class FlowTest extends StreamTest { } @Test - public void mustBeAbleToUseInitialTimeout() throws Exception { + public void mustBeAbleToUseInitialTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).initialTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).initialTimeout(Duration.create(1, "second"))) + .runWith(Sink. head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseCompletionTimeout() throws Exception { + public void mustBeAbleToUseCompletionTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).completionTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).completionTimeout(Duration.create(1, "second"))) + .runWith(Sink. 
head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseIdleTimeout() throws Exception { + public void mustBeAbleToUseIdleTimeout() throws Throwable { try { - Await.result( - Source.maybe() - .via(Flow.of(Integer.class).idleTimeout(Duration.create(1, "second"))) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source. maybe().via(Flow.of(Integer.class).idleTimeout(Duration.create(1, "second"))) + .runWith(Sink. head(), materializer).toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test public void mustBeAbleToUseKeepAlive() throws Exception { - Integer result = Await.result( + Integer result = Source.maybe() .via(Flow.of(Integer.class) .keepAlive(Duration.create(1, "second"), new Creator() { @@ -778,16 +791,15 @@ public class FlowTest extends StreamTest { }) ) .takeWithin(Duration.create(1500, "milliseconds")) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); + .runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals((Object) 0, result); } public void mustSuitablyOverrideAttributeHandlingMethods() { @SuppressWarnings("unused") - final Flow f = + final Flow f = Flow.of(Integer.class).withAttributes(Attributes.name("")).addAttributes(Attributes.asyncBoundary()).named(""); } } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java index 80b688a549..54d9fab3ec 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/SinkTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.javadsl; @@ -8,11 +8,11 @@ import java.util.Arrays; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; -import akka.actor.ActorRef; +import akka.NotUsed; import akka.japi.function.Function; -import akka.japi.function.Procedure; import akka.stream.*; import org.junit.ClassRule; import org.junit.Test; @@ -23,7 +23,6 @@ import scala.concurrent.duration.Duration; import akka.japi.function.Function2; import akka.stream.testkit.AkkaSpec; import akka.testkit.JavaTestKit; -import scala.runtime.BoxedUnit; public class SinkTest extends StreamTest { public SinkTest() { @@ -36,28 +35,24 @@ public class SinkTest extends StreamTest { @Test public void mustBeAbleToUseFanoutPublisher() throws Exception { - final Sink> pubSink = Sink.asPublisher(true); + final Sink> pubSink = Sink.asPublisher(AsPublisher.WITH_FANOUT); @SuppressWarnings("unused") final Publisher publisher = Source.from(new ArrayList()).runWith(pubSink, materializer); } @Test public void mustBeAbleToUseFuture() throws Exception { - final Sink> futSink = Sink.head(); + final Sink> futSink = Sink.head(); final List list = Collections.singletonList(1); - final Future future = Source.from(list).runWith(futSink, materializer); - assert Await.result(future, Duration.create("1 second")).equals(1); + final CompletionStage future = Source.from(list).runWith(futSink, materializer); + assert future.toCompletableFuture().get(1, TimeUnit.SECONDS).equals(1); } @Test public void mustBeAbleToUseFold() throws Exception { - Sink> foldSink = Sink.fold(0, new Function2() { - @Override public Integer apply(Integer arg1, Integer arg2) throws Exception { - return arg1 + arg2; - } - }); + Sink> foldSink = Sink.fold(0, (arg1, arg2) -> arg1 + arg2); @SuppressWarnings("unused") - Future integerFuture = Source.from(new ArrayList()).runWith(foldSink, materializer); + CompletionStage integerFuture = Source.from(new ArrayList()).runWith(foldSink, materializer); } @Test @@ -80,8 +75,8 @@ public class SinkTest extends StreamTest { final Sink sink2 = Sink.actorRef(probe2.getRef(), "done2"); final Sink sink = Sink.combine(sink1, sink2, new ArrayList>(), - new Function, BoxedUnit>>() { - public Graph, BoxedUnit> apply(Integer elem) { + new Function, NotUsed>>() { + public Graph, NotUsed> apply(Integer elem) { return Broadcast.create(elem); } } @@ -100,7 +95,7 @@ public class SinkTest extends StreamTest { public void mustSuitablyOverrideAttributeHandlingMethods() { @SuppressWarnings("unused") - final Sink> s = + final Sink> s = Sink. head().withAttributes(Attributes.name("")).addAttributes(Attributes.asyncBoundary()).named(""); } } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java index cca8422953..4b697d1b6b 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/SourceTest.java @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.javadsl; +import akka.Done; +import akka.NotUsed; import akka.actor.ActorRef; import akka.actor.Cancellable; import akka.dispatch.Foreach; @@ -11,6 +13,7 @@ import akka.dispatch.OnSuccess; import akka.japi.JavaPartialFunction; import akka.japi.Pair; import akka.japi.function.*; +import akka.japi.pf.PFBuilder; import akka.stream.*; import akka.stream.impl.ConstantFun; import akka.stream.stage.*; @@ -24,12 +27,15 @@ import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.Duration; import scala.concurrent.duration.FiniteDuration; -import scala.runtime.BoxedUnit; import scala.util.Try; import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.stream.Stream; import static akka.stream.testkit.StreamTestKit.PublisherProbeSubscription; import static akka.stream.testkit.TestPublisher.ManualProbe; @@ -50,34 +56,20 @@ public class SourceTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final String[] lookup = {"a", "b", "c", "d", "e", "f"}; final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); - final Source ints = Source.from(input); + final Source ints = Source.from(input); - ints.drop(2).take(3).takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS)).map(new Function() { - public String apply(Integer elem) { - return lookup[elem]; - } - }).filter(new Predicate() { - public boolean test(String elem) { - return !elem.equals("c"); - } - }).grouped(2).mapConcat(new Function, java.util.List>() { - public java.util.List apply(java.util.List elem) { - return elem; - } - }).groupedWithin(100, FiniteDuration.create(50, TimeUnit.MILLISECONDS)) - .mapConcat(new Function, java.util.List>() { - public java.util.List apply(java.util.List elem) { - return elem; - } - }).runFold("", new Function2() { - public String apply(String acc, String elem) { - return acc + elem; - } - }, materializer).foreach(new Foreach() { // Scala Future - public void each(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, system.dispatcher()); + ints + .drop(2) + .take(3) + .takeWithin(FiniteDuration.create(10, TimeUnit.SECONDS)) + .map(elem -> lookup[elem]) + .filter(elem -> !elem.equals("c")) + .grouped(2) + .mapConcat(elem -> elem) + .groupedWithin(100, FiniteDuration.create(50, TimeUnit.MILLISECONDS)) + .mapConcat(elem -> elem) + .runFold("", (acc, elem) -> acc + elem, materializer) + .thenAccept(elem -> probe.getRef().tell(elem, ActorRef.noSender())); probe.expectMsgEquals("de"); } @@ -86,25 +78,16 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseVoidTypeInForeach() { final JavaTestKit probe = new JavaTestKit(system); final java.lang.Iterable input = Arrays.asList("a", "b", "c"); - Source ints = Source.from(input); + Source ints = Source.from(input); - Future completion = ints.runForeach(new Procedure() { - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, materializer); + final CompletionStage completion = ints.runForeach(elem -> probe.getRef().tell(elem, ActorRef.noSender()), materializer); - completion.onSuccess(new OnSuccess() { - @Override - public void onSuccess(BoxedUnit elem) throws Throwable { - probe.getRef().tell(String.valueOf(elem), ActorRef.noSender()); - } - }, system.dispatcher()); + completion.thenAccept(elem -> 
probe.getRef().tell(String.valueOf(elem), ActorRef.noSender())); probe.expectMsgEquals("a"); probe.expectMsgEquals("b"); probe.expectMsgEquals("c"); - probe.expectMsgEquals("()"); + probe.expectMsgEquals("Done"); } @Ignore("StatefulStage to be converted to GraphStage when Java Api is available (#18817)") @Test @@ -164,7 +147,7 @@ public class SourceTest extends StreamTest { @Test public void mustBeAbleToUseGroupBy() throws Exception { final Iterable input = Arrays.asList("Aaa", "Abb", "Bcc", "Cdd", "Cee"); - final Source, BoxedUnit> source = Source + final Source, NotUsed> source = Source .from(input) .groupBy(3, new Function() { public String apply(String elem) { @@ -174,9 +157,9 @@ public class SourceTest extends StreamTest { .grouped(10) .mergeSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final Object[] result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)).toArray(); + final Object[] result = future.toCompletableFuture().get(1, TimeUnit.SECONDS).toArray(); Arrays.sort(result, (Comparator)(Object) new Comparator>() { @Override public int compare(List o1, List o2) { @@ -190,7 +173,7 @@ public class SourceTest extends StreamTest { @Test public void mustBeAbleToUseSplitWhen() throws Exception { final Iterable input = Arrays.asList("A", "B", "C", ".", "D", ".", "E", "F"); - final Source, BoxedUnit> source = Source + final Source, NotUsed> source = Source .from(input) .splitWhen(new Predicate() { public boolean test(String elem) { @@ -200,9 +183,9 @@ public class SourceTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C"), Arrays.asList(".", "D"), Arrays.asList(".", "E", "F")), result); } @@ -210,7 +193,7 @@ public class SourceTest extends StreamTest { @Test public void mustBeAbleToUseSplitAfter() throws Exception { final Iterable input = Arrays.asList("A", "B", "C", ".", "D", ".", "E", "F"); - final Source, BoxedUnit> source = Source + final Source, NotUsed> source = Source .from(input) .splitAfter(new Predicate() { public boolean test(String elem) { @@ -220,9 +203,9 @@ public class SourceTest extends StreamTest { .grouped(10) .concatSubstreams(); - final Future>> future = + final CompletionStage>> future = source.grouped(10).runWith(Sink.>> head(), materializer); - final List> result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); + final List> result = future.toCompletableFuture().get(1, TimeUnit.SECONDS); assertEquals(Arrays.asList(Arrays.asList("A", "B", "C", "."), Arrays.asList("D", "."), Arrays.asList("E", "F")), result); } @@ -233,8 +216,8 @@ public class SourceTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = Source.from(input1); - final Source in2 = Source.from(input2); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); in1.concat(in2).runForeach(new Procedure() { public void apply(String elem) { @@ -252,8 +235,8 @@ public class SourceTest extends StreamTest { final Iterable input1 = Arrays.asList("A", "B", "C"); final Iterable input2 = Arrays.asList("D", "E", "F"); - final Source in1 = 
Source.from(input1); - final Source in2 = Source.from(input2); + final Source in1 = Source.from(input1); + final Source in2 = Source.from(input2); in2.prepend(in1).runForeach(new Procedure() { public void apply(String elem) { @@ -291,14 +274,14 @@ public class SourceTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Source.from(input).runWith(Sink.onComplete(new Procedure>() { + Source.from(input).runWith(Sink.onComplete(new Procedure>() { @Override - public void apply(Try param) throws Exception { + public void apply(Try param) throws Exception { probe.getRef().tell(param.get(), ActorRef.noSender()); } }), materializer); - probe.expectMsgClass(BoxedUnit.class); + probe.expectMsgClass(Done.class); } @Test @@ -306,20 +289,16 @@ public class SourceTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Source.from(input).map(new Function() { - public String apply(String arg0) throws Exception { - throw new RuntimeException("simulated err"); - } - }).runWith(Sink.head(), materializer).onComplete(new OnSuccess>() { - @Override - public void onSuccess(Try e) throws Throwable { - if (e == null) { + Source.from(input) + . map(in -> { throw new RuntimeException("simulated err"); }) + .runWith(Sink.head(), materializer) + .whenComplete((s, ex) -> { + if (ex == null) { probe.getRef().tell("done", ActorRef.noSender()); } else { - probe.getRef().tell(e.failed().get().getMessage(), ActorRef.noSender()); + probe.getRef().tell(ex.getMessage(), ActorRef.noSender()); } - } - }, system.dispatcher()); + }); probe.expectMsgEquals("simulated err"); } @@ -328,8 +307,8 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseToFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Future future = Source.from(input).runWith(Sink.head(), materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).runWith(Sink.head(), materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ -337,14 +316,13 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUsePrefixAndTail() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList(1, 2, 3, 4, 5, 6); - Future, Source>> future = Source.from(input).prefixAndTail(3) - .runWith(Sink., Source>>head(), materializer); - Pair, Source> result = Await.result(future, - probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage, Source>> future = Source.from(input).prefixAndTail(3) + .runWith(Sink., Source>>head(), materializer); + Pair, Source> result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3), result.first()); - Future> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); - List tailResult = Await.result(tailFuture, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage> tailFuture = result.second().grouped(4).runWith(Sink.>head(), materializer); + List tailResult = tailFuture.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(4, 5, 6), tailResult); } @@ -354,16 +332,16 @@ public class SourceTest extends StreamTest { final Iterable input1 = Arrays.asList(1, 2, 3); final 
Iterable input2 = Arrays.asList(4, 5); - final List> mainInputs = new ArrayList>(); + final List> mainInputs = new ArrayList>(); mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); - Future> future = Source.from(mainInputs) - .flatMapConcat(ConstantFun.>javaIdentityFunction()) + CompletionStage> future = Source.from(mainInputs) + .flatMapConcat(ConstantFun.>javaIdentityFunction()) .grouped(6) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(Arrays.asList(1, 2, 3, 4, 5), result); } @@ -376,17 +354,17 @@ public class SourceTest extends StreamTest { final Iterable input3 = Arrays.asList(20, 21, 22, 23, 24, 25, 26, 27, 28, 29); final Iterable input4 = Arrays.asList(30, 31, 32, 33, 34, 35, 36, 37, 38, 39); - final List> mainInputs = new ArrayList>(); + final List> mainInputs = new ArrayList>(); mainInputs.add(Source.from(input1)); mainInputs.add(Source.from(input2)); mainInputs.add(Source.from(input3)); mainInputs.add(Source.from(input4)); - Future> future = Source.from(mainInputs) - .flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60) + CompletionStage> future = Source.from(mainInputs) + .flatMapMerge(3, ConstantFun.>javaIdentityFunction()).grouped(60) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); final Set set = new HashSet(); for (Integer i: result) { set.add(i); @@ -403,10 +381,10 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseBuffer() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future> future = Source.from(input).buffer(2, OverflowStrategy.backpressure()).grouped(4) + final CompletionStage> future = Source.from(input).buffer(2, OverflowStrategy.backpressure()).grouped(4) .runWith(Sink.>head(), materializer); - List result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + List result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(input, result); } @@ -414,23 +392,10 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseConflate() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future future = Source.from(input).conflate(new Function() { - @Override - public String apply(String s) throws Exception { - return s; - } - }, new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }).runFold("", new Function2() { - @Override - public String apply(String aggr, String in) throws Exception { - return aggr + in; - } - }, materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input) + .conflate(s -> s, (aggr, in) -> aggr + in) + .runFold("", (aggr, in) -> aggr + in, materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("ABC", result); } @@ -438,18 +403,8 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseExpand() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final List input = Arrays.asList("A", "B", "C"); - Future future = 
Source.from(input).expand(new Function() { - @Override - public String apply(String in) throws Exception { - return in; - } - }, new Function>() { - @Override - public Pair apply(String in) throws Exception { - return new Pair(in, in); - } - }).runWith(Sink.head(), materializer); - String result = Await.result(future, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future = Source.from(input).expand(in -> Stream.iterate(in, i -> i).iterator()).runWith(Sink.head(), materializer); + String result = future.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @@ -458,6 +413,7 @@ public class SourceTest extends StreamTest { final JavaTestKit probe = new JavaTestKit(system); Source tickSource = Source.tick(FiniteDuration.create(1, TimeUnit.SECONDS), FiniteDuration.create(500, TimeUnit.MILLISECONDS), "tick"); + @SuppressWarnings("unused") Cancellable cancellable = tickSource.to(Sink.foreach(new Procedure() { public void apply(String elem) { probe.getRef().tell(elem, ActorRef.noSender()); @@ -475,15 +431,9 @@ public class SourceTest extends StreamTest { public void mustBeAbleToUseMapFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("a", "b", "c"); - Source.from(input).mapAsync(4, new Function>() { - public Future apply(String elem) { - return Futures.successful(elem.toUpperCase()); - } - }).runForeach(new Procedure() { - public void apply(String elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }, materializer); + Source.from(input) + .mapAsync(4, elem -> CompletableFuture.completedFuture(elem.toUpperCase())) + .runForeach(elem -> probe.getRef().tell(elem, ActorRef.noSender()), materializer); probe.expectMsgEquals("A"); probe.expectMsgEquals("B"); probe.expectMsgEquals("C"); @@ -493,16 +443,16 @@ public class SourceTest extends StreamTest { public void mustWorkFromFuture() throws Exception { final JavaTestKit probe = new JavaTestKit(system); final Iterable input = Arrays.asList("A", "B", "C"); - Future future1 = Source.from(input).runWith(Sink.head(), materializer); - Future future2 = Source.fromFuture(future1).runWith(Sink.head(), materializer); - String result = Await.result(future2, probe.dilated(FiniteDuration.create(3, TimeUnit.SECONDS))); + CompletionStage future1 = Source.from(input).runWith(Sink.head(), materializer); + CompletionStage future2 = Source.fromCompletionStage(future1).runWith(Sink.head(), materializer); + String result = future2.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals("A", result); } @Test public void mustWorkFromRange() throws Exception { - Future> f = Source.range(0, 10).grouped(20).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + CompletionStage> f = Source.range(0, 10).grouped(20).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(11, result.size()); Integer counter = 0; for (Integer i: result) @@ -511,8 +461,8 @@ public class SourceTest extends StreamTest { @Test public void mustWorkFromRangeWithStep() throws Exception { - Future> f = Source.range(0, 10, 2).grouped(20).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + CompletionStage> f = Source.range(0, 10, 2).grouped(20).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(6, 
result.size()); Integer counter = 0; for (Integer i: result) { @@ -523,8 +473,8 @@ public class SourceTest extends StreamTest { @Test public void mustRepeat() throws Exception { - final Future> f = Source.repeat(42).grouped(10000).runWith(Sink.> head(), materializer); - final List result = Await.result(f, FiniteDuration.create(3, TimeUnit.SECONDS)); + final CompletionStage> f = Source.repeat(42).grouped(10000).runWith(Sink.> head(), materializer); + final List result = f.toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals(result.size(), 10000); for (Integer i: result) assertEquals(i, (Integer) 42); } @@ -547,39 +497,31 @@ public class SourceTest extends StreamTest { @Test public void mustBeAbleToUseDropWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).dropWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).dropWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(2); probe.expectMsgEquals(3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToUseTakeWhile() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).takeWhile + final Source source = Source.from(Arrays.asList(0, 1, 2, 3)).takeWhile (new Predicate() { public boolean test(Integer elem) { return elem < 2; } }); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgEquals(0); probe.expectMsgEquals(1); @@ -587,7 +529,7 @@ public class SourceTest extends StreamTest { FiniteDuration duration = Duration.apply(200, TimeUnit.MILLISECONDS); probe.expectNoMsg(duration); - Await.ready(future, duration); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -595,56 +537,41 @@ public class SourceTest extends StreamTest { final ManualProbe publisherProbe = TestPublisher.manualProbe(true,system); final JavaTestKit probe = new JavaTestKit(system); - final Source source = Source.fromPublisher(publisherProbe).map( - new Function() { - public Integer apply(Integer elem) { + final Source source = + Source.fromPublisher(publisherProbe) + .map(elem -> { if (elem == 1) throw new RuntimeException("ex"); else return elem; - } - }) - .recover(new JavaPartialFunction() { - public Integer apply(Throwable elem, boolean isCheck) { - if (isCheck) return null; - return 0; - } - }); + }) + .recover(new PFBuilder() + .matchAny(ex -> 0) + .build()); - final Future future = source.runWith(Sink.foreach(new Procedure() { - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); final 
PublisherProbeSubscription s = publisherProbe.expectSubscription(); s.sendNext(0); probe.expectMsgEquals(0); s.sendNext(1); probe.expectMsgEquals(0); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test public void mustBeAbleToCombine() throws Exception { final JavaTestKit probe = new JavaTestKit(system); - final Source source1 = Source.from(Arrays.asList(0, 1)); - final Source source2 = Source.from(Arrays.asList(2, 3)); + final Source source1 = Source.from(Arrays.asList(0, 1)); + final Source source2 = Source.from(Arrays.asList(2, 3)); - final Source source = Source.combine(source1, source2, new ArrayList>(), - new Function, BoxedUnit>>() { - public Graph, BoxedUnit> apply(Integer elem) { - return Merge.create(elem); - } - }); + final Source source = Source.combine( + source1, source2, new ArrayList>(), + width -> Merge. create(width)); - final Future future = source.runWith(Sink.foreach(new Procedure() { // Scala Future - public void apply(Integer elem) { - probe.getRef().tell(elem, ActorRef.noSender()); - } - }), materializer); + final CompletionStage future = source.runWith(Sink.foreach(elem -> probe.getRef().tell(elem, ActorRef.noSender())), materializer); probe.expectMsgAllOf(0, 1, 2, 3); - Await.ready(future, Duration.apply(200, TimeUnit.MILLISECONDS)); + future.toCompletableFuture().get(200, TimeUnit.MILLISECONDS); } @Test @@ -717,48 +644,53 @@ public class SourceTest extends StreamTest { @Test - public void mustBeAbleToUseInitialTimeout() throws Exception { + public void mustBeAbleToUseInitialTimeout() throws Throwable { try { - Await.result( - Source.maybe().initialTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { - // expected - } - } - - - @Test - public void mustBeAbleToUseCompletionTimeout() throws Exception { - try { - Await.result( - Source.maybe().completionTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source.maybe().initialTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { // expected } } @Test - public void mustBeAbleToUseIdleTimeout() throws Exception { + public void mustBeAbleToUseCompletionTimeout() throws Throwable { try { - Await.result( - Source.maybe().idleTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); - fail("A TimeoutException was expected"); - } catch(TimeoutException e) { + try { + Source.maybe().completionTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch (TimeoutException e) { + // expected + } + } + + @Test + public void mustBeAbleToUseIdleTimeout() throws Throwable { + try { + try { + Source.maybe().idleTimeout(Duration.create(1, "second")).runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); + fail("A TimeoutException was expected"); + } catch (ExecutionException e) { + throw e.getCause(); + } + } catch 
(TimeoutException e) { // expected } } @Test public void mustBeAbleToUseIdleInject() throws Exception { - Integer result = Await.result( + Integer result = Source.maybe() .keepAlive(Duration.create(1, "second"), new Creator() { public Integer create() { @@ -766,16 +698,15 @@ public class SourceTest extends StreamTest { } }) .takeWithin(Duration.create(1500, "milliseconds")) - .runWith(Sink.head(), materializer), - Duration.create(3, "second") - ); + .runWith(Sink.head(), materializer) + .toCompletableFuture().get(3, TimeUnit.SECONDS); assertEquals((Object) 0, result); } public void mustSuitablyOverrideAttributeHandlingMethods() { @SuppressWarnings("unused") - final Source f = + final Source f = Source.single(42).withAttributes(Attributes.name("")).addAttributes(Attributes.asyncBoundary()).named(""); } } diff --git a/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java b/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java index e49d4eb134..23d6924ee1 100644 --- a/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/javadsl/TcpTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.javadsl; @@ -8,8 +8,13 @@ import static org.junit.Assert.assertTrue; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.net.BindException; + +import akka.Done; +import akka.NotUsed; import org.junit.ClassRule; import org.junit.Test; import scala.concurrent.Await; @@ -34,7 +39,7 @@ public class TcpTest extends StreamTest { public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource("TcpTest", AkkaSpec.testConf()); - final Sink> echoHandler = + final Sink> echoHandler = Sink.foreach(new Procedure() { public void apply(IncomingConnection conn) { conn.handleWith(Flow.of(ByteString.class), materializer); @@ -51,17 +56,16 @@ public class TcpTest extends StreamTest { @Test public void mustWorkInHappyCase() throws Exception { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress("127.0.0.1", false); - final Source> binding = Tcp.get(system) + final Source> binding = Tcp.get(system) .bind(serverAddress.getHostName(), serverAddress.getPort()); // TODO getHostString in Java7 - final Future future = binding.to(echoHandler).run(materializer); - final ServerBinding b = Await.result(future, FiniteDuration.create(5, TimeUnit.SECONDS)); + final CompletionStage future = binding.to(echoHandler).run(materializer); + final ServerBinding b = future.toCompletableFuture().get(5, TimeUnit.SECONDS); assertEquals(b.localAddress().getPort(), serverAddress.getPort()); - final Future resultFuture = Source + final CompletionStage resultFuture = Source .from(testInput) - // TODO getHostString in Java7 - .via(Tcp.get(system).outgoingConnection(serverAddress.getHostName(), serverAddress.getPort())) + .via(Tcp.get(system).outgoingConnection(serverAddress.getHostString(), serverAddress.getPort())) .runFold(ByteString.empty(), new Function2() { public ByteString apply(ByteString acc, ByteString elem) { @@ -69,7 +73,7 @@ public class TcpTest extends StreamTest { } }, materializer); - final byte[] result = Await.result(resultFuture, FiniteDuration.create(5, TimeUnit.SECONDS)).toArray(); + final byte[] result = resultFuture.toCompletableFuture().get(5, 
TimeUnit.SECONDS).toArray(); for (int i = 0; i < testInput.size(); i ++) { assertEquals(testInput.get(i).head(), result[i]); } @@ -78,11 +82,11 @@ public class TcpTest extends StreamTest { @Test public void mustReportServerBindFailure() throws Exception { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress("127.0.0.1", false); - final Source> binding = Tcp.get(system) + final Source> binding = Tcp.get(system) .bind(serverAddress.getHostName(), serverAddress.getPort()); // TODO getHostString in Java7 - final Future future = binding.to(echoHandler).run(materializer); - final ServerBinding b = Await.result(future, FiniteDuration.create(5, TimeUnit.SECONDS)); + final CompletionStage future = binding.to(echoHandler).run(materializer); + final ServerBinding b = future.toCompletableFuture().get(5, TimeUnit.SECONDS); assertEquals(b.localAddress().getPort(), serverAddress.getPort()); new JavaTestKit(system) {{ @@ -90,9 +94,11 @@ public class TcpTest extends StreamTest { @Override protected Void run() { try { - Await.result(binding.to(echoHandler).run(materializer), FiniteDuration.create(5, TimeUnit.SECONDS)); + binding.to(echoHandler).run(materializer).toCompletableFuture().get(5, TimeUnit.SECONDS); assertTrue("Expected BindFailedException, but nothing was reported", false); - } catch (BindFailedException e) { + } catch (ExecutionException e) { + if (e.getCause() instanceof BindFailedException) {} // all good + else throw new AssertionError("failed", e); // expected } catch (Exception e) { throw new AssertionError("failed", e); @@ -104,19 +110,19 @@ public class TcpTest extends StreamTest { } @Test - public void mustReportClientConnectFailure() throws Exception { + public void mustReportClientConnectFailure() throws Throwable { final InetSocketAddress serverAddress = TestUtils.temporaryServerAddress( "127.0.0.1", false); try { - Await.result( - Source.from(testInput) - // TODO getHostString in Java7 - .viaMat(Tcp.get(system).outgoingConnection(serverAddress.getHostName(), serverAddress.getPort()), - Keep.> right()) - .to(Sink. ignore()) - .run(materializer), - FiniteDuration.create(5, TimeUnit.SECONDS)); - assertTrue("Expected StreamTcpException, but nothing was reported", false); + try { + Source.from(testInput) + .viaMat(Tcp.get(system).outgoingConnection(serverAddress.getHostString(), serverAddress.getPort()), + Keep.right()) + .to(Sink. ignore()).run(materializer).toCompletableFuture().get(5, TimeUnit.SECONDS); + assertTrue("Expected StreamTcpException, but nothing was reported", false); + } catch (ExecutionException e) { + throw e.getCause(); + } } catch (StreamTcpException e) { // expected } diff --git a/akka-stream-tests/src/test/java/akka/stream/stage/JavaIdentityStage.java b/akka-stream-tests/src/test/java/akka/stream/stage/JavaIdentityStage.java index e256c0d946..b01aae2760 100644 --- a/akka-stream-tests/src/test/java/akka/stream/stage/JavaIdentityStage.java +++ b/akka-stream-tests/src/test/java/akka/stream/stage/JavaIdentityStage.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.stage; diff --git a/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java b/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java index ad05c98c85..9a899278f0 100644 --- a/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java +++ b/akka-stream-tests/src/test/java/akka/stream/stage/StageTest.java @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.stage; +import akka.NotUsed; import akka.stream.StreamTest; import akka.stream.javadsl.AkkaJUnitActorSystemResource; import akka.stream.javadsl.Sink; @@ -19,6 +20,8 @@ import scala.concurrent.duration.Duration; import java.util.Arrays; import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; public class StageTest extends StreamTest { public StageTest() { @@ -32,16 +35,16 @@ public class StageTest extends StreamTest { @Test public void javaStageUsage() throws Exception { final java.lang.Iterable input = Arrays.asList(0, 1, 2, 3, 4, 5); - final Source ints = Source.from(input); + final Source ints = Source.from(input); final JavaIdentityStage identity = new JavaIdentityStage(); - final Future> result = + final CompletionStage> result = ints .via(identity) .via(identity) .grouped(1000) .runWith(Sink.>head(), materializer); - assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), Await.result(result, Duration.create(3, "seconds"))); + assertEquals(Arrays.asList(0, 1, 2, 3, 4, 5), result.toCompletableFuture().get(3, TimeUnit.SECONDS)); } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/ActorMaterializerSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/ActorMaterializerSpec.scala index 47a6ebdcbe..70f6946a69 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/ActorMaterializerSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/ActorMaterializerSpec.scala @@ -61,8 +61,7 @@ class ActorMaterializerSpec extends AkkaSpec with ImplicitSender { "report correctly if it has been shut down from the side" in { val sys = ActorSystem() val m = ActorMaterializer.create(sys) - sys.shutdown() - sys.awaitTermination() + Await.result(sys.terminate(), Duration.Inf) m.isShutdown should ===(true) } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/DslConsistencySpec.scala b/akka-stream-tests/src/test/scala/akka/stream/DslConsistencySpec.scala index 18d0f05b03..e1d63d35cf 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/DslConsistencySpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/DslConsistencySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
 */
package akka.stream
@@ -61,12 +61,12 @@ class DslConsistencySpec extends WordSpec with Matchers {
  "Java and Scala DSLs" must {
-      ("Source" -> List(sSourceClass, jSourceClass)) ::
-      ("SubSource" -> List(sSubSourceClass, jSubSourceClass)) ::
-      ("Flow" -> List(sFlowClass, jFlowClass)) ::
-      ("SubFlow" -> List(sSubFlowClass, jSubFlowClass)) ::
-      ("Sink" -> List(sSinkClass, jSinkClass)) ::
-      ("RunanbleFlow" -> List(sRunnableGraphClass, jRunnableGraphClass)) ::
+      ("Source" -> List[Class[_]](sSourceClass, jSourceClass)) ::
+      ("SubSource" -> List[Class[_]](sSubSourceClass, jSubSourceClass)) ::
+      ("Flow" -> List[Class[_]](sFlowClass, jFlowClass)) ::
+      ("SubFlow" -> List[Class[_]](sSubFlowClass, jSubFlowClass)) ::
+      ("Sink" -> List[Class[_]](sSinkClass, jSinkClass)) ::
+      ("RunanbleFlow" -> List[Class[_]](sRunnableGraphClass, jRunnableGraphClass)) ::
      Nil foreach {
        case (element, classes) ⇒
diff --git a/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala b/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala
index 8924f4b78c..5029c36027 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala
@@ -1,5 +1,5 @@
/**
- * Copyright (C) 2014 Typesafe Inc.
+ * Copyright (C) 2014-2016 Typesafe Inc.
 */
package akka.stream
@@ -30,6 +30,7 @@ class DslFactoriesConsistencySpec extends WordSpec with Matchers {
    (classOf[scala.collection.immutable.Iterable[_]], classOf[java.lang.Iterable[_]]) ::
    (classOf[scala.collection.Iterator[_]], classOf[java.util.Iterator[_]]) ::
    (classOf[scala.collection.Seq[_]], classOf[java.util.List[_]]) ::
+    (classOf[Boolean], classOf[akka.stream.javadsl.AsPublisher]) ::
    (classOf[scala.Function0[_]], classOf[akka.japi.function.Creator[_]]) ::
    (classOf[scala.Function0[_]], classOf[java.util.concurrent.Callable[_]]) ::
    (classOf[scala.Function0[_]], classOf[akka.japi.function.Creator[_]]) ::
diff --git a/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala
index a997dab019..cf41f231b0 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala
@@ -1,5 +1,5 @@
/**
- * Copyright (C) 2015 Typesafe Inc.
+ * Copyright (C) 2015-2016 Typesafe Inc.
 */
package akka.stream
@@ -10,7 +10,7 @@ import org.scalactic.ConversionCheckedTripleEquals
 import akka.stream.Attributes._
 import akka.stream.Fusing.FusedGraph
 import scala.annotation.tailrec
-import akka.stream.impl.StreamLayout.Module
+import akka.stream.impl.StreamLayout.{ CopiedModule, Module }
 import org.scalatest.concurrent.ScalaFutures
 import scala.concurrent.duration._
 import akka.stream.impl.fusing.GraphInterpreter
@@ -23,7 +23,7 @@ class FusingSpec extends AkkaSpec with ScalaFutures with ConversionCheckedTriple
  implicit val patience = PatienceConfig(1.second)
  def graph(async: Boolean) =
-    Source.unfoldInf(1)(x ⇒ (x, x)).filter(_ % 2 == 1)
+    Source.unfold(1)(x ⇒ Some(x -> x)).filter(_ % 2 == 1)
      .alsoTo(Flow[Int].fold(0)(_ + _).to(Sink.head.named("otherSink")).addAttributes(if (async) Attributes.asyncBoundary else Attributes.none))
      .via(Flow[Int].fold(1)(_ + _).named("mainSink"))
@@ -36,13 +36,13 @@ class FusingSpec extends AkkaSpec with ScalaFutures with ConversionCheckedTriple
    @tailrec def rec(curr: Module): Unit = {
      if (Debug) println(extractName(curr, "unknown"))
-      if (curr.attributes.contains(to)) () // done
-      else {
-        val outs = curr.inPorts.map(ups)
-        outs.size should ===(1)
-        val out = outs.head
-        val next = owner(out)
-        rec(next)
+      curr match {
+        case CopiedModule(_, attributes, copyOf) if (attributes and copyOf.attributes).contains(to) ⇒ ()
+        case other if other.attributes.contains(to) ⇒ ()
+        case _ ⇒
+          val outs = curr.inPorts.map(ups)
+          outs.size should ===(1)
+          rec(owner(outs.head))
      }
    }
@@ -57,8 +57,8 @@ class FusingSpec extends AkkaSpec with ScalaFutures with ConversionCheckedTriple
    module.downstreams.size should ===(modules - 1)
    module.info.downstreams.size should be >= downstreams
    module.info.upstreams.size should be >= downstreams
-    singlePath(fused, Attributes.Name("mainSink"), Attributes.Name("unfoldInf"))
-    singlePath(fused, Attributes.Name("otherSink"), Attributes.Name("unfoldInf"))
+    singlePath(fused, Attributes.Name("mainSink"), Attributes.Name("unfold"))
+    singlePath(fused, Attributes.Name("otherSink"), Attributes.Name("unfold"))
  }
  "fuse a moderately complex graph" in {
diff --git a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala
index aeebcc0090..027b49bddb 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorPublisherSpec.scala
@@ -1,5 +1,5 @@
/**
- * Copyright (C) 2014 Typesafe Inc.
+ * Copyright (C) 2014-2016 Typesafe Inc.
 */
package akka.stream.actor
diff --git a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala
index 32132014fe..231b8e7382 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/actor/ActorSubscriberSpec.scala
@@ -1,5 +1,5 @@
/**
- * Copyright (C) 2014 Typesafe Inc.
+ * Copyright (C) 2014-2016 Typesafe Inc.
 */
package akka.stream.actor
@@ -25,13 +25,13 @@ object ActorSubscriberSpec {
    override val requestStrategy = ZeroRequestStrategy
    def receive = {
-      case next @ OnNext(elem) ⇒ probe ! next
-      case complete @ OnComplete ⇒ probe ! complete
-      case err @ OnError(cause) ⇒ probe !
err - case "ready" ⇒ request(elements = 2) - case "boom" ⇒ throw new RuntimeException("boom") with NoStackTrace - case "requestAndCancel" ⇒ { request(1); cancel() } - case "cancel" ⇒ cancel() + case next @ OnNext(elem) ⇒ probe ! next + case OnComplete ⇒ probe ! OnComplete + case err @ OnError(cause) ⇒ probe ! err + case "ready" ⇒ request(elements = 2) + case "boom" ⇒ throw new RuntimeException("boom") with NoStackTrace + case "requestAndCancel" ⇒ { request(1); cancel() } + case "cancel" ⇒ cancel() } } @@ -55,8 +55,8 @@ object ActorSubscriberSpec { override val requestStrategy = strat def receive = { - case next @ OnNext(elem) ⇒ probe ! next - case complete @ OnComplete ⇒ probe ! complete + case next @ OnNext(elem) ⇒ probe ! next + case OnComplete ⇒ probe ! OnComplete } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/extra/FlowTimedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/extra/FlowTimedSpec.scala index c61a38888f..dbf5f4818e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/extra/FlowTimedSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/extra/FlowTimedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.extra diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/FixedBufferSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/FixedBufferSpec.scala index 3f596f73c5..32978d281b 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/FixedBufferSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/FixedBufferSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/GraphStageLogicSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/GraphStageLogicSpec.scala index 36cab1a259..afb98addaf 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/GraphStageLogicSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/GraphStageLogicSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl @@ -13,9 +13,10 @@ import akka.stream.testkit.scaladsl.TestSink import akka.stream.impl.fusing._ import akka.stream.impl.fusing.GraphInterpreter._ import org.scalactic.ConversionCheckedTripleEquals +import org.scalatest.concurrent.ScalaFutures import scala.concurrent.duration.Duration -class GraphStageLogicSpec extends AkkaSpec with GraphInterpreterSpecKit with ConversionCheckedTripleEquals { +class GraphStageLogicSpec extends AkkaSpec with GraphInterpreterSpecKit with ConversionCheckedTripleEquals with ScalaFutures { implicit val materializer = ActorMaterializer() @@ -67,6 +68,18 @@ class GraphStageLogicSpec extends AkkaSpec with GraphInterpreterSpecKit with Con } } + object emitEmptyIterable extends GraphStage[SourceShape[Int]] { + val out = Outlet[Int]("out") + override val shape = SourceShape(out) + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + + setHandler(out, new OutHandler { + override def onPull(): Unit = emitMultiple(out, Iterator.empty, () ⇒ emit(out, 42, () ⇒ completeStage())) + }) + + } + } + "A GraphStageLogic" must { "emit all things before completing" in assertAllStagesStopped { @@ -96,6 +109,12 @@ class GraphStageLogicSpec extends AkkaSpec with GraphInterpreterSpecKit with Con .expectComplete() } + "emit properly after empty iterable" in assertAllStagesStopped { + + Source.fromGraph(emitEmptyIterable).runWith(Sink.seq).futureValue should ===(List(42)) + + } + "invoke lifecycle hooks in the right order" in assertAllStagesStopped { val g = new GraphStage[FlowShape[Int, Int]] { val in = Inlet[Int]("in") diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/ResizableMultiReaderRingBufferSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/ResizableMultiReaderRingBufferSpec.scala index dd0f91ec26..76c211d84c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/ResizableMultiReaderRingBufferSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/ResizableMultiReaderRingBufferSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala index 2a1674a25f..bd14d3f1e8 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/StreamLayoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/TimeoutsSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/TimeoutsSpec.scala index fdd1dfae06..9ba1b4bead 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/TimeoutsSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/TimeoutsSpec.scala @@ -1,11 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.impl import java.util.concurrent.TimeoutException +import akka.Done import akka.stream.scaladsl._ import akka.stream.testkit.Utils._ import akka.stream.testkit.{ AkkaSpec, TestPublisher, TestSubscriber } @@ -147,7 +148,7 @@ class TimeoutsSpec extends AkkaSpec { val upstream = Flow.fromSinkAndSourceMat(Sink.ignore, Source.fromPublisher(upstreamWriter))(Keep.left) val downstream = Flow.fromSinkAndSourceMat(Sink.ignore, Source.fromPublisher(downstreamWriter))(Keep.left) - val assembly: RunnableGraph[(Future[Unit], Future[Unit])] = upstream + val assembly: RunnableGraph[(Future[Done], Future[Done])] = upstream .joinMat(BidiFlow.bidirectionalIdleTimeout[Int, String](2.seconds))(Keep.left) .joinMat(downstream)(Keep.both) diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/ActorGraphInterpreterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/ActorGraphInterpreterSpec.scala index 8c806e17ab..fec7e4e93f 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/ActorGraphInterpreterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/ActorGraphInterpreterSpec.scala @@ -1,13 +1,15 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing +import java.util.concurrent.CountDownLatch + import akka.stream._ import akka.stream.scaladsl._ import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler } -import akka.stream.testkit.AkkaSpec import akka.stream.testkit.Utils._ +import akka.stream.testkit.{ AkkaSpec, TestPublisher, TestSubscriber } import akka.testkit.EventFilter import scala.concurrent.Await @@ -256,5 +258,79 @@ class ActorGraphInterpreterSpec extends AkkaSpec { } + "be able to properly handle case where a stage fails before subscription happens" in assertAllStagesStopped { + + // Fuzzing needs to be off, so that the failure can propagate to the output boundary before the ExposedPublisher + // message. + val noFuzzMat = ActorMaterializer(ActorMaterializerSettings(system).withFuzzing(false)) + + val te = TE("Test failure in preStart") + + val evilLatch = new CountDownLatch(1) + + /* + * This is a somewhat tricky test setup. We need the following conditions to be met: + * - the stage should fail its output port before the ExposedPublisher message is processed + * - the enclosing actor (and therefore the stage) should be kept alive until a stray SubscribePending arrives + * that has been enqueued after ExposedPublisher message has been enqueued, but before it has been processed + * + * To achieve keeping alive the stage for long enough, we use an extra input and output port and instead + * of failing the stage, we fail only the output port under test. + * + * To delay the startup long enough, so both ExposedPublisher and SubscribePending are enqueued, we use an evil + * latch to delay the preStart() (which in turn delays the enclosing actor's preStart). 
+ * + */ + + val failyStage = new GraphStage[FanOutShape2[Int, Int, Int]] { + override val shape: FanOutShape2[Int, Int, Int] = new FanOutShape2( + Inlet[Int]("test.in"), + Outlet[Int]("test.out0"), + Outlet[Int]("test.out1")) + + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + + override def preStart(): Unit = { + pull(shape.in) + evilLatch.await() + fail(shape.out0, te) + } + + setHandler(shape.out0, ignoreTerminateOutput) //We fail in preStart anyway + setHandler(shape.out1, ignoreTerminateOutput) //We fail in preStart anyway + passAlong(shape.in, shape.out1) + } + } + + val downstream0 = TestSubscriber.probe[Int]() + val downstream1 = TestSubscriber.probe[Int]() + + val upstream = TestPublisher.probe[Int]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + import GraphDSL.Implicits._ + val faily = b.add(failyStage) + + Source.fromPublisher(upstream) ~> faily.in + faily.out0 ~> Sink.fromSubscriber(downstream0) + faily.out1 ~> Sink.fromSubscriber(downstream1) + + ClosedShape + }).run()(noFuzzMat) + + evilLatch.countDown() + downstream0.expectSubscriptionAndError(te) + + // If an NPE would happen due to unset exposedPublisher (see #19338) this would receive a failure instead + // of the actual element + downstream1.request(1) + upstream.sendNext(42) + downstream1.expectNext(42) + + upstream.sendComplete() + downstream1.expectComplete() + + } + } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterPortsSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterPortsSpec.scala index 3336dc541c..ee2871809c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterPortsSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterPortsSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpec.scala index 06a0060251..0eb410dec9 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
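
The ActorGraphInterpreterSpec test above needs a deterministic event ordering, so it builds a materializer with fuzzing switched off. A small sketch of that setup; only `withFuzzing` comes from the diff, the surrounding names are illustrative.

```
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, ActorMaterializerSettings }

object NoFuzzSketch extends App {
  implicit val system = ActorSystem("sketch")

  // Fuzzing randomises how the interpreter interleaves events to shake out
  // ordering bugs; tests that depend on a precise ordering disable it explicitly.
  val noFuzzMat = ActorMaterializer(
    ActorMaterializerSettings(system).withFuzzing(false))

  // ... run the graph with `noFuzzMat` instead of the implicit materializer ...
  system.terminate()
}
```
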
*/ package akka.stream.impl.fusing +import akka.NotUsed import akka.stream.{ OverflowStrategy, Attributes } import akka.stream.stage.AbstractStage.PushPullGraphStage import akka.stream.testkit.AkkaSpec @@ -121,10 +122,7 @@ class GraphInterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should ===(Set.empty) source2.onNext("Meaning of life") - lastEvents() should ===(Set(OnNext(sink, (42, "Meaning of life")))) - - sink.requestOne() - lastEvents() should ===(Set(RequestOne(source1), RequestOne(source2))) + lastEvents() should ===(Set(OnNext(sink, (42, "Meaning of life")), RequestOne(source1), RequestOne(source2))) } "implement Broadcast" in new TestSetup { @@ -169,13 +167,11 @@ class GraphInterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should ===(Set(RequestOne(source))) source.onNext(1) - lastEvents() should ===(Set(OnNext(sink, (1, 1)))) + lastEvents() should ===(Set(OnNext(sink, (1, 1)), RequestOne(source))) sink.requestOne() - lastEvents() should ===(Set(RequestOne(source))) - source.onNext(2) - lastEvents() should ===(Set(OnNext(sink, (2, 2)))) + lastEvents() should ===(Set(OnNext(sink, (2, 2)), RequestOne(source))) } @@ -198,16 +194,15 @@ class GraphInterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should ===(Set.empty) sink1.requestOne() - lastEvents() should ===(Set.empty) + lastEvents() should ===(Set(RequestOne(source1), RequestOne(source2))) sink2.requestOne() - lastEvents() should ===(Set(RequestOne(source1), RequestOne(source2))) source1.onNext(1) lastEvents() should ===(Set.empty) source2.onNext(2) - lastEvents() should ===(Set(OnNext(sink1, (1, 2)), OnNext(sink2, (1, 2)))) + lastEvents() should ===(Set(OnNext(sink1, (1, 2)), OnNext(sink2, (1, 2)), RequestOne(source1), RequestOne(source2))) } @@ -346,7 +341,7 @@ class GraphInterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { "implement buffer" in new TestSetup { val source = new UpstreamProbe[String]("source") val sink = new DownstreamProbe[String]("sink") - val buffer = new PushPullGraphStage[String, String, Unit]( + val buffer = new PushPullGraphStage[String, String, NotUsed]( (_) ⇒ new Buffer[String](2, OverflowStrategy.backpressure), Attributes.none) diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala index d408e5dc96..5da19e66d5 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl.fusing @@ -298,14 +298,20 @@ trait GraphInterpreterSpecKit extends AkkaSpec { .init() } - abstract class OneBoundedSetup[T](ops: Array[GraphStageWithMaterializedValue[Shape, Any]]) extends Builder { + implicit class ToGraphStage[I, O](stage: Stage[I, O]) { + def toGS: PushPullGraphStage[Any, Any, Any] = { + val s = stage + new PushPullGraphStage[Any, Any, Any]( + (_) ⇒ s.asInstanceOf[Stage[Any, Any]], + Attributes.none) + } + } - def this(ops: Iterable[Stage[_, _]]) = { - this(ops.map { op ⇒ - new PushPullGraphStage[Any, Any, Any]( - (_) ⇒ op.asInstanceOf[Stage[Any, Any]], - Attributes.none) - }.toArray.asInstanceOf[Array[GraphStageWithMaterializedValue[Shape, Any]]]) + abstract class OneBoundedSetup[T](_ops: GraphStageWithMaterializedValue[Shape, Any]*) extends Builder { + val ops = _ops.toArray + + def this(op: Seq[Stage[_, _]], dummy: Int = 42) = { + this(op.map(_.toGS): _*) } val upstream = new UpstreamOneBoundedProbe[T] @@ -339,7 +345,7 @@ trait GraphInterpreterSpecKit extends AkkaSpec { outOwners(0) = Boundary while (i < ops.length) { - val stage = ops(i).asInstanceOf[PushPullGraphStage[_, _, _]] + val stage = ops(i).asInstanceOf[GraphStageWithMaterializedValue[FlowShape[_, _], _]] ins(i) = stage.shape.in inOwners(i) = i outs(i + 1) = stage.shape.out @@ -364,8 +370,8 @@ trait GraphInterpreterSpecKit extends AkkaSpec { events } - class UpstreamOneBoundedProbe[T] extends UpstreamBoundaryStageLogic[T] { - val out = Outlet[T]("out") + class UpstreamOneBoundedProbe[TT] extends UpstreamBoundaryStageLogic[TT] { + val out = Outlet[TT]("out") out.id = 0 setHandler(out, new OutHandler { @@ -377,7 +383,7 @@ trait GraphInterpreterSpecKit extends AkkaSpec { override def onDownstreamFinish(): Unit = lastEvent += Cancel }) - def onNext(elem: T): Unit = { + def onNext(elem: TT): Unit = { push(out, elem) run() } @@ -386,7 +392,7 @@ trait GraphInterpreterSpecKit extends AkkaSpec { run() } - def onNextAndComplete(elem: T): Unit = { + def onNextAndComplete(elem: TT): Unit = { push(out, elem) complete(out) run() @@ -398,8 +404,8 @@ trait GraphInterpreterSpecKit extends AkkaSpec { } } - class DownstreamOneBoundedPortProbe[T] extends DownstreamBoundaryStageLogic[T] { - val in = Inlet[T]("in") + class DownstreamOneBoundedPortProbe[TT] extends DownstreamBoundaryStageLogic[TT] { + val in = Inlet[TT]("in") in.id = 0 setHandler(in, new InHandler { diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSpec.scala index fe1c3cd34b..a0299b90d0 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.impl.fusing @@ -273,9 +273,7 @@ class InterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should be(Set(Cancel)) } - "implement expand" in new OneBoundedSetup[Int](Seq(Expand( - (in: Int) ⇒ in, - (agg: Int) ⇒ (agg, agg)))) { + "implement expand" in new OneBoundedSetup[Int](new Expand(Iterator.continually(_: Int))) { lastEvents() should be(Set(RequestOne)) @@ -339,13 +337,9 @@ class InterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { } - "work with expand-expand" in new OneBoundedSetup[Int](Seq( - Expand( - (in: Int) ⇒ in, - (agg: Int) ⇒ (agg, agg + 1)), - Expand( - (in: Int) ⇒ in, - (agg: Int) ⇒ (agg, agg + 1)))) { + "work with expand-expand" in new OneBoundedSetup[Int]( + new Expand(Iterator.from), + new Expand(Iterator.from)) { lastEvents() should be(Set(RequestOne)) @@ -376,14 +370,12 @@ class InterpreterSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should be(Set(OnComplete, OnNext(12))) } - "implement conflate-expand" in new OneBoundedSetup[Int](Seq( + "implement conflate-expand" in new OneBoundedSetup[Int]( Conflate( (in: Int) ⇒ in, (agg: Int, x: Int) ⇒ agg + x, - stoppingDecider), - Expand( - (in: Int) ⇒ in, - (agg: Int) ⇒ (agg, agg)))) { + stoppingDecider).toGS, + new Expand(Iterator.continually(_: Int))) { lastEvents() should be(Set(RequestOne)) diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterStressSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterStressSpec.scala index e7f13562a3..f443c112de 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterStressSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterStressSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
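
The rewritten conflate-expand test above wires the two rate-decoupling stages back to back. For reference, a hedged sketch of the same pairing through the public DSL, assuming `conflate(seed)(aggregate)` still takes the seed and aggregate parameter lists that the wrapped Conflate stage uses here, together with the Iterator-based `expand` shown elsewhere in this diff.

```
import akka.NotUsed
import akka.stream.scaladsl.Flow

// Keep only the newest element while downstream is slower (conflate),
// and repeat the newest element while upstream is slower (expand).
val decouple: Flow[Double, Double, NotUsed] =
  Flow[Double]
    .conflate((d: Double) => d)((_, newest) => newest)
    .expand(latest => Iterator.continually(latest))
```
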
*/ package akka.stream.impl.fusing +import akka.NotUsed import akka.stream.{ Attributes, Shape, Supervision } import akka.stream.stage.AbstractStage.PushPullGraphStage import akka.stream.stage.GraphStageWithMaterializedValue @@ -15,15 +16,11 @@ class InterpreterStressSpec extends AkkaSpec with GraphInterpreterSpecKit { val halfLength = chainLength / 2 val repetition = 100 - val f = (x: Int) ⇒ x + 1 - - val map: GraphStageWithMaterializedValue[Shape, Any] = - new PushPullGraphStage[Int, Int, Unit]((_) ⇒ Map(f, stoppingDecider), Attributes.none) - .asInstanceOf[GraphStageWithMaterializedValue[Shape, Any]] + val map = Map((x: Int) ⇒ x + 1, stoppingDecider).toGS "Interpreter" must { - "work with a massive chain of maps" in new OneBoundedSetup[Int](Array.fill(chainLength)(map).asInstanceOf[Array[GraphStageWithMaterializedValue[Shape, Any]]]) { + "work with a massive chain of maps" in new OneBoundedSetup[Int](Vector.fill(chainLength)(map): _*) { lastEvents() should be(Set.empty) val tstamp = System.nanoTime() @@ -45,9 +42,10 @@ class InterpreterStressSpec extends AkkaSpec with GraphInterpreterSpecKit { info(s"Chain finished in $time seconds ${(chainLength * repetition) / (time * 1000 * 1000)} million maps/s") } - "work with a massive chain of maps with early complete" in new OneBoundedSetup[Int](Iterable.fill(halfLength)(Map((x: Int) ⇒ x + 1, stoppingDecider)) ++ - Seq(Take(repetition / 2)) ++ - Seq.fill(halfLength)(Map((x: Int) ⇒ x + 1, stoppingDecider))) { + "work with a massive chain of maps with early complete" in new OneBoundedSetup[Int]( + Vector.fill(halfLength)(map) ++ + Seq(Take(repetition / 2).toGS) ++ + Vector.fill(halfLength)(map): _*) { lastEvents() should be(Set.empty) val tstamp = System.nanoTime() @@ -73,7 +71,7 @@ class InterpreterStressSpec extends AkkaSpec with GraphInterpreterSpecKit { info(s"Chain finished in $time seconds ${(chainLength * repetition) / (time * 1000 * 1000)} million maps/s") } - "work with a massive chain of takes" in new OneBoundedSetup[Int](Iterable.fill(chainLength)(Take(1))) { + "work with a massive chain of takes" in new OneBoundedSetup[Int](Vector.fill(chainLength / 10)(Take(1))) { lastEvents() should be(Set.empty) downstream.requestOne() @@ -84,7 +82,7 @@ class InterpreterStressSpec extends AkkaSpec with GraphInterpreterSpecKit { } - "work with a massive chain of drops" in new OneBoundedSetup[Int](Iterable.fill(chainLength / 1000)(Drop(1))) { + "work with a massive chain of drops" in new OneBoundedSetup[Int](Vector.fill(chainLength / 1000)(Drop(1))) { lastEvents() should be(Set.empty) downstream.requestOne() @@ -102,7 +100,7 @@ class InterpreterStressSpec extends AkkaSpec with GraphInterpreterSpecKit { } - "work with a massive chain of conflates by overflowing to the heap" in new OneBoundedSetup[Int](Iterable.fill(100000)(Conflate( + "work with a massive chain of conflates by overflowing to the heap" in new OneBoundedSetup[Int](Vector.fill(chainLength / 10)(Conflate( (in: Int) ⇒ in, (agg: Int, in: Int) ⇒ agg + in, Supervision.stoppingDecider))) { diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala index 9a5ffeb5c7..e98dd9693d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing @@ -451,9 +451,8 @@ class InterpreterSupervisionSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should be(Set(Cancel)) } - "fail when Expand `seed` throws" in new OneBoundedSetup[Int](Seq(Expand( - (in: Int) ⇒ if (in == 2) throw TE else in, - (agg: Int) ⇒ (agg, -math.abs(agg))))) { + "fail when Expand `seed` throws" in new OneBoundedSetup[Int]( + new Expand((in: Int) ⇒ if (in == 2) throw TE else Iterator(in) ++ Iterator.continually(-math.abs(in)))) { lastEvents() should be(Set(RequestOne)) @@ -473,9 +472,8 @@ class InterpreterSupervisionSpec extends AkkaSpec with GraphInterpreterSpecKit { lastEvents() should be(Set(OnError(TE), Cancel)) } - "fail when Expand `extrapolate` throws" in new OneBoundedSetup[Int](Seq(Expand( - (in: Int) ⇒ in, - (agg: Int) ⇒ if (agg == 2) throw TE else (agg, -math.abs(agg))))) { + "fail when Expand `extrapolate` throws" in new OneBoundedSetup[Int]( + new Expand((in: Int) ⇒ if (in == 2) Iterator.continually(throw TE) else Iterator(in) ++ Iterator.continually(-math.abs(in)))) { lastEvents() should be(Set(RequestOne)) diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/IteratorInterpreterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/IteratorInterpreterSpec.scala index 678531f064..fd64427d97 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/IteratorInterpreterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/IteratorInterpreterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala index 74c0276647..b4fb473c34 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/LifecycleInterpreterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/LifecycleInterpreterSpec.scala index 5b8c260d39..0433fbf128 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/LifecycleInterpreterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/LifecycleInterpreterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/FileSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/FileSinkSpec.scala index 8d7e2bfcb0..45df3f08bf 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/FileSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/FileSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.io @@ -9,10 +9,9 @@ import akka.actor.ActorSystem import akka.stream.impl.ActorMaterializerImpl import akka.stream.impl.StreamSupervisor import akka.stream.impl.StreamSupervisor.Children -import akka.stream.scaladsl.{ FileIO, Sink, Source } +import akka.stream.scaladsl.{ FileIO, Source } import akka.stream.testkit._ import akka.stream.testkit.Utils._ -import akka.stream.testkit.StreamTestKit import akka.stream.ActorMaterializer import akka.stream.ActorMaterializerSettings import akka.stream.ActorAttributes @@ -47,8 +46,8 @@ class FileSinkSpec extends AkkaSpec(UnboundedMailboxConfig) { val completion = Source(TestByteStrings) .runWith(FileIO.toFile(f)) - val size = Await.result(completion, 3.seconds) - size should equal(6006) + val result = Await.result(completion, 3.seconds) + result.count should equal(6006) checkFileContents(f, TestLines.mkString("")) } } @@ -65,9 +64,9 @@ class FileSinkSpec extends AkkaSpec(UnboundedMailboxConfig) { val lastWrite = List("x" * 100) val completion2 = write(lastWrite) - val written2 = Await.result(completion2, 3.seconds) + val result = Await.result(completion2, 3.seconds) - written2 should ===(lastWrite.flatten.length) + result.count should ===(lastWrite.flatten.length) checkFileContents(f, lastWrite.mkString("") + TestLines.mkString("").drop(100)) } } @@ -80,13 +79,13 @@ class FileSinkSpec extends AkkaSpec(UnboundedMailboxConfig) { .runWith(FileIO.toFile(f, append = true)) val completion1 = write() - val written1 = Await.result(completion1, 3.seconds) + val result1 = Await.result(completion1, 3.seconds) val lastWrite = List("x" * 100) val completion2 = write(lastWrite) - val written2 = Await.result(completion2, 3.seconds) + val result2 = Await.result(completion2, 3.seconds) - f.length() should ===(written1 + written2) + f.length() should ===(result1.count + result2.count) checkFileContents(f, TestLines.mkString("") + lastWrite.mkString("") + "\n") } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/FileSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/FileSourceSpec.scala index 9c45661c30..1867fd29e8 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/FileSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/FileSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/FramingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/FramingSpec.scala index 86f290ace8..4dccaea418 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/FramingSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/FramingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala index 98b8acf619..6940cf7861 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
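
The FileSinkSpec changes above stop asserting on a raw byte count and instead read `result.count`, i.e. `FileIO.toFile` now materializes a richer result object (an IOResult in this version of the API, as far as I recall; treat the exact type name as an assumption). A minimal sketch of consuming it; object and file names are illustrative.

```
import java.io.File
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ FileIO, Source }
import akka.util.ByteString
import scala.concurrent.Await
import scala.concurrent.duration._

object FileSinkSketch extends App {
  implicit val system = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  val f = File.createTempFile("filesink-sketch", ".tmp")

  // The sink materializes a Future holding the write result; `count` is the
  // number of bytes written, which is what the updated assertions check.
  val completion = Source.single(ByteString("hello")).runWith(FileIO.toFile(f))
  val result = Await.result(completion, 3.seconds)
  println(s"wrote ${result.count} bytes to $f")

  system.terminate()
}
```
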
*/ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSourceSpec.scala index 532afaf3c6..323ed16bde 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSinkSpec.scala index 20c1a582e9..13522bc6fa 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala index 373175a83b..e18e80c224 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala b/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala index 4aa4fc1698..45f00e03b2 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.io diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/TcpSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/TcpSpec.scala index 620398bfdb..8564cff361 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/TcpSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/TcpSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.io +import akka.NotUsed import akka.actor.{ ActorSystem, Kill } import akka.io.Tcp._ import akka.stream.scaladsl.Tcp.IncomingConnection @@ -342,7 +343,7 @@ class TcpSpec extends AkkaSpec("akka.stream.materializer.subscription-timeout.ti "properly full-close if requested" in assertAllStagesStopped { val serverAddress = temporaryServerAddress() - val writeButIgnoreRead: Flow[ByteString, ByteString, Unit] = + val writeButIgnoreRead: Flow[ByteString, ByteString, NotUsed] = Flow.fromSinkAndSourceMat(Sink.ignore, Source.single(ByteString("Early response")))(Keep.right) val binding = @@ -391,7 +392,7 @@ class TcpSpec extends AkkaSpec("akka.stream.materializer.subscription-timeout.ti val result = Source.maybe[ByteString].via(Tcp(system2).outgoingConnection(serverAddress)).runFold(0)(_ + _.size)(mat2) // Getting rid of existing connection actors by using a blunt instrument - system2.actorSelection(akka.io.Tcp(system2).getManager.path / "selectors" / "$a" / "*") ! Kill + system2.actorSelection(akka.io.Tcp(system2).getManager.path / "selectors" / s"$$a" / "*") ! 
Kill a[StreamTcpException] should be thrownBy Await.result(result, 3.seconds) diff --git a/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala index 1458253a83..5cec762df0 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala @@ -4,6 +4,8 @@ import java.security.KeyStore import java.security.SecureRandom import java.util.concurrent.TimeoutException +import akka.NotUsed + import scala.collection.immutable import scala.concurrent.Await import scala.concurrent.Future @@ -115,7 +117,7 @@ class TlsSpec extends AkkaSpec("akka.loglevel=INFO\nakka.actor.debug.receive=off trait CommunicationSetup extends Named { def decorateFlow(leftClosing: Closing, rightClosing: Closing, - rhs: Flow[SslTlsInbound, SslTlsOutbound, Any]): Flow[SslTlsOutbound, SslTlsInbound, Unit] + rhs: Flow[SslTlsInbound, SslTlsOutbound, Any]): Flow[SslTlsOutbound, SslTlsInbound, NotUsed] def cleanup(): Unit = () } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AcknowledgeSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AcknowledgeSourceSpec.scala deleted file mode 100644 index 8c394c9b5c..0000000000 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AcknowledgeSourceSpec.scala +++ /dev/null @@ -1,96 +0,0 @@ -/** - * Copyright (C) 2015 Typesafe Inc. - */ -package akka.stream.scaladsl - -import akka.stream.testkit.Utils._ -import akka.stream.testkit.{ AkkaSpec, TestSubscriber } -import akka.stream.{ ActorMaterializer, OverflowStrategy } -import scala.concurrent.Future -import scala.concurrent.duration._ -import scala.concurrent._ -import akka.pattern.pipe - -class AcknowledgeSourceSpec extends AkkaSpec { - implicit val materializer = ActorMaterializer() - implicit val ec = system.dispatcher - - def assertSuccess(b: Boolean, fb: Future[Boolean]): Unit = - Await.result(fb, 1.second) should be(b) - - "A AcknowledgeSource" must { - - "emit received messages to the stream" in { - val s = TestSubscriber.manualProbe[Int]() - val queue = Source.queue(10, OverflowStrategy.fail).to(Sink.fromSubscriber(s)).run() - val sub = s.expectSubscription - sub.request(2) - assertSuccess(true, queue.offer(1)) - s.expectNext(1) - assertSuccess(true, queue.offer(2)) - s.expectNext(2) - assertSuccess(true, queue.offer(3)) - sub.cancel() - } - - "buffer when needed" in { - val s = TestSubscriber.manualProbe[Int]() - val queue = Source.queue(100, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() - val sub = s.expectSubscription - for (n ← 1 to 20) assertSuccess(true, queue.offer(n)) - sub.request(10) - for (n ← 1 to 10) assertSuccess(true, queue.offer(n)) - sub.request(10) - for (n ← 11 to 20) assertSuccess(true, queue.offer(n)) - - for (n ← 200 to 399) assertSuccess(true, queue.offer(n)) - sub.request(100) - for (n ← 300 to 399) assertSuccess(true, queue.offer(n)) - sub.cancel() - } - - "not fail when 0 buffer space and demand is signalled" in assertAllStagesStopped { - val s = TestSubscriber.manualProbe[Int]() - val queue = Source.queue(0, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() - val sub = s.expectSubscription - sub.request(1) - assertSuccess(true, queue.offer(1)) - s.expectNext(1) - sub.cancel() - } - - "return false when can reject element to buffer" in assertAllStagesStopped { - val s = TestSubscriber.manualProbe[Int]() - val queue = Source.queue(1, OverflowStrategy.dropNew).to(Sink.fromSubscriber(s)).run() - 
val sub = s.expectSubscription - assertSuccess(true, queue.offer(1)) - assertSuccess(false, queue.offer(2)) - sub.request(1) - s.expectNext(1) - sub.cancel() - } - - "wait when buffer is full and backpressure is on" in assertAllStagesStopped { - val s = TestSubscriber.manualProbe[Int]() - val queue = Source.queue(2, OverflowStrategy.backpressure).to(Sink.fromSubscriber(s)).run() - val sub = s.expectSubscription - assertSuccess(true, queue.offer(1)) - - val addedSecond = queue.offer(2) - - addedSecond.pipeTo(testActor) - expectNoMsg(300.millis) - - sub.request(1) - s.expectNext(1) - assertSuccess(true, addedSecond) - - sub.request(1) - s.expectNext(2) - - sub.cancel() - } - - } - -} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefBackpressureSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefBackpressureSinkSpec.scala index 883eaa009c..ee5d298d3d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefBackpressureSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefBackpressureSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSinkSpec.scala index 3c10bd5264..8bc3385c99 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSourceSpec.scala index 2c73daadda..475659f0f3 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ActorRefSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala index 0c42807ba2..1b354abf65 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/BidiFlowSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/BidiFlowSpec.scala index 7e87519ca6..c2574810db 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/BidiFlowSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/BidiFlowSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.testkit.AkkaSpec import akka.stream.testkit.Utils._ import org.scalactic.ConversionCheckedTripleEquals @@ -113,7 +114,7 @@ class BidiFlowSpec extends AkkaSpec with ConversionCheckedTripleEquals { "suitably override attribute handling methods" in { import Attributes._ - val b: BidiFlow[Int, Long, ByteString, String, Unit] = bidi.withAttributes(name("")).addAttributes(asyncBoundary).named("") + val b: BidiFlow[Int, Long, ByteString, String, NotUsed] = bidi.withAttributes(name("")).addAttributes(asyncBoundary).named("") } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowAppendSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowAppendSpec.scala index 17dc1bf538..73ec165afd 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowAppendSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowAppendSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchSpec.scala new file mode 100644 index 0000000000..d970ffb1ed --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchSpec.scala @@ -0,0 +1,99 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.stream.scaladsl + +import scala.concurrent.Await +import scala.concurrent.duration._ +import scala.concurrent.forkjoin.ThreadLocalRandom +import akka.stream.{ OverflowStrategy, ActorMaterializer, ActorMaterializerSettings } +import akka.stream.testkit._ + +class FlowBatchSpec extends AkkaSpec { + + val settings = ActorMaterializerSettings(system) + .withInputBuffer(initialSize = 2, maxSize = 2) + + implicit val materializer = ActorMaterializer(settings) + + "Batch" must { + + "pass-through elements unchanged when there is no rate difference" in { + val publisher = TestPublisher.probe[Int]() + val subscriber = TestSubscriber.manualProbe[Int]() + + Source.fromPublisher(publisher).batch(max = 2, seed = i ⇒ i)(aggregate = _ + _).to(Sink.fromSubscriber(subscriber)).run() + val sub = subscriber.expectSubscription() + + for (i ← 1 to 100) { + sub.request(1) + publisher.sendNext(i) + subscriber.expectNext(i) + } + + sub.cancel() + } + + "aggregate elements while downstream is silent" in { + val publisher = TestPublisher.probe[Int]() + val subscriber = TestSubscriber.manualProbe[List[Int]]() + + Source.fromPublisher(publisher).batch(max = Long.MaxValue, seed = i ⇒ List(i))(aggregate = (ints, i) ⇒ i :: ints).to(Sink.fromSubscriber(subscriber)).run() + val sub = subscriber.expectSubscription() + + for (i ← 1 to 10) { + publisher.sendNext(i) + } + subscriber.expectNoMsg(1.second) + sub.request(1) + subscriber.expectNext(List(10, 9, 8, 7, 6, 5, 4, 3, 2, 1)) + sub.cancel() + } + + "work on a variable rate chain" in { + val future = Source(1 to 1000) + .batch(max = 100, seed = i ⇒ i)(aggregate = (sum, i) ⇒ sum + i) + .map { i ⇒ if (ThreadLocalRandom.current().nextBoolean()) Thread.sleep(10); i } + .runFold(0)(_ + _) + Await.result(future, 10.seconds) should be(500500) + } + + "backpressure subscriber when upstream is slower" in { + val publisher = TestPublisher.probe[Int]() + val subscriber = TestSubscriber.manualProbe[Int]() + + Source.fromPublisher(publisher).batch(max = 2, seed = i ⇒ i)(aggregate = _ + _).to(Sink.fromSubscriber(subscriber)).run() 
+ val sub = subscriber.expectSubscription() + + sub.request(1) + publisher.sendNext(1) + subscriber.expectNext(1) + + sub.request(1) + subscriber.expectNoMsg(500.millis) + publisher.sendNext(2) + subscriber.expectNext(2) + + publisher.sendNext(3) + publisher.sendNext(4) + // The request can be in race with the above onNext(4) so the result would be either 3 or 7. + subscriber.expectNoMsg(500.millis) + sub.request(1) + subscriber.expectNext(7) + + sub.request(1) + subscriber.expectNoMsg(500.millis) + sub.cancel() + + } + + "work with a buffer and fold" in { + val future = Source(1 to 50) + .batch(max = Long.MaxValue, seed = i ⇒ i)(aggregate = _ + _) + .buffer(50, OverflowStrategy.backpressure) + .runFold(0)(_ + _) + Await.result(future, 3.seconds) should be((1 to 50).sum) + } + + } +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchWeightedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchWeightedSpec.scala new file mode 100644 index 0000000000..05e3f8ff4a --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBatchWeightedSpec.scala @@ -0,0 +1,41 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.stream.scaladsl + +import akka.stream.{ ActorMaterializer, ActorMaterializerSettings } +import akka.stream.testkit._ +import scala.concurrent.duration._ + +class FlowBatchWeightedSpec extends AkkaSpec { + + val settings = ActorMaterializerSettings(system) + .withInputBuffer(initialSize = 2, maxSize = 2) + + implicit val materializer = ActorMaterializer(settings) + + "BatchWeighted" must { + "Not aggregate heavy elements" in { + val publisher = TestPublisher.probe[Int]() + val subscriber = TestSubscriber.manualProbe[Int]() + + Source.fromPublisher(publisher).batchWeighted(max = 3, _ ⇒ 4, seed = i ⇒ i)(aggregate = _ + _).to(Sink.fromSubscriber(subscriber)).run() + val sub = subscriber.expectSubscription() + + publisher.sendNext(1) + publisher.sendNext(2) + + sub.request(1) + subscriber.expectNext(1) + + publisher.sendNext(3) + subscriber.expectNoMsg(1.second) + + sub.request(2) + subscriber.expectNext(2) + subscriber.expectNext(3) + + sub.cancel() + } + } +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBufferSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBufferSpec.scala index 283a907360..49833d8db5 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBufferSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowBufferSpec.scala @@ -1,13 +1,12 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
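
The new FlowBatchSpec and FlowBatchWeightedSpec above exercise the `batch(max, seed)(aggregate)` and `batchWeighted(max, costFn, seed)(aggregate)` combinators. A short sketch of the aggregation behaviour they test, using the same call shape as the specs; element values and names are illustrative.

```
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.concurrent.Await
import scala.concurrent.duration._

object BatchSketch extends App {
  implicit val system = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  // While downstream is slower than upstream, up to `max` elements are folded
  // together with `aggregate`, starting from `seed` for each new batch.
  val summed = Source(1 to 1000)
    .batch(max = 100, seed = i => i)(aggregate = _ + _)
    .runWith(Sink.fold(0)(_ + _))

  println(Await.result(summed, 10.seconds)) // 500500, whether batched or not
  system.terminate()
}
```
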
*/ package akka.stream.scaladsl import scala.concurrent.Await import scala.concurrent.Future import scala.concurrent.duration._ -import akka.stream.{ ActorMaterializer, ActorMaterializerSettings, OverflowStrategy } -import akka.stream.OverflowStrategy.Fail.BufferOverflowException +import akka.stream.{ BufferOverflowException, ActorMaterializer, ActorMaterializerSettings, OverflowStrategy } import akka.stream.testkit._ import akka.stream.testkit.scaladsl._ import akka.stream.testkit.Utils._ diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCollectSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCollectSpec.scala index 0fb01a6ca3..a93e6dd04c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCollectSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCollectSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCompileSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCompileSpec.scala index d470f268d7..a979f68a19 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCompileSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowCompileSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed import org.reactivestreams.Publisher import scala.collection.immutable.Seq @@ -111,19 +112,19 @@ class FlowCompileSpec extends AkkaSpec { "FlowOps" should { "be extensible" in { - val f: FlowOps[Int, Unit] { type Closed = Sink[Int, Unit] } = Flow[Int] + val f: FlowOps[Int, NotUsed] { type Closed = Sink[Int, NotUsed] } = Flow[Int] val fm = f.map(identity) - val f2: FlowOps[Int, Unit] = fm - val s: Sink[Int, Unit] = fm.to(Sink.ignore) + val f2: FlowOps[Int, NotUsed] = fm + val s: Sink[Int, NotUsed] = fm.to(Sink.ignore) } "be extensible (with MaterializedValue)" in { - val f: FlowOpsMat[Int, Unit] { type ClosedMat[+M] = Sink[Int, M] } = Flow[Int] + val f: FlowOpsMat[Int, NotUsed] { type ClosedMat[+M] = Sink[Int, M] } = Flow[Int] val fm = f.map(identity).concatMat(Source.empty)(Keep.both) // this asserts only the FlowOpsMat part of the signature, but fm also carries the // CloseMat type without which `.to(sink)` does not work - val f2: FlowOpsMat[Int, (Unit, Unit)] = fm - val s: Sink[Int, (Unit, Unit)] = fm.to(Sink.ignore) + val f2: FlowOpsMat[Int, (NotUsed, NotUsed)] = fm + val s: Sink[Int, (NotUsed, NotUsed)] = fm.to(Sink.ignore) } } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatAllSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatAllSpec.scala index f853248862..9429fbed6e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatAllSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatAllSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.impl.ConstantFun import scala.concurrent.duration._ @@ -60,7 +61,7 @@ class FlowConcatAllSpec extends AkkaSpec { } "on onError on master stream cancel the current open substream and signal error" in assertAllStagesStopped { - val publisher = TestPublisher.manualProbe[Source[Int, Unit]]() + val publisher = TestPublisher.manualProbe[Source[Int, NotUsed]]() val subscriber = TestSubscriber.manualProbe[Int]() Source.fromPublisher(publisher).flatMapConcat(ConstantFun.scalaIdentityFunction).to(Sink.fromSubscriber(subscriber)).run() @@ -80,7 +81,7 @@ class FlowConcatAllSpec extends AkkaSpec { } "on onError on master stream cancel the currently opening substream and signal error" in assertAllStagesStopped { - val publisher = TestPublisher.manualProbe[Source[Int, Unit]]() + val publisher = TestPublisher.manualProbe[Source[Int, NotUsed]]() val subscriber = TestSubscriber.manualProbe[Int]() Source.fromPublisher(publisher).flatMapConcat(ConstantFun.scalaIdentityFunction).to(Sink.fromSubscriber(subscriber)).run() @@ -120,7 +121,7 @@ class FlowConcatAllSpec extends AkkaSpec { } "on onError on open substream, cancel the master stream and signal error " in assertAllStagesStopped { - val publisher = TestPublisher.manualProbe[Source[Int, Unit]]() + val publisher = TestPublisher.manualProbe[Source[Int, NotUsed]]() val subscriber = TestSubscriber.manualProbe[Int]() Source.fromPublisher(publisher).flatMapConcat(ConstantFun.scalaIdentityFunction).to(Sink.fromSubscriber(subscriber)).run() @@ -140,7 +141,7 @@ class FlowConcatAllSpec extends AkkaSpec { } "on cancellation cancel the current open substream and the master stream" in assertAllStagesStopped { - val publisher = TestPublisher.manualProbe[Source[Int, Unit]]() + val publisher = TestPublisher.manualProbe[Source[Int, NotUsed]]() val subscriber = TestSubscriber.manualProbe[Int]() Source.fromPublisher(publisher).flatMapConcat(ConstantFun.scalaIdentityFunction).to(Sink.fromSubscriber(subscriber)).run() @@ -161,7 +162,7 @@ class FlowConcatAllSpec extends AkkaSpec { } "on cancellation cancel the currently opening substream and the master stream" in assertAllStagesStopped { - val publisher = TestPublisher.manualProbe[Source[Int, Unit]]() + val publisher = TestPublisher.manualProbe[Source[Int, NotUsed]]() val subscriber = TestSubscriber.manualProbe[Int]() Source.fromPublisher(publisher).flatMapConcat(ConstantFun.scalaIdentityFunction).to(Sink.fromSubscriber(subscriber)).run() @@ -184,11 +185,11 @@ class FlowConcatAllSpec extends AkkaSpec { } "pass along early cancellation" in assertAllStagesStopped { - val up = TestPublisher.manualProbe[Source[Int, Unit]]() + val up = TestPublisher.manualProbe[Source[Int, NotUsed]]() val down = TestSubscriber.manualProbe[Int]() val flowSubscriber = Source - .asSubscriber[Source[Int, Unit]] + .asSubscriber[Source[Int, NotUsed]] .flatMapConcat(ConstantFun.scalaIdentityFunction) .to(Sink.fromSubscriber(down)) .run() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatSpec.scala index 5427f41462..cf9a979456 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConcatSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl @@ -10,6 +10,7 @@ import org.reactivestreams.Publisher import scala.concurrent.duration._ import scala.concurrent.{ Await, Promise } +import akka.NotUsed class FlowConcatSpec extends BaseTwoStreamsSetup { @@ -97,7 +98,7 @@ class FlowConcatSpec extends BaseTwoStreamsSetup { subscriber.expectSubscription().request(5) val errorSignalled = (1 to 4).foldLeft(false)((errorSignalled, e) ⇒ - if (!errorSignalled) subscriber.expectNextOrError(1, TestException).isLeft else true) + if (!errorSignalled) subscriber.expectNextOrError(e, TestException).isLeft else true) if (!errorSignalled) subscriber.expectSubscriptionAndError(TestException) } @@ -111,7 +112,7 @@ class FlowConcatSpec extends BaseTwoStreamsSetup { subscriber.expectSubscription().request(5) val errorSignalled = (1 to 4).foldLeft(false)((errorSignalled, e) ⇒ - if (!errorSignalled) subscriber.expectNextOrError(1, TestException).isLeft else true) + if (!errorSignalled) subscriber.expectNextOrError(e, TestException).isLeft else true) if (!errorSignalled) subscriber.expectSubscriptionAndError(TestException) } @@ -133,21 +134,22 @@ class FlowConcatSpec extends BaseTwoStreamsSetup { val runnable = testSource.toMat(Sink.ignore)(Keep.left) val (m1, m2) = runnable.run() - m1.isInstanceOf[Unit] should be(true) - m2.isInstanceOf[Unit] should be(true) + m1.isInstanceOf[NotUsed] should be(true) + m2.isInstanceOf[NotUsed] should be(true) runnable.mapMaterializedValue((_) ⇒ "boo").run() should be("boo") } "work with Flow DSL" in { - val testFlow = Flow[Int].concatMat(Source(6 to 10))(Keep.both).grouped(1000) + val testFlow: Flow[Int, Seq[Int], (NotUsed, NotUsed)] = Flow[Int].concatMat(Source(6 to 10))(Keep.both).grouped(1000) Await.result(Source(1 to 5).viaMat(testFlow)(Keep.both).runWith(Sink.head), 3.seconds) should ===(1 to 10) val runnable = Source(1 to 5).viaMat(testFlow)(Keep.both).to(Sink.ignore) - val (m1, (m2, m3)) = runnable.run() - m1.isInstanceOf[Unit] should be(true) - m2.isInstanceOf[Unit] should be(true) - m3.isInstanceOf[Unit] should be(true) + val x = runnable.run() + val (m1, (m2, m3)) = x + m1.isInstanceOf[NotUsed] should be(true) + m2.isInstanceOf[NotUsed] should be(true) + m3.isInstanceOf[NotUsed] should be(true) runnable.mapMaterializedValue((_) ⇒ "boo").run() should be("boo") } @@ -158,9 +160,9 @@ class FlowConcatSpec extends BaseTwoStreamsSetup { val sink = testFlow.concatMat(Source(1 to 5))(Keep.both).to(Sink.ignore).mapMaterializedValue[String] { case ((m1, m2), m3) ⇒ - m1.isInstanceOf[Unit] should be(true) - m2.isInstanceOf[Unit] should be(true) - m3.isInstanceOf[Unit] should be(true) + m1.isInstanceOf[NotUsed] should be(true) + m2.isInstanceOf[NotUsed] should be(true) + m3.isInstanceOf[NotUsed] should be(true) "boo" } Source(10 to 15).runWith(sink) should be("boo") diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConflateSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConflateSpec.scala index 3f9304608b..6a463ef01b 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConflateSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowConflateSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
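
A recurring change in these specs is the move of "trivial" materialized values from `Unit` to `akka.NotUsed`, which is what the `m1.isInstanceOf[NotUsed]` assertions above verify. A small sketch of the resulting types; the value names are illustrative.

```
import akka.NotUsed
import akka.stream.scaladsl.{ Flow, Keep, Sink, Source }

// Sources, flows and sinks without an interesting materialized value are now
// typed with NotUsed instead of Unit.
val src: Source[Int, NotUsed] = Source(1 to 5)
val flow: Flow[Int, Int, NotUsed] = Flow[Int].map(_ + 1)
val sink: Sink[Int, NotUsed] = flow.to(Sink.ignore)

// Keep.both therefore yields tuples of NotUsed rather than of Unit:
val combined: Source[Int, (NotUsed, NotUsed)] =
  src.concatMat(Source(6 to 10))(Keep.both)
```
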
*/ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDelaySpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDelaySpec.scala index 0cb262bbd9..907f6e696e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDelaySpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDelaySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl @@ -7,7 +7,7 @@ import akka.stream.Attributes._ import akka.stream.testkit.Utils._ import akka.stream.testkit.scaladsl.TestSink import akka.stream.testkit.{ AkkaSpec, TestPublisher, TestSubscriber } -import akka.stream.{ DelayOverflowStrategy, ActorMaterializer } +import akka.stream.{ BufferOverflowException, DelayOverflowStrategy, ActorMaterializer } import scala.concurrent.Await import scala.concurrent.duration._ @@ -103,7 +103,7 @@ class FlowDelaySpec extends AkkaSpec { .withAttributes(inputBuffer(16, 16)) .runWith(TestSink.probe[Int]) .request(100) - .expectError(new DelayOverflowStrategy.Fail.BufferOverflowException("Buffer overflow for delay combinator (max capacity was: 16)!")) + .expectError(new BufferOverflowException("Buffer overflow for delay combinator (max capacity was: 16)!")) } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDetacherSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDetacherSpec.scala index ff65b991e7..fb2970bb8e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDetacherSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDetacherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala index 4953a7c396..8d22cfb59d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl @@ -8,7 +8,7 @@ import akka.stream.testkit.AkkaSpec import akka.stream.ActorMaterializer import akka.stream.ActorMaterializerSettings -class FlowDispatcherSpec extends AkkaSpec("my-dispatcher = ${akka.test.stream-dispatcher}") { +class FlowDispatcherSpec extends AkkaSpec(s"my-dispatcher = $${akka.test.stream-dispatcher}") { val defaultSettings = ActorMaterializerSettings(system) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropSpec.scala index af7d4b9559..a9818a6c9e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWhileSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWhileSpec.scala index a790aa434a..ccc87dfe89 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWhileSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWhileSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWithinSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWithinSpec.scala index 8743cba05f..6c78e738fd 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWithinSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDropWithinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowExpandSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowExpandSpec.scala index 2dbab7fb8b..585019bfab 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowExpandSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowExpandSpec.scala @@ -1,15 +1,15 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl import scala.concurrent.Await import scala.concurrent.duration._ import scala.concurrent.forkjoin.ThreadLocalRandom - import akka.stream.{ ActorMaterializer, ActorMaterializerSettings } - import akka.stream.testkit._ +import akka.stream.testkit.scaladsl.TestSource +import akka.stream.testkit.scaladsl.TestSink class FlowExpandSpec extends AkkaSpec { @@ -28,7 +28,7 @@ class FlowExpandSpec extends AkkaSpec { val subscriber = TestSubscriber.probe[Int]() // Simply repeat the last element as an extrapolation step - Source.fromPublisher(publisher).expand(seed = i ⇒ i)(extrapolate = i ⇒ (i, i)).to(Sink.fromSubscriber(subscriber)).run() + Source.fromPublisher(publisher).expand(Iterator.continually(_)).to(Sink.fromSubscriber(subscriber)).run() for (i ← 1 to 100) { // Order is important here: If the request comes first it will be extrapolated! 
@@ -44,7 +44,7 @@ class FlowExpandSpec extends AkkaSpec { val subscriber = TestSubscriber.probe[Int]() // Simply repeat the last element as an extrapolation step - Source.fromPublisher(publisher).expand(seed = i ⇒ i)(extrapolate = i ⇒ (i, i)).to(Sink.fromSubscriber(subscriber)).run() + Source.fromPublisher(publisher).expand(Iterator.continually(_)).to(Sink.fromSubscriber(subscriber)).run() publisher.sendNext(42) @@ -66,7 +66,7 @@ class FlowExpandSpec extends AkkaSpec { val subscriber = TestSubscriber.probe[Int]() // Simply repeat the last element as an extrapolation step - Source.fromPublisher(publisher).expand(seed = i ⇒ i)(extrapolate = i ⇒ (i, i)).to(Sink.fromSubscriber(subscriber)).run() + Source.fromPublisher(publisher).expand(Iterator.continually(_)).to(Sink.fromSubscriber(subscriber)).run() publisher.sendNext(1) subscriber.requestNext(1) @@ -84,7 +84,7 @@ class FlowExpandSpec extends AkkaSpec { "work on a variable rate chain" in { val future = Source(1 to 100) .map { i ⇒ if (ThreadLocalRandom.current().nextBoolean()) Thread.sleep(10); i } - .expand(seed = i ⇒ i)(extrapolate = i ⇒ (i, i)) + .expand(Iterator.continually(_)) .runFold(Set.empty[Int])(_ + _) Await.result(future, 10.seconds) should contain theSameElementsAs (1 to 100).toSet @@ -94,7 +94,7 @@ class FlowExpandSpec extends AkkaSpec { val publisher = TestPublisher.probe[Int]() val subscriber = TestSubscriber.probe[Int]() - Source.fromPublisher(publisher).expand(seed = i ⇒ i)(extrapolate = i ⇒ (i, i)).to(Sink.fromSubscriber(subscriber)).run() + Source.fromPublisher(publisher).expand(Iterator.continually(_)).to(Sink.fromSubscriber(subscriber)).run() publisher.sendNext(1) subscriber.requestNext(1) @@ -125,6 +125,26 @@ class FlowExpandSpec extends AkkaSpec { publisher.expectRequest() } + + "work properly with finite extrapolations" in { + val (source, sink) = + TestSource.probe[Int] + .expand(i ⇒ Iterator.from(0).map(i -> _).take(3)) + .toMat(TestSink.probe)(Keep.both) + .run() + source + .sendNext(1) + sink + .request(4) + .expectNext(1 -> 0, 1 -> 1, 1 -> 2) + .expectNoMsg(100.millis) + source + .sendNext(2) + .sendComplete() + sink + .expectNext(2 -> 0) + .expectComplete() + } } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala index 56bd98e39f..a06c5f1bae 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlattenMergeSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlattenMergeSpec.scala index 97c029edd8..2afc8f6505 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlattenMergeSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlattenMergeSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
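
FlowExpandSpec above now drives `expand` with an `Out => Iterator[U]` extrapolation function, including finite iterators (see the new "finite extrapolations" test). A hedged sketch of the repeat-last-element usage most of these tests rely on; object name and inputs are illustrative.

```
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.concurrent.Await
import scala.concurrent.duration._

object ExpandSketch extends App {
  implicit val system = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  // Iterator.continually(i) repeats the last seen element for as long as
  // downstream is faster than upstream; a finite iterator (e.g. with .take(3))
  // stops extrapolating after that many copies.
  val res = Source(List(1, 2, 3))
    .expand(i => Iterator.continually(i))
    .take(10)
    .runWith(Sink.seq)

  // Up to 10 elements: 1, 2, 3 with repeats of the most recent value
  // interleaved, depending on the relative rates.
  println(Await.result(res, 3.seconds))
  system.terminate()
}
```
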
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.testkit.AkkaSpec import akka.stream.ActorMaterializer import scala.concurrent._ @@ -97,7 +98,7 @@ class FlowFlattenMergeSpec extends AkkaSpec with ScalaFutures with ConversionChe "cancel substreams when failing from main stream" in { val p1, p2 = TestPublisher.probe[Int]() val ex = new Exception("buh") - val p = Promise[Source[Int, Unit]] + val p = Promise[Source[Int, NotUsed]] (Source(List(Source.fromPublisher(p1), Source.fromPublisher(p2))) ++ Source.fromFuture(p.future)) .flatMapMerge(5, identity) .runWith(Sink.head) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala index 99043036f6..f63671ffac 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed + import scala.concurrent.Await import scala.util.control.NoStackTrace @@ -44,7 +46,7 @@ class FlowFoldSpec extends AkkaSpec { "propagate an error" in assertAllStagesStopped { val error = new Exception with NoStackTrace - val future = inputSource.map(x ⇒ if (x > 50) throw error else x).runFold(())(Keep.none) + val future = inputSource.map(x ⇒ if (x > 50) throw error else x).runFold[NotUsed](NotUsed)(Keep.none) the[Exception] thrownBy Await.result(future, 3.seconds) should be(error) } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowForeachSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowForeachSpec.scala index 053bb4e320..1db3937ed4 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowForeachSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowForeachSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFromFutureSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFromFutureSpec.scala index 96e56fd5dc..5edfae2a11 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFromFutureSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFromFutureSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGraphCompileSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGraphCompileSpec.scala index 2f9645137c..daa12d7101 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGraphCompileSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGraphCompileSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupBySpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupBySpec.scala index f58095f745..8402b2f540 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupBySpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupBySpec.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. 
+ * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed + import scala.concurrent.duration._ import scala.util.control.NoStackTrace import akka.stream.ActorMaterializer @@ -211,7 +213,7 @@ class FlowGroupBySpec extends AkkaSpec with ScalaFutures with ConversionCheckedT .groupBy(2, elem ⇒ if (elem == 2) throw exc else elem % 2) .lift(_ % 2) .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, Unit])]() + val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, NotUsed])]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -242,7 +244,7 @@ class FlowGroupBySpec extends AkkaSpec with ScalaFutures with ConversionCheckedT .lift(_ % 2) .withAttributes(ActorAttributes.supervisionStrategy(resumingDecider)) .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, Unit])]() + val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, NotUsed])]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -279,7 +281,7 @@ class FlowGroupBySpec extends AkkaSpec with ScalaFutures with ConversionCheckedT "pass along early cancellation" in assertAllStagesStopped { val up = TestPublisher.manualProbe[Int]() - val down = TestSubscriber.manualProbe[(Int, Source[Int, Unit])]() + val down = TestSubscriber.manualProbe[(Int, Source[Int, NotUsed])]() val flowSubscriber = Source.asSubscriber[Int].groupBy(2, _ % 2).lift(_ % 2).to(Sink.fromSubscriber(down)).run() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedSpec.scala index 8a1d15c2a2..458571cea3 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedWithinSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedWithinSpec.scala index c39911d7a8..59ffc16574 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedWithinSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowGroupedWithinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowInterleaveSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowInterleaveSpec.scala index 913e64479f..6ae1253fa0 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowInterleaveSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowInterleaveSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl @@ -112,8 +112,9 @@ class FlowInterleaveSpec extends BaseTwoStreamsSetup { val subscriber2 = setup(nonemptyPublisher(1 to 4), failedPublisher) val subscription2 = subscriber2.expectSubscription() subscription2.request(4) - subscriber2.expectError(TestException) - + subscriber2.expectNextOrError(1, TestException).isLeft || + subscriber2.expectNextOrError(2, TestException).isLeft || + { subscriber2.expectError(TestException); true } } "work with one delayed failed and one nonempty publisher" in { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIntersperseSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIntersperseSpec.scala index fa25b1644f..e360059884 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIntersperseSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIntersperseSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIteratorSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIteratorSpec.scala index 266503cc90..75ade88c1b 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIteratorSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowIteratorSpec.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed + import scala.collection.immutable import scala.concurrent.duration._ import akka.stream.ActorMaterializer @@ -15,13 +17,13 @@ import org.reactivestreams.Subscriber class FlowIteratorSpec extends AbstractFlowIteratorSpec { override def testName = "A Flow based on an iterator producing function" - override def createSource(elements: Int): Source[Int, Unit] = + override def createSource(elements: Int): Source[Int, NotUsed] = Source.fromIterator(() ⇒ (1 to elements).iterator) } class FlowIterableSpec extends AbstractFlowIteratorSpec { override def testName = "A Flow based on an iterable" - override def createSource(elements: Int): Source[Int, Unit] = + override def createSource(elements: Int): Source[Int, NotUsed] = Source(1 to elements) implicit def mmaterializer = super.materializer @@ -80,7 +82,7 @@ abstract class AbstractFlowIteratorSpec extends AkkaSpec { def testName: String - def createSource(elements: Int): Source[Int, Unit] + def createSource(elements: Int): Source[Int, NotUsed] testName must { "produce elements" in assertAllStagesStopped { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowJoinSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowJoinSpec.scala index 061826c017..81f5e51a8d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowJoinSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowJoinSpec.scala @@ -1,15 +1,20 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl -import akka.stream.{ FlowShape, ActorMaterializer, ActorMaterializerSettings } +import akka.stream.{ FlowShape, ActorMaterializer, ActorMaterializerSettings, OverflowStrategy } +import akka.stream.impl.fusing.GraphStages.Detacher import akka.stream.testkit._ +import akka.stream.testkit.Utils._ +import akka.stream.testkit.scaladsl._ import com.typesafe.config.ConfigFactory +import org.scalatest.concurrent.ScalaFutures +import org.scalatest.time._ + +import scala.collection.immutable import scala.concurrent.Await import scala.concurrent.duration._ -import akka.stream.OverflowStrategy -import org.scalatest.concurrent.ScalaFutures class FlowJoinSpec extends AkkaSpec(ConfigFactory.parseString("akka.loglevel=INFO")) with ScalaFutures { @@ -18,8 +23,11 @@ class FlowJoinSpec extends AkkaSpec(ConfigFactory.parseString("akka.loglevel=INF implicit val materializer = ActorMaterializer(settings) + implicit val defaultPatience = + PatienceConfig(timeout = Span(2, Seconds), interval = Span(200, Millis)) + "A Flow using join" must { - "allow for cycles" in { + "allow for cycles" in assertAllStagesStopped { val end = 47 val (even, odd) = (0 to end).partition(_ % 2 == 0) val result = Set() ++ even ++ odd ++ odd.map(_ * 10) @@ -51,14 +59,103 @@ class FlowJoinSpec extends AkkaSpec(ConfigFactory.parseString("akka.loglevel=INF sub.cancel() } - "propagate one element" in { + "allow for merge cycle" in assertAllStagesStopped { val source = Source.single("lonely traveler") val flow1 = Flow.fromGraph(GraphDSL.create(Sink.head[String]) { implicit b ⇒ sink ⇒ import GraphDSL.Implicits._ val merge = b.add(Merge[String](2)) - val broadcast = b.add(Broadcast[String](2)) + val broadcast = b.add(Broadcast[String](2, eagerCancel = true)) + source ~> merge.in(0) + merge.out ~> broadcast.in + broadcast.out(0) ~> sink + + FlowShape(merge.in(1), broadcast.out(1)) + }) + + whenReady(flow1.join(Flow[String]).run())(_ shouldBe "lonely traveler") + } + + "allow for merge preferred cycle" in assertAllStagesStopped { + val source = Source.single("lonely traveler") + + val flow1 = Flow.fromGraph(GraphDSL.create(Sink.head[String]) { implicit b ⇒ + sink ⇒ + import GraphDSL.Implicits._ + val merge = b.add(MergePreferred[String](1)) + val broadcast = b.add(Broadcast[String](2, eagerCancel = true)) + source ~> merge.preferred + merge.out ~> broadcast.in + broadcast.out(0) ~> sink + + FlowShape(merge.in(0), broadcast.out(1)) + }) + + whenReady(flow1.join(Flow[String]).run())(_ shouldBe "lonely traveler") + } + + "allow for zip cycle" in assertAllStagesStopped { + val source = Source(immutable.Seq("traveler1", "traveler2")) + + val flow = Flow.fromGraph(GraphDSL.create(TestSink.probe[(String, String)]) { implicit b ⇒ + sink ⇒ + import GraphDSL.Implicits._ + val zip = b.add(Zip[String, String]) + val broadcast = b.add(Broadcast[(String, String)](2)) + source ~> zip.in0 + zip.out ~> broadcast.in + broadcast.out(0) ~> sink + + FlowShape(zip.in1, broadcast.out(1)) + }) + + val feedback = Flow.fromGraph(GraphDSL.create(Source.single("ignition")) { implicit b ⇒ + ignition ⇒ + import GraphDSL.Implicits._ + val flow = b.add(Flow[(String, String)].map(_._1)) + val merge = b.add(Merge[String](2)) + + ignition ~> merge.in(0) + flow ~> merge.in(1) + + FlowShape(flow.in, merge.out) + }) + + val probe = flow.join(feedback).run() + probe.requestNext(("traveler1", "ignition")) + probe.requestNext(("traveler2", "traveler1")) + } + + "allow for concat cycle" in assertAllStagesStopped { + val flow = 
Flow.fromGraph(GraphDSL.create(TestSource.probe[String](system), Sink.head[String])(Keep.both) { implicit b ⇒ + (source, sink) ⇒ + import GraphDSL.Implicits._ + val concat = b.add(Concat[String](2)) + val broadcast = b.add(Broadcast[String](2, eagerCancel = true)) + source ~> concat.in(0) + concat.out ~> broadcast.in + broadcast.out(0) ~> sink + + FlowShape(concat.in(1), broadcast.out(1)) + }) + + val (probe, result) = flow.join(Flow[String]).run() + probe.sendNext("lonely traveler") + whenReady(result) { r ⇒ + r shouldBe "lonely traveler" + probe.sendComplete() + } + } + + "allow for interleave cycle" in assertAllStagesStopped { + val source = Source.single("lonely traveler") + + val flow1 = Flow.fromGraph(GraphDSL.create(Sink.head[String]) { implicit b ⇒ + sink ⇒ + import GraphDSL.Implicits._ + val merge = b.add(Interleave[String](2, 1)) + val broadcast = b.add(Broadcast[String](2, eagerCancel = true)) source ~> merge.in(0) merge.out ~> broadcast.in broadcast.out(0) ~> sink diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitSpec.scala index 49addb91e8..e37015ada8 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitWeightedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitWeightedSpec.scala index 8371a36331..5e74421b6a 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitWeightedSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLimitWeightedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala index de61ac35ea..69dccbc008 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowLogSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.event.{ DummyClassForStringSources, Logging } import akka.stream.ActorAttributes._ import akka.stream.Attributes.LogLevels @@ -64,7 +65,7 @@ class FlowLogSpec extends AkkaSpec("akka.loglevel = DEBUG") with ScriptedTest { "debug each element" in { val log = Logging(system, "com.example.ImportantLogger") - val debugging: javadsl.Flow[Integer, Integer, Unit] = javadsl.Flow.of(classOf[Integer]) + val debugging: javadsl.Flow[Integer, Integer, NotUsed] = javadsl.Flow.of(classOf[Integer]) .log("log-1") .log("log-2", new akka.japi.function.Function[Integer, Integer] { def apply(i: Integer) = i }) .log("log-3", new akka.japi.function.Function[Integer, Integer] { def apply(i: Integer) = i }, log) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala index 8e1d6e3b94..612b9eb845 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncUnorderedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncUnorderedSpec.scala index 89ec80f447..c0c01c60f7 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncUnorderedSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapAsyncUnorderedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapConcatSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapConcatSpec.scala index 13a9b7c5e6..7156dfaba6 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapConcatSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapConcatSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapSpec.scala index 505b61d271..43035f0a43 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMergeSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMergeSpec.scala index cc0864f0cc..20ef661b57 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMergeSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowMergeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowOnCompleteSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowOnCompleteSpec.scala index b9f8176f7d..90e3caab60 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowOnCompleteSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowOnCompleteSpec.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.Done + import scala.concurrent.duration._ import scala.util.{ Failure, Success } import scala.util.control.NoStackTrace @@ -31,7 +33,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest { proc.sendNext(42) onCompleteProbe.expectNoMsg(100.millis) proc.sendComplete() - onCompleteProbe.expectMsg(Success(())) + onCompleteProbe.expectMsg(Success(Done)) } "yield the first error" in assertAllStagesStopped { @@ -53,7 +55,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest { val proc = p.expectSubscription proc.expectRequest() proc.sendComplete() - onCompleteProbe.expectMsg(Success(())) + onCompleteProbe.expectMsg(Success(Done)) onCompleteProbe.expectNoMsg(100.millis) } @@ -75,7 +77,7 @@ class FlowOnCompleteSpec extends AkkaSpec with ScriptedTest { proc.sendComplete() onCompleteProbe.expectMsg("map-42") onCompleteProbe.expectMsg("foreach-42") - onCompleteProbe.expectMsg(Success(())) + onCompleteProbe.expectMsg(Success(Done)) } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowPrefixAndTailSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowPrefixAndTailSpec.scala index 753550e093..28f3286ecb 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowPrefixAndTailSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowPrefixAndTailSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowRecoverSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowRecoverSpec.scala index ed9f543099..2815e89429 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowRecoverSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowRecoverSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala new file mode 100644 index 0000000000..5b32c4db4c --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala @@ -0,0 +1,59 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package akka.stream.scaladsl + +import scala.concurrent.Await +import scala.util.control.NoStackTrace + +import akka.stream.ActorMaterializer +import akka.stream.testkit.AkkaSpec +import akka.stream.testkit.Utils._ +import scala.concurrent.duration._ + +class FlowReduceSpec extends AkkaSpec { + implicit val materializer = ActorMaterializer() + + "A Reduce" must { + val input = 1 to 100 + val expected = input.sum + val inputSource = Source(input).filter(_ ⇒ true).map(identity) + val reduceSource = inputSource.reduce[Int](_ + _).filter(_ ⇒ true).map(identity) + val reduceFlow = Flow[Int].filter(_ ⇒ true).map(identity).reduce(_ + _).filter(_ ⇒ true).map(identity) + val reduceSink = Sink.reduce[Int](_ + _) + + "work when using Source.runReduce" in assertAllStagesStopped { + Await.result(inputSource.runReduce(_ + _), 3.seconds) should be(expected) + } + + "work when using Source.reduce" in assertAllStagesStopped { + Await.result(reduceSource runWith Sink.head, 3.seconds) should be(expected) + } + + "work when using Sink.reduce" in assertAllStagesStopped { + Await.result(inputSource runWith reduceSink, 3.seconds) should be(expected) + } + + "work when using Flow.reduce" in assertAllStagesStopped { + Await.result(inputSource via reduceFlow runWith Sink.head, 3.seconds) should be(expected) + } + + "work when using Source.reduce + Flow.reduce + Sink.reduce" in assertAllStagesStopped { + Await.result(reduceSource via reduceFlow runWith reduceSink, 3.seconds) should be(expected) + } + + "propagate an error" in assertAllStagesStopped { + val error = new Exception with NoStackTrace + val future = inputSource.map(x ⇒ if (x > 50) throw error else x).runReduce(Keep.none) + the[Exception] thrownBy Await.result(future, 3.seconds) should be(error) + } + + "complete future with failure when reducing function throws" in assertAllStagesStopped { + val error = new Exception with NoStackTrace + val future = inputSource.runReduce[Int]((x, y) ⇒ if (x > 50) throw error else x + y) + the[Exception] thrownBy Await.result(future, 3.seconds) should be(error) + } + + } + +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowScanSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowScanSpec.scala index 59229a821c..2d6bda805b 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowScanSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowScanSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.testkit.scaladsl.TestSink import akka.stream.{ ActorAttributes, ActorMaterializer, ActorMaterializerSettings, Supervision } import akka.stream.testkit.AkkaSpec @@ -22,7 +23,7 @@ class FlowScanSpec extends AkkaSpec { "A Scan" must { - def scan(s: Source[Int, Unit], duration: Duration = 5.seconds): immutable.Seq[Int] = + def scan(s: Source[Int, NotUsed], duration: Duration = 5.seconds): immutable.Seq[Int] = Await.result(s.scan(0)(_ + _).runFold(immutable.Seq.empty[Int])(_ :+ _), duration) "Scan" in assertAllStagesStopped { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala index 51e5d7a3a6..b69ead4164 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSlidingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSlidingSpec.scala index 993c76930d..95114c21d9 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSlidingSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSlidingSpec.scala @@ -1,18 +1,20 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.actor.Status.Failure import akka.stream.testkit.Utils._ import akka.stream.{ ActorMaterializer, ActorMaterializerSettings } import akka.stream.testkit._ import org.scalacheck.Gen +import org.scalatest.concurrent.ScalaFutures import org.scalatest.prop.GeneratorDrivenPropertyChecks import akka.pattern.pipe import scala.concurrent.Await -class FlowSlidingSpec extends AkkaSpec with GeneratorDrivenPropertyChecks { +class FlowSlidingSpec extends AkkaSpec with GeneratorDrivenPropertyChecks with ScalaFutures { import system.dispatcher val settings = ActorMaterializerSettings(system) .withInputBuffer(initialSize = 2, maxSize = 16) @@ -26,7 +28,7 @@ class FlowSlidingSpec extends AkkaSpec with GeneratorDrivenPropertyChecks { case (len, win, step) ⇒ val af = Source.fromIterator(() ⇒ Iterator.from(0).take(len)).sliding(win, step).runFold(Seq.empty[Seq[Int]])(_ :+ _) val cf = Source.fromIterator(() ⇒ Iterator.from(0).take(len).sliding(win, step)).runFold(Seq.empty[Seq[Int]])(_ :+ _) - Await.result(af, remaining) should be(Await.result(cf, remaining)) + af.futureValue should be(cf.futureValue) } "behave just like collections sliding with step < window" in assertAllStagesStopped { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala index a93f305c04..89fc631287 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl +import akka.NotUsed import akka.actor._ import akka.stream.Supervision._ import akka.stream.impl._ @@ -39,8 +40,8 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece implicit val materializer = ActorMaterializer(settings) - val identity: Flow[Any, Any, Unit] ⇒ Flow[Any, Any, Unit] = in ⇒ in.map(e ⇒ e) - val identity2: Flow[Any, Any, Unit] ⇒ Flow[Any, Any, Unit] = in ⇒ identity(in) + val identity: Flow[Any, Any, NotUsed] ⇒ Flow[Any, Any, NotUsed] = in ⇒ in.map(e ⇒ e) + val identity2: Flow[Any, Any, NotUsed] ⇒ Flow[Any, Any, NotUsed] = in ⇒ identity(in) class BrokenActorInterpreter(_shell: GraphInterpreterShell, brokenMessage: Any) extends ActorGraphInterpreter(_shell) { @@ -56,7 +57,7 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece } } - val faultyFlow: Flow[Any, Any, Unit] ⇒ Flow[Any, Any, Unit] = in ⇒ in.via({ + val faultyFlow: Flow[Any, Any, NotUsed] ⇒ Flow[Any, Any, NotUsed] = in ⇒ in.via({ val stage = new PushPullGraphStage((_) ⇒ fusing.Map({ x: Any ⇒ x }, stoppingDecider), Attributes.none) val assembly = new GraphAssembly( @@ -299,11 +300,11 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece "be covariant" in { val f1: Source[Fruit, _] = Source.fromIterator[Fruit](apples) val p1: Publisher[Fruit] = Source.fromIterator[Fruit](apples).runWith(Sink.asPublisher(false)) - val f2: SubFlow[Fruit, _, Source[Fruit, Unit]#Repr, _] = Source.fromIterator[Fruit](apples).splitWhen(_ ⇒ true) - val f3: SubFlow[Fruit, _, Source[Fruit, Unit]#Repr, _] = Source.fromIterator[Fruit](apples).groupBy(2, _ ⇒ true) + val f2: SubFlow[Fruit, _, Source[Fruit, NotUsed]#Repr, _] = Source.fromIterator[Fruit](apples).splitWhen(_ ⇒ true) + val f3: SubFlow[Fruit, _, Source[Fruit, NotUsed]#Repr, _] = Source.fromIterator[Fruit](apples).groupBy(2, _ ⇒ true) val f4: Source[(immutable.Seq[Fruit], Source[Fruit, _]), _] = Source.fromIterator[Fruit](apples).prefixAndTail(1) - val d1: SubFlow[Fruit, _, Flow[String, Fruit, Unit]#Repr, _] = Flow[String].map(_ ⇒ new Apple).splitWhen(_ ⇒ true) - val d2: SubFlow[Fruit, _, Flow[String, Fruit, Unit]#Repr, _] = Flow[String].map(_ ⇒ new Apple).groupBy(2, _ ⇒ true) + val d1: SubFlow[Fruit, _, Flow[String, Fruit, NotUsed]#Repr, _] = Flow[String].map(_ ⇒ new Apple).splitWhen(_ ⇒ true) + val d2: SubFlow[Fruit, _, Flow[String, Fruit, NotUsed]#Repr, _] = Flow[String].map(_ ⇒ new Apple).groupBy(2, _ ⇒ true) val d3: Flow[String, (immutable.Seq[Apple], Source[Fruit, _]), _] = Flow[String].map(_ ⇒ new Apple).prefixAndTail(1) } @@ -504,7 +505,7 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece } "call future subscribers' onError should be called instead of onSubscribed after initial upstream reported an error" in { - new ChainSetup[Int, String, Unit](_.map(_ ⇒ throw TestException), settings.withInputBuffer(initialSize = 1, maxSize = 1), + new ChainSetup[Int, String, NotUsed](_.map(_ ⇒ throw TestException), settings.withInputBuffer(initialSize = 1, maxSize = 1), toFanoutPublisher(1)) { downstreamSubscription.request(1) upstreamSubscription.expectRequest(1) @@ -589,7 +590,7 @@ class FlowSpec extends AkkaSpec(ConfigFactory.parseString("akka.actor.debug.rece "suitably override attribute handling methods" in { import Attributes._ - val f: Flow[Int, Int, Unit] = Flow[Int].withAttributes(asyncBoundary).addAttributes(none).named("") + val f: Flow[Int, Int, NotUsed] = Flow[Int].withAttributes(asyncBoundary).addAttributes(none).named("") } } 
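Most of the non-copyright changes in the specs above follow two API migrations: materialized values that carry no information are now typed as `akka.NotUsed` instead of `Unit`, and completion signals use `akka.Done` instead of `()` (hence the `Success(Done)` expectations in FlowOnCompleteSpec). A minimal standalone sketch of the resulting user-facing types, assuming Akka Streams 2.4.x; the object and value names are illustrative and not part of this patch:

```
import akka.{ Done, NotUsed }
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink, Source }
import scala.util.Try

object NotUsedSketch extends App {
  implicit val system = ActorSystem("notused-sketch")
  implicit val materializer = ActorMaterializer()

  // No meaningful materialized value ⇒ NotUsed (previously Unit).
  val numbers: Source[Int, NotUsed] = Source(1 to 3)
  val doubled: Flow[Int, Int, NotUsed] = Flow[Int].map(_ * 2)

  // Completion callbacks now observe Success(Done) rather than Success(()).
  val report: Try[Done] ⇒ Unit = result ⇒ {
    println(s"stream finished: $result")
    system.terminate()
  }
  numbers.via(doubled).runWith(Sink.onComplete(report))
}
```

This mirrors the `Source[Int, NotUsed]`, `Flow[..., NotUsed]`, and `Success(Done)` assertions introduced throughout the test diffs.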
diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitAfterSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitAfterSpec.scala index ca14b2c9ee..5d40c82d66 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitAfterSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitAfterSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.ActorMaterializer import akka.stream.ActorMaterializerSettings import akka.stream.ActorAttributes @@ -163,7 +164,7 @@ class FlowSplitAfterSpec extends AkkaSpec { .splitAfter(elem ⇒ if (elem == 3) throw exc else elem % 3 == 0) .lift .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[Source[Int, Unit]]() + val subscriber = TestSubscriber.manualProbe[Source[Int, NotUsed]]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -199,7 +200,7 @@ class FlowSplitAfterSpec extends AkkaSpec { .lift .withAttributes(ActorAttributes.supervisionStrategy(resumingDecider)) .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[Source[Int, Unit]]() + val subscriber = TestSubscriber.manualProbe[Source[Int, NotUsed]]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -241,7 +242,7 @@ class FlowSplitAfterSpec extends AkkaSpec { "pass along early cancellation" in assertAllStagesStopped { val up = TestPublisher.manualProbe[Int]() - val down = TestSubscriber.manualProbe[Source[Int, Unit]]() + val down = TestSubscriber.manualProbe[Source[Int, NotUsed]]() val flowSubscriber = Source.asSubscriber[Int].splitAfter(_ % 3 == 0).lift.to(Sink.fromSubscriber(down)).run() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitWhenSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitWhenSpec.scala index dd5423e77f..fc03a5e159 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitWhenSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSplitWhenSpec.scala @@ -3,6 +3,7 @@ */ package akka.stream.scaladsl +import akka.NotUsed import akka.stream._ import akka.stream.Supervision.resumingDecider import akka.stream.impl.SubscriptionTimeoutException @@ -144,7 +145,7 @@ class FlowSplitWhenSpec extends AkkaSpec { substream.cancel() - masterStream.expectNext(()) + masterStream.expectNext(NotUsed) masterStream.expectNoMsg(100.millis) masterStream.cancel() inputs.expectCancellation() @@ -217,7 +218,7 @@ class FlowSplitWhenSpec extends AkkaSpec { .splitWhen(elem ⇒ if (elem == 3) throw exc else elem % 3 == 0) .lift .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[Source[Int, Unit]]() + val subscriber = TestSubscriber.manualProbe[Source[Int, NotUsed]]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -290,7 +291,7 @@ class FlowSplitWhenSpec extends AkkaSpec { .lift .withAttributes(ActorAttributes.supervisionStrategy(resumingDecider)) .runWith(Sink.asPublisher(false)) - val subscriber = TestSubscriber.manualProbe[Source[Int, Unit]]() + val subscriber = TestSubscriber.manualProbe[Source[Int, NotUsed]]() publisher.subscribe(subscriber) val upstreamSubscription = publisherProbeProbe.expectSubscription() @@ -330,7 +331,7 @@ class FlowSplitWhenSpec extends AkkaSpec { "pass 
along early cancellation" in assertAllStagesStopped { val up = TestPublisher.manualProbe[Int]() - val down = TestSubscriber.manualProbe[Source[Int, Unit]]() + val down = TestSubscriber.manualProbe[Source[Int, NotUsed]]() val flowSubscriber = Source.asSubscriber[Int].splitWhen(_ % 3 == 0).lift.to(Sink.fromSubscriber(down)).run() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowStageSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowStageSpec.scala index 14de445e84..dc2f0ab09d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowStageSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowStageSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSupervisionSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSupervisionSpec.scala index 89cee1716c..b826a2d0d3 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSupervisionSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSupervisionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl @@ -13,6 +13,7 @@ import scala.concurrent.Await import akka.stream.Supervision import akka.stream.impl.ReactiveStreamsCompliance import akka.stream.ActorAttributes +import akka.NotUsed class FlowSupervisionSpec extends AkkaSpec { import ActorAttributes.supervisionStrategy @@ -23,7 +24,7 @@ class FlowSupervisionSpec extends AkkaSpec { val failingMap = Flow[Int].map(n ⇒ if (n == 3) throw exc else n) - def run(f: Flow[Int, Int, Unit]): immutable.Seq[Int] = + def run(f: Flow[Int, Int, NotUsed]): immutable.Seq[Int] = Await.result(Source((1 to 5).toSeq ++ (1 to 5)).via(f).grouped(1000).runWith(Sink.head), 3.seconds) "Stream supervision" must { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeSpec.scala index 95ce21d549..b22ae804c8 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWhileSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWhileSpec.scala index 30f73716c3..50ca9ae76f 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWhileSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWhileSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWithinSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWithinSpec.scala index ceab747a23..cd7769c2a7 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWithinSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowTakeWithinSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowThrottleSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowThrottleSpec.scala index d2159f4d55..b2b48386af 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowThrottleSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowThrottleSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowWatchTerminationSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowWatchTerminationSpec.scala new file mode 100644 index 0000000000..56423e017f --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowWatchTerminationSpec.scala @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2015 Typesafe Inc. + */ +package akka.stream.scaladsl + +import akka.actor.Status.Failure +import akka.Done +import akka.pattern.pipe +import akka.stream._ +import akka.stream.testkit.AkkaSpec +import akka.stream.testkit.Utils._ +import akka.stream.testkit.scaladsl.{ TestSink, TestSource } +import org.scalactic.ConversionCheckedTripleEquals +import org.scalatest.concurrent.ScalaFutures + +import scala.util.control.NoStackTrace +import scala.concurrent.duration._ + +class FlowWatchTerminationSpec extends AkkaSpec with ScalaFutures with ConversionCheckedTripleEquals { + + val settings = ActorMaterializerSettings(system) + + implicit val materializer = ActorMaterializer(settings) + implicit val patience = PatienceConfig(3.seconds) + + "A WatchTermination" must { + + "complete future when stream is completed" in assertAllStagesStopped { + val (future, p) = Source(1 to 4).watchTermination()(Keep.right).toMat(TestSink.probe[Int])(Keep.both).run() + p.request(4).expectNext(1, 2, 3, 4) + future.futureValue should ===(Done) + p.expectComplete() + } + + "complete future when stream is cancelled from downstream" in assertAllStagesStopped { + val (future, p) = Source(1 to 4).watchTermination()(Keep.right).toMat(TestSink.probe[Int])(Keep.both).run() + p.request(3).expectNext(1, 2, 3).cancel() + future.futureValue should ===(Done) + } + + "fail future when stream is failed" in assertAllStagesStopped { + val ex = new RuntimeException("Stream failed.") with NoStackTrace + val (p, future) = TestSource.probe[Int].watchTermination()(Keep.both).to(Sink.ignore).run() + p.sendNext(1) + p.sendError(ex) + whenReady(future.failed) { _ shouldBe (ex) } + } + + "complete the future for an empty stream" in assertAllStagesStopped { + val (future, p) = Source.empty[Int].watchTermination()(Keep.right).toMat(TestSink.probe[Int])(Keep.both).run() + p.request(1) + future.futureValue should ===(Done) + } + + "complete future for graph" in assertAllStagesStopped { + implicit val ec = system.dispatcher + + val ((sourceProbe, future), sinkProbe) = TestSource.probe[Int].watchTermination()(Keep.both).concat(Source(2 to 5)).toMat(TestSink.probe[Int])(Keep.both).run() + future.pipeTo(testActor) + sinkProbe.request(5) + sourceProbe.sendNext(1) + sinkProbe.expectNext(1) + expectNoMsg(300.millis) + + sourceProbe.sendComplete() + expectMsg(Done) + + sinkProbe.expectNextN(2 to 5) + .expectComplete() + } + + } + +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipSpec.scala index ae3853a2ac..82ea3104b2 100644 --- 
a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipWithSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipWithSpec.scala index e961d1cf2d..6dd7130d00 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipWithSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowZipWithSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBackedFlowSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBackedFlowSpec.scala index 98682339ed..7e7598ea25 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBackedFlowSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBackedFlowSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBalanceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBalanceSpec.scala index 4cae284754..a855286c87 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBalanceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBalanceSpec.scala @@ -4,7 +4,7 @@ import scala.concurrent.Await import scala.concurrent.duration._ import scala.concurrent.Future -import akka.stream.{ ClosedShape, ActorMaterializer, ActorMaterializerSettings } +import akka.stream.{ SourceShape, ClosedShape, ActorMaterializer, ActorMaterializerSettings } import akka.stream.testkit._ import akka.stream.testkit.scaladsl._ import akka.stream.testkit.Utils._ @@ -110,6 +110,18 @@ class GraphBalanceSpec extends AkkaSpec { s2.expectComplete() } + "work with one-way merge" in { + val result = Source.fromGraph(GraphDSL.create() { implicit b ⇒ + val balance = b.add(Balance[Int](1)) + val source = b.add(Source(1 to 3)) + + source ~> balance.in + SourceShape(balance.out(0)) + }).runFold(Seq[Int]())(_ :+ _) + + Await.result(result, 3.seconds) should ===(Seq(1, 2, 3)) + } + "work with 5-way balance" in { val sink = Sink.head[Seq[Int]] diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBroadcastSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBroadcastSpec.scala index 7f9bc8b454..528d853212 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBroadcastSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphBroadcastSpec.scala @@ -49,6 +49,19 @@ class GraphBroadcastSpec extends AkkaSpec { c2.expectComplete() } + "work with one-way broadcast" in assertAllStagesStopped { + val result = Source.fromGraph(GraphDSL.create() { implicit b ⇒ + val broadcast = b.add(Broadcast[Int](1)) + val source = b.add(Source(1 to 3)) + + source ~> broadcast.in + + SourceShape(broadcast.out(0)) + }).runFold(Seq[Int]())(_ :+ _) + + Await.result(result, 3.seconds) should ===(Seq(1, 2, 3)) + } + "work with n-way broadcast" in assertAllStagesStopped { val headSink = Sink.head[Seq[Int]] diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphConcatSpec.scala 
b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphConcatSpec.scala index 4087e1a00c..ec876a435b 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphConcatSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphConcatSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMatValueSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMatValueSpec.scala index 7403c10c82..efa7433dd5 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMatValueSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMatValueSpec.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed import akka.stream._ import akka.stream.testkit._ diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergePreferredSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergePreferredSpec.scala index 3fd9e83557..c2dd671f9d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergePreferredSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergePreferredSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSortedSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSortedSpec.scala index 732e1b0c20..e4d27390f2 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSortedSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSortedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSpec.scala index 91eb669251..85e054b7f9 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphMergeSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl import akka.stream._ +import scala.concurrent.Await import scala.concurrent.duration._ import akka.stream.testkit._ @@ -58,6 +59,18 @@ class GraphMergeSpec extends TwoStreamsSetup { probe.expectComplete() } + "work with one-way merge" in { + val result = Source.fromGraph(GraphDSL.create() { implicit b ⇒ + val merge = b.add(Merge[Int](1)) + val source = b.add(Source(1 to 3)) + + source ~> merge.in(0) + SourceShape(merge.out) + }).runFold(Seq[Int]())(_ :+ _) + + Await.result(result, 3.seconds) should ===(Seq(1, 2, 3)) + } + "work with n-way merge" in { val source1 = Source(List(1)) val source2 = Source(List(2)) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphOpsIntegrationSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphOpsIntegrationSpec.scala index 6367b2ae23..8149ee9e3c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphOpsIntegrationSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphOpsIntegrationSpec.scala @@ -1,5 +1,7 @@ package akka.stream.scaladsl +import akka.NotUsed + import scala.collection.immutable import scala.concurrent.{ Future, Await } import scala.concurrent.duration._ @@ -29,7 +31,7 @@ object GraphOpsIntegrationSpec { } } - def apply[In, Out](pipeline: Flow[In, Out, _]): Graph[ShufflePorts[In, Out], Unit] = { + def apply[In, Out](pipeline: Flow[In, Out, _]): Graph[ShufflePorts[In, Out], NotUsed] = { GraphDSL.create() { implicit b ⇒ val merge = b.add(Merge[In](2)) val balance = b.add(Balance[Out](2)) diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphPartitionSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphPartitionSpec.scala new file mode 100644 index 0000000000..965ee0bdbc --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphPartitionSpec.scala @@ -0,0 +1,175 @@ +/** + * Copyright (C) 2009-2016 Typesafe Inc. 
+ */ +package akka.stream.scaladsl + +import akka.stream.testkit._ +import akka.stream.testkit.scaladsl.TestSink +import akka.stream.{ OverflowStrategy, ActorMaterializer, ActorMaterializerSettings, ClosedShape } +import akka.stream.testkit.Utils._ +import scala.concurrent.Await +import scala.concurrent.duration._ + +class GraphPartitionSpec extends AkkaSpec { + + val settings = ActorMaterializerSettings(system) + .withInputBuffer(initialSize = 2, maxSize = 16) + + implicit val materializer = ActorMaterializer(settings) + + "A partition" must { + import GraphDSL.Implicits._ + + "partition to three subscribers" in assertAllStagesStopped { + val c1 = TestSubscriber.probe[Int]() + val c2 = TestSubscriber.probe[Int]() + val c3 = TestSubscriber.probe[Int]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + val partition = b.add(Partition[Int](3, { + case g if (g > 3) ⇒ 0 + case l if (l < 3) ⇒ 1 + case e if (e == 3) ⇒ 2 + })) + Source(List(1, 2, 3, 4, 5)) ~> partition.in + partition.out(0) ~> Sink.fromSubscriber(c1) + partition.out(1) ~> Sink.fromSubscriber(c2) + partition.out(2) ~> Sink.fromSubscriber(c3) + ClosedShape + }).run() + + c2.request(2) + c1.request(2) + c3.request(1) + c2.expectNext(1) + c2.expectNext(2) + c3.expectNext(3) + c1.expectNext(4) + c1.expectNext(5) + c1.expectComplete() + c2.expectComplete() + c3.expectComplete() + } + + "complete stage after upstream completes" in assertAllStagesStopped { + val c1 = TestSubscriber.probe[String]() + val c2 = TestSubscriber.probe[String]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + val partition = b.add(Partition[String](2, { + case s if (s.length > 4) ⇒ 0 + case _ ⇒ 1 + })) + Source(List("this", "is", "just", "another", "test")) ~> partition.in + partition.out(0) ~> Sink.fromSubscriber(c1) + partition.out(1) ~> Sink.fromSubscriber(c2) + ClosedShape + }).run() + + c1.request(1) + c2.request(4) + c1.expectNext("another") + c2.expectNext("this") + c2.expectNext("is") + c2.expectNext("just") + c2.expectNext("test") + c1.expectComplete() + c2.expectComplete() + + } + + "remember first pull even though first element targeted another out" in assertAllStagesStopped { + val c1 = TestSubscriber.probe[Int]() + val c2 = TestSubscriber.probe[Int]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + val partition = b.add(Partition[Int](2, { case l if l < 6 ⇒ 0; case _ ⇒ 1 })) + Source(List(6, 3)) ~> partition.in + partition.out(0) ~> Sink.fromSubscriber(c1) + partition.out(1) ~> Sink.fromSubscriber(c2) + ClosedShape + }).run() + + c1.request(1) + c1.expectNoMsg(1.seconds) + c2.request(1) + c2.expectNext(6) + c1.expectNext(3) + c1.expectComplete() + c2.expectComplete() + } + + "cancel upstream when downstreams cancel" in assertAllStagesStopped { + val p1 = TestPublisher.probe[Int]() + val c1 = TestSubscriber.probe[Int]() + val c2 = TestSubscriber.probe[Int]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + val partition = b.add(Partition[Int](2, { case l if l < 6 ⇒ 0; case _ ⇒ 1 })) + Source.fromPublisher(p1.getPublisher) ~> partition.in + partition.out(0) ~> Flow[Int].buffer(16, OverflowStrategy.backpressure) ~> Sink.fromSubscriber(c1) + partition.out(1) ~> Flow[Int].buffer(16, OverflowStrategy.backpressure) ~> Sink.fromSubscriber(c2) + ClosedShape + }).run() + + val p1Sub = p1.expectSubscription() + val sub1 = c1.expectSubscription() + val sub2 = c2.expectSubscription() + sub1.request(3) + sub2.request(3) + p1Sub.sendNext(1) + p1Sub.sendNext(8) + c1.expectNext(1) + c2.expectNext(8) + 
p1Sub.sendNext(2) + c1.expectNext(2) + sub1.cancel() + sub2.cancel() + p1Sub.expectCancellation() + } + + "work with merge" in assertAllStagesStopped { + val s = Sink.seq[Int] + val input = Set(5, 2, 9, 1, 1, 1, 10) + + val g = RunnableGraph.fromGraph(GraphDSL.create(s) { implicit b ⇒ + sink ⇒ + val partition = b.add(Partition[Int](2, { case l if l < 4 ⇒ 0; case _ ⇒ 1 })) + val merge = b.add(Merge[Int](2)) + Source(input) ~> partition.in + partition.out(0) ~> merge.in(0) + partition.out(1) ~> merge.in(1) + merge.out ~> sink.in + + ClosedShape + }) + + val result = Await.result(g.run(), 300.millis) + + result.toSet should be(input) + + } + + "stage completion is waiting for pending output" in assertAllStagesStopped { + + val c1 = TestSubscriber.probe[Int]() + val c2 = TestSubscriber.probe[Int]() + + RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒ + val partition = b.add(Partition[Int](2, { case l if l < 6 ⇒ 0; case _ ⇒ 1 })) + Source(List(6)) ~> partition.in + partition.out(0) ~> Sink.fromSubscriber(c1) + partition.out(1) ~> Sink.fromSubscriber(c2) + ClosedShape + }).run() + + c1.request(1) + c1.expectNoMsg(1.second) + c2.request(1) + c2.expectNext(6) + c1.expectComplete() + c2.expectComplete() + } + + } +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala index 7172299bf1..e614bdd7ea 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipSpec.scala index d438ddf37f..a0791705f8 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala index 7a4a013626..bca40a7173 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipSpec.scala index e4987e386c..0ac9c86266 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl @@ -129,9 +129,6 @@ class GraphZipSpec extends TwoStreamsSetup { downstream.requestNext((1, "A")) downstream.expectComplete() - - upstream1.expectNoMsg(500.millis) - upstream2.expectNoMsg(500.millis) } "complete if one side complete before requested with elements pending" in { @@ -159,9 +156,6 @@ class GraphZipSpec extends TwoStreamsSetup { downstream.requestNext((1, "A")) downstream.expectComplete() - - upstream1.expectNoMsg(500.millis) - upstream2.expectNoMsg(500.millis) } "complete if one side complete before requested with elements pending 2" in { @@ -190,9 +184,6 @@ class GraphZipSpec extends TwoStreamsSetup { upstream2.sendComplete() downstream.requestNext((1, "A")) downstream.expectComplete() - - upstream1.expectNoMsg(500.millis) - upstream2.expectNoMsg(500.millis) } commonTests() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala index ce8dfc658c..651fdf918d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ImplicitMaterializerSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ImplicitMaterializerSpec.scala deleted file mode 100644 index 458584e777..0000000000 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/ImplicitMaterializerSpec.scala +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright (C) 2014 Typesafe Inc. - */ -package akka.stream.scaladsl - -import akka.actor.{ Actor, Props } -import akka.stream.ActorMaterializerSettings -import akka.stream.testkit.AkkaSpec -import akka.testkit._ -import akka.pattern.pipe - -object ImplicitMaterializerSpec { - class SomeActor(input: List[String]) extends Actor with ImplicitMaterializer { - - override def materializerSettings = ActorMaterializerSettings(context.system) - .withInputBuffer(initialSize = 2, maxSize = 16) - - val flow = Source(input).map(_.toUpperCase()) - - def receive = { - case "run" ⇒ - // run takes an implicit ActorMaterializer parameter, which is provided by ImplicitMaterializer - import context.dispatcher - flow.runFold("")(_ + _) pipeTo sender() - } - } -} - -class ImplicitMaterializerSpec extends AkkaSpec with ImplicitSender { - import ImplicitMaterializerSpec._ - - "An ImplicitMaterializer" must { - - "provide implicit ActorMaterializer" in { - val actor = system.actorOf(Props(classOf[SomeActor], List("a", "b", "c")).withDispatcher("akka.test.stream-dispatcher")) - actor ! "run" - expectMsg("ABC") - } - } -} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala index 95f41bb1c0..f83093fc1e 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/One2OneBidiFlowSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/One2OneBidiFlowSpec.scala index a8f6b70769..1c3881d2ab 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/One2OneBidiFlowSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/One2OneBidiFlowSpec.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.util.concurrent.atomic.AtomicInteger +import akka.NotUsed + import scala.concurrent.Await import scala.concurrent.duration._ import org.scalactic.ConversionCheckedTripleEquals @@ -16,7 +18,7 @@ class One2OneBidiFlowSpec extends AkkaSpec with ConversionCheckedTripleEquals { "A One2OneBidiFlow" must { - def test(flow: Flow[Int, Int, Unit]) = + def test(flow: Flow[Int, Int, NotUsed]) = Source(List(1, 2, 3)).via(flow).grouped(10).runWith(Sink.head) "be fully transparent for valid one-to-one streams" in { diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/PublisherSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/PublisherSinkSpec.scala index 8404ba4948..bc01f061af 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/PublisherSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/PublisherSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSinkSpec.scala index 2249f82c13..24ae8b5cb2 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSinkSpec.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl import akka.actor.Status import akka.pattern.pipe -import akka.stream.ActorMaterializer +import akka.stream.{ OverflowStrategy, ActorMaterializer } import akka.stream.testkit.Utils._ import akka.stream.testkit.{ AkkaSpec, _ } @@ -97,5 +97,20 @@ class QueueSinkSpec extends AkkaSpec { queue.pull() } + "fail pull future when stream is completed" in assertAllStagesStopped { + val probe = TestPublisher.manualProbe[Int]() + val queue = Source.fromPublisher(probe).runWith(Sink.queue()) + val sub = probe.expectSubscription() + + queue.pull().pipeTo(testActor) + sub.sendNext(1) + expectMsg(Some(1)) + + sub.sendComplete() + Await.result(queue.pull(), noMsgTimeout) should be(None) + + queue.pull().onFailure { case e ⇒ e.isInstanceOf[IllegalStateException] should ===(true) } + } + } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala new file mode 100644 index 0000000000..71dab7f8e1 --- /dev/null +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/QueueSourceSpec.scala @@ -0,0 +1,228 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. 
+ */ +package akka.stream.scaladsl + +import akka.actor.{ NoSerializationVerificationNeeded, Status } +import akka.pattern.pipe +import akka.stream._ +import akka.stream.impl.QueueSource +import akka.stream.stage.OutHandler +import akka.stream.testkit.Utils._ +import akka.stream.testkit.{ AkkaSpec, TestSubscriber } +import akka.testkit.TestProbe +import scala.concurrent.duration._ +import scala.concurrent.{ Future, _ } +import akka.Done + +class QueueSourceSpec extends AkkaSpec { + implicit val materializer = ActorMaterializer() + implicit val ec = system.dispatcher + val pause = 300.millis + + def assertSuccess(f: Future[QueueOfferResult]): Unit = { + f pipeTo testActor + expectMsg(QueueOfferResult.Enqueued) + } + + object SourceTestMessages { + case object Pull extends NoSerializationVerificationNeeded + case object Finish extends NoSerializationVerificationNeeded + } + + def testSource(maxBuffer: Int, overflowStrategy: OverflowStrategy, probe: TestProbe): Source[Int, SourceQueue[Int]] = { + class QueueSourceTestStage(maxBuffer: Int, overflowStrategy: OverflowStrategy) + extends QueueSource[Int](maxBuffer, overflowStrategy) { + + override def createLogicAndMaterializedValue(inheritedAttributes: Attributes) = { + val (logic, inputStream) = super.createLogicAndMaterializedValue(inheritedAttributes) + val outHandler = logic.handlers(out.id).asInstanceOf[OutHandler] + logic.handlers(out.id) = new OutHandler { + override def onPull(): Unit = { + probe.ref ! SourceTestMessages.Pull + outHandler.onPull() + } + override def onDownstreamFinish(): Unit = { + probe.ref ! SourceTestMessages.Finish + outHandler.onDownstreamFinish() + } + + } + (logic, inputStream) + } + } + Source.fromGraph(new QueueSourceTestStage(maxBuffer, overflowStrategy)) + } + + "A QueueSourceSpec" must { + + "emit received messages to the stream" in { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(10, OverflowStrategy.fail).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + for (i ← 1 to 3) { + sub.request(1) + assertSuccess(queue.offer(i)) + s.expectNext(i) + } + + queue.watchCompletion().pipeTo(testActor) + expectNoMsg(pause) + + sub.cancel() + expectMsg(Done) + } + + "buffer when needed" in { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(100, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + for (n ← 1 to 20) assertSuccess(queue.offer(n)) + sub.request(10) + for (n ← 1 to 10) assertSuccess(queue.offer(n)) + sub.request(10) + for (n ← 11 to 20) assertSuccess(queue.offer(n)) + + for (n ← 200 to 399) assertSuccess(queue.offer(n)) + sub.request(100) + for (n ← 300 to 399) assertSuccess(queue.offer(n)) + sub.cancel() + } + + "not fail when 0 buffer space and demand is signalled" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(0, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + sub.request(1) + + assertSuccess(queue.offer(1)) + + sub.cancel() + } + + "wait for demand when buffer is 0" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(0, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + queue.offer(1).pipeTo(testActor) + expectNoMsg(pause) + sub.request(1) + expectMsg(QueueOfferResult.Enqueued) + s.expectNext(1) + sub.cancel() + } + + "finish offer and complete futures when stream completed" in assertAllStagesStopped { + val s = 
TestSubscriber.manualProbe[Int]() + val queue = Source.queue(0, OverflowStrategy.dropHead).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + + queue.watchCompletion.pipeTo(testActor) + queue.offer(1) pipeTo testActor + expectNoMsg(pause) + + sub.cancel() + + expectMsgAllOf(QueueOfferResult.QueueClosed, Done) + } + + "fail stream on buffer overflow in fail mode" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(1, OverflowStrategy.fail).to(Sink.fromSubscriber(s)).run() + s.expectSubscription + + queue.offer(1) + queue.offer(2) + s.expectError() + } + + "remember pull from downstream to send offered element immediately" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val probe = TestProbe() + val queue = testSource(1, OverflowStrategy.dropHead, probe).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + + sub.request(1) + probe.expectMsg(SourceTestMessages.Pull) + assertSuccess(queue.offer(1)) + s.expectNext(1) + sub.cancel() + } + + "fail offer future if user does not wait in backpressure mode" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(5, OverflowStrategy.backpressure).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + + for (i ← 1 to 5) assertSuccess(queue.offer(i)) + + queue.offer(6).pipeTo(testActor) + expectNoMsg(pause) + + val future = queue.offer(7) + future.onFailure { case e ⇒ e.isInstanceOf[IllegalStateException] should ===(true) } + future.onSuccess { case _ ⇒ fail() } + Await.ready(future, pause) + + sub.request(1) + s.expectNext(1) + expectMsg(QueueOfferResult.Enqueued) + sub.cancel() + } + + "complete watching future with failure if stream failed" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(1, OverflowStrategy.fail).to(Sink.fromSubscriber(s)).run() + queue.watchCompletion().pipeTo(testActor) + queue.offer(1) //need to wait when first offer is done as initialization can be done in this moment + queue.offer(2) + expectMsgClass(classOf[Status.Failure]) + } + + "return false when elemen was not added to buffer" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(1, OverflowStrategy.dropNew).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + + queue.offer(1) + queue.offer(2) pipeTo testActor + expectMsg(QueueOfferResult.Dropped) + + sub.request(1) + s.expectNext(1) + sub.cancel() + } + + "wait when buffer is full and backpressure is on" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(1, OverflowStrategy.backpressure).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + assertSuccess(queue.offer(1)) + + queue.offer(2) pipeTo testActor + expectNoMsg(pause) + + sub.request(1) + s.expectNext(1) + + sub.request(1) + s.expectNext(2) + expectMsg(QueueOfferResult.Enqueued) + + sub.cancel() + } + + "fail offer future when stream is completed" in assertAllStagesStopped { + val s = TestSubscriber.manualProbe[Int]() + val queue = Source.queue(1, OverflowStrategy.dropNew).to(Sink.fromSubscriber(s)).run() + val sub = s.expectSubscription + queue.watchCompletion().pipeTo(testActor) + sub.cancel() + expectMsg(Done) + + queue.offer(1).onFailure { case e ⇒ e.isInstanceOf[IllegalStateException] should ===(true) } + } + + } + +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SeqSinkSpec.scala 
b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SeqSinkSpec.scala index 6ffefe4d52..622accd823 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SeqSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SeqSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkForeachParallelSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkForeachParallelSpec.scala index 875a265702..452ac389b1 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkForeachParallelSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkForeachParallelSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala index aaccae0480..57eb34e6f9 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala index 6bcd64aa69..57b9484cae 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl @@ -12,6 +12,7 @@ import scala.util.Failure import scala.util.control.NoStackTrace import akka.stream._ import akka.stream.testkit._ +import akka.NotUsed class SourceSpec extends AkkaSpec with DefaultTimeout with ScalaFutures { @@ -252,7 +253,7 @@ class SourceSpec extends AkkaSpec with DefaultTimeout with ScalaFutures { } "generate an unbounded fibonacci sequence" in { - Source.unfoldInf((0, 1))({ case (a, b) ⇒ (b, a + b) → a }) + Source.unfold((0, 1))({ case (a, b) ⇒ Some((b, a + b) → a) }) .take(36) .runFold(List.empty[Int]) { case (xs, x) ⇒ x :: xs } .futureValue should ===(expected) @@ -271,7 +272,7 @@ class SourceSpec extends AkkaSpec with DefaultTimeout with ScalaFutures { "A Source" must { "suitably override attribute handling methods" in { import Attributes._ - val s: Source[Int, Unit] = Source.single(42).withAttributes(asyncBoundary).addAttributes(none).named("") + val s: Source[Int, NotUsed] = Source.single(42).withAttributes(asyncBoundary).addAttributes(none).named("") } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/StageActorRefSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/StageActorRefSpec.scala index 3d866e7221..92b8a16512 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/StageActorRefSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/StageActorRefSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
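Two of the `SourceSpec` changes above reflect API moves made throughout this PR: materialized values of type `Unit` become `akka.NotUsed`, and the removed `unfoldInf` is expressed with plain `Source.unfold` by always returning `Some`. A minimal sketch of both, using only the calls visible in the updated test:

```scala
import akka.NotUsed
import akka.stream.scaladsl.Source

// A source with no useful materialized value is now typed with NotUsed instead of Unit.
val single: Source[Int, NotUsed] = Source.single(42)

// An unbounded unfold: returning Some(nextState -> element) on every step never completes,
// here producing the Fibonacci numbers as in the rewritten SourceSpec test.
val fibonacci = Source.unfold((0, 1)) { case (a, b) ⇒ Some((b, a + b) → a) }
```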
*/ package akka.stream.scaladsl @@ -186,23 +186,23 @@ object StageActorRefSpec { val p: Promise[Int] = Promise() val logic = new GraphStageLogic(shape) { - implicit def self = stageActorRef // must be a `def`, we want self to be the sender for our replies + implicit def self = stageActor.ref // must be a `def`; we want self to be the sender for our replies var sum: Int = 0 override def preStart(): Unit = { pull(in) - probe ! getStageActorRef(behaviour) + probe ! getStageActor(behaviour).ref } def behaviour(m: (ActorRef, Any)): Unit = { m match { case (sender, Add(n)) ⇒ sum += n case (sender, PullNow) ⇒ pull(in) - case (sender, CallInitStageActorRef) ⇒ sender ! getStageActorRef(behaviour) + case (sender, CallInitStageActorRef) ⇒ sender ! getStageActor(behaviour).ref case (sender, BecomeStringEcho) ⇒ - getStageActorRef({ + getStageActor { case (theSender, msg) ⇒ theSender ! msg.toString - }) + } case (sender, StopNow) ⇒ p.trySuccess(sum) completeStage() @@ -235,4 +235,4 @@ object StageActorRefSpec { } } -} \ No newline at end of file +} diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSinkSpec.scala index e74fd1ac3c..3528a97a26 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSinkSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSinkSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSourceSpec.scala index d4b2f5880d..08cf91e1f7 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubscriberSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubstreamSubscriptionTimeoutSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubstreamSubscriptionTimeoutSpec.scala index f028a5c8cc..0d486d389f 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubstreamSubscriptionTimeoutSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SubstreamSubscriptionTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
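The `StageActorRefSpec` changes above replace `getStageActorRef` with `getStageActor`, which returns a `StageActor` whose `ref` and `watch` are used explicitly. A rough sketch of a custom stage built on that API; `ReportingStage` and its `monitor` parameter are invented for illustration, while `getStageActor`, `stageActor.ref` and `watch` are the calls shown in the diff:

```scala
import akka.actor.ActorRef
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }

// Pass-through stage that reports each element to a monitor actor and
// answers any message sent to its stage actor.
class ReportingStage[T](monitor: ActorRef) extends GraphStage[FlowShape[T, T]] {
  val in = Inlet[T]("ReportingStage.in")
  val out = Outlet[T]("ReportingStage.out")
  override val shape = FlowShape(in, out)

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) {
      override def preStart(): Unit =
        // Register the receive function and watch the monitor so that its
        // termination is also signalled to this receive function.
        getStageActor {
          case (sender, msg) ⇒ sender ! s"seen: $msg"
        }.watch(monitor)

      setHandler(in, new InHandler {
        override def onPush(): Unit = {
          val elem = grab(in)
          monitor.tell(elem, stageActor.ref) // the stage actor acts as the sender
          push(out, elem)
        }
      })
      setHandler(out, new OutHandler {
        override def onPull(): Unit = pull(in)
      })
    }
}
```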
*/ package akka.stream.scaladsl @@ -41,10 +41,9 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { "groupBy and splitwhen" must { "timeout and cancel substream publishers when no-one subscribes to them after some time (time them out)" in assertAllStagesStopped { - val publisherProbe = TestPublisher.probe[Int]() - val publisher = Source.fromPublisher(publisherProbe).groupBy(3, _ % 3).lift(_ % 3).runWith(Sink.asPublisher(false)) val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, _])]() - publisher.subscribe(subscriber) + val publisherProbe = TestPublisher.probe[Int]() + val publisher = Source.fromPublisher(publisherProbe).groupBy(3, _ % 3).lift(_ % 3).runWith(Sink.fromSubscriber(subscriber)) val downstreamSubscription = subscriber.expectSubscription() downstreamSubscription.request(100) @@ -56,7 +55,7 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { val (_, s1) = subscriber.expectNext() // should not break normal usage val s1SubscriberProbe = TestSubscriber.manualProbe[Int]() - s1.runWith(Sink.asPublisher(false)).subscribe(s1SubscriberProbe) + s1.runWith(Sink.fromSubscriber(s1SubscriberProbe)) val s1Subscription = s1SubscriberProbe.expectSubscription() s1Subscription.request(100) s1SubscriberProbe.expectNext(1) @@ -64,7 +63,7 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { val (_, s2) = subscriber.expectNext() // should not break normal usage val s2SubscriberProbe = TestSubscriber.manualProbe[Int]() - s2.runWith(Sink.asPublisher(false)).subscribe(s2SubscriberProbe) + s2.runWith(Sink.fromSubscriber(s2SubscriberProbe)) val s2Subscription = s2SubscriberProbe.expectSubscription() s2Subscription.request(100) s2SubscriberProbe.expectNext(2) @@ -74,7 +73,8 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { // sleep long enough for it to be cleaned up Thread.sleep(1500) - val f = s3.runWith(Sink.head).recover { case _: SubscriptionTimeoutException ⇒ "expected" } + // Must be a Sink.seq, otherwise there is a race due to the concat in the `lift` implementation + val f = s3.runWith(Sink.seq).recover { case _: SubscriptionTimeoutException ⇒ "expected" } Await.result(f, 300.millis) should equal("expected") publisherProbe.sendComplete() @@ -82,9 +82,8 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { "timeout and stop groupBy parent actor if none of the substreams are actually consumed" in assertAllStagesStopped { val publisherProbe = TestPublisher.probe[Int]() - val publisher = Source.fromPublisher(publisherProbe).groupBy(2, _ % 2).lift(_ % 2).runWith(Sink.asPublisher(false)) val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, _])]() - publisher.subscribe(subscriber) + val publisher = Source.fromPublisher(publisherProbe).groupBy(2, _ % 2).lift(_ % 2).runWith(Sink.fromSubscriber(subscriber)) val downstreamSubscription = subscriber.expectSubscription() downstreamSubscription.request(100) @@ -100,9 +99,8 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { "not timeout and cancel substream publishers when they have been subscribed to" in { val publisherProbe = TestPublisher.probe[Int]() - val publisher = Source.fromPublisher(publisherProbe).groupBy(2, _ % 2).lift(_ % 2).runWith(Sink.asPublisher(false)) val subscriber = TestSubscriber.manualProbe[(Int, Source[Int, _])]() - publisher.subscribe(subscriber) + val publisher = Source.fromPublisher(publisherProbe).groupBy(2, _ % 2).lift(_ % 
2).runWith(Sink.fromSubscriber(subscriber)) val downstreamSubscription = subscriber.expectSubscription() downstreamSubscription.request(100) @@ -113,7 +111,7 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { val (_, s1) = subscriber.expectNext() // should not break normal usage val s1SubscriberProbe = TestSubscriber.manualProbe[Int]() - s1.runWith(Sink.asPublisher(false)).subscribe(s1SubscriberProbe) + s1.runWith(Sink.fromSubscriber(s1SubscriberProbe)) val s1Sub = s1SubscriberProbe.expectSubscription() s1Sub.request(1) s1SubscriberProbe.expectNext(1) @@ -121,7 +119,7 @@ class SubstreamSubscriptionTimeoutSpec(conf: String) extends AkkaSpec(conf) { val (_, s2) = subscriber.expectNext() // should not break normal usage val s2SubscriberProbe = TestSubscriber.manualProbe[Int]() - s2.runWith(Sink.asPublisher(false)).subscribe(s2SubscriberProbe) + s2.runWith(Sink.fromSubscriber(s2SubscriberProbe)) val s2Sub = s2SubscriberProbe.expectSubscription() // sleep long enough for timeout to trigger if not canceled diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TestConfig.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TestConfig.scala index d2182f7b8f..9aa01457bb 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TestConfig.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TestConfig.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TickSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TickSourceSpec.scala index d241efa175..1f46315af7 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TickSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/TickSourceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream/build.sbt b/akka-stream/build.sbt new file mode 100644 index 0000000000..e1ee341328 --- /dev/null +++ b/akka-stream/build.sbt @@ -0,0 +1,10 @@ +import akka._ +import com.typesafe.tools.mima.plugin.MimaKeys +import spray.boilerplate.BoilerplatePlugin._ + +AkkaBuild.defaultSettings +Formatting.formatSettings +OSGi.stream +Dependencies.stream +MimaKeys.previousArtifacts := akkaStreamAndHttpPreviousArtifacts("akka-stream").value +Boilerplate.settings diff --git a/akka-stream/src/main/boilerplate/akka/stream/javadsl/GraphCreate.scala.template b/akka-stream/src/main/boilerplate/akka/stream/javadsl/GraphCreate.scala.template index d123124b70..920c09a154 100644 --- a/akka-stream/src/main/boilerplate/akka/stream/javadsl/GraphCreate.scala.template +++ b/akka-stream/src/main/boilerplate/akka/stream/javadsl/GraphCreate.scala.template @@ -6,12 +6,13 @@ package akka.stream.javadsl import akka.stream.scaladsl import akka.stream.{ Inlet, Shape, Graph } import akka.japi.function +import akka.NotUsed private[stream] abstract class GraphCreate { /** * Creates a new [[Graph]] of the given [[Shape]] by passing a [[GraphDSL.Builder]] to the given create function. 
*/ - def create[S <: Shape](block: function.Function[GraphDSL.Builder[Unit], S]): Graph[S, Unit] = + def create[S <: Shape](block: function.Function[GraphDSL.Builder[NotUsed], S]): Graph[S, NotUsed] = scaladsl.GraphDSL.create() { b ⇒ block.apply(b.asJava) } /** diff --git a/akka-stream/src/main/boilerplate/akka/stream/javadsl/UnzipWith.scala.template b/akka-stream/src/main/boilerplate/akka/stream/javadsl/UnzipWith.scala.template index a1677a66e0..b1312fc14b 100644 --- a/akka-stream/src/main/boilerplate/akka/stream/javadsl/UnzipWith.scala.template +++ b/akka-stream/src/main/boilerplate/akka/stream/javadsl/UnzipWith.scala.template @@ -8,6 +8,7 @@ import akka.stream.scaladsl import akka.japi.function import akka.japi.Pair import akka.japi.tuple._ +import akka.NotUsed /** * Split one stream into several streams using a splitting function. @@ -27,7 +28,7 @@ object UnzipWith { * * @param f unzipping-function from the input value to the pair of output values */ - def create[In, A, B](f: function.Function[In, Pair[A, B]]): Graph[FanOutShape2[In, A, B], Unit] = + def create[In, A, B](f: function.Function[In, Pair[A, B]]): Graph[FanOutShape2[In, A, B], NotUsed] = scaladsl.UnzipWith[In, A, B]((in: In) => f.apply(in) match { case Pair(a, b) => (a, b) }) @@ -35,7 +36,7 @@ object UnzipWith { * * @param f unzipping-function from the input value to the output values */ - def create1[In, [#T1#]](f: function.Function[In, Tuple1[[#T1#]]]): Graph[FanOutShape1[In, [#T1#]], Unit] = + def create1[In, [#T1#]](f: function.Function[In, Tuple1[[#T1#]]]): Graph[FanOutShape1[In, [#T1#]], NotUsed] = scaladsl.UnzipWith[In, [#T1#]]((in: In) => f.apply(in).toScala)# ] diff --git a/akka-stream/src/main/boilerplate/akka/stream/javadsl/ZipWith.scala.template b/akka-stream/src/main/boilerplate/akka/stream/javadsl/ZipWith.scala.template index 56116d009b..744e264958 100644 --- a/akka-stream/src/main/boilerplate/akka/stream/javadsl/ZipWith.scala.template +++ b/akka-stream/src/main/boilerplate/akka/stream/javadsl/ZipWith.scala.template @@ -6,6 +6,7 @@ package akka.stream.javadsl import akka.stream._ import akka.stream.scaladsl import akka.japi.function +import akka.NotUsed /** * Combine the elements of multiple streams into a stream of combined elements using a combiner function. @@ -26,7 +27,7 @@ object ZipWith { * @param f zipping-function from the input values to the output value * @param attributes optional attributes for this vertex */ - def create[A, B, Out](f: function.Function2[A, B, Out]): Graph[FanInShape2[A, B, Out], Unit] = + def create[A, B, Out](f: function.Function2[A, B, Out]): Graph[FanInShape2[A, B, Out], NotUsed] = scaladsl.ZipWith(f.apply _) [3..20#/** Create a new `ZipWith` specialized for 1 inputs. 
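The `ZipWith` factories touched above now produce graphs materializing `NotUsed`; how a `ZipWith` is wired is unchanged. A small sketch of the two-input case from the scaladsl side (the demo values and the `ZipWithDemo` object are invented; `ZipWith`, `GraphDSL` and the shapes come from this diff):

```scala
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, ClosedShape }
import akka.stream.scaladsl._

object ZipWithDemo extends App {
  implicit val system = ActorSystem("zipwith-demo")
  implicit val materializer = ActorMaterializer()

  val graph = RunnableGraph.fromGraph(GraphDSL.create(Sink.foreach[String](println)) { implicit b ⇒ sink ⇒
    import GraphDSL.Implicits._
    // Combine one element from each input into a single output element.
    val zip = b.add(ZipWith[Int, String, String]((n, s) ⇒ s"$s-$n"))

    Source(1 to 3) ~> zip.in0
    Source(List("a", "b", "c")) ~> zip.in1
    zip.out ~> sink.in
    ClosedShape
  })

  graph.run() // prints a-1, b-2, c-3
}
```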
@@ -34,7 +35,7 @@ object ZipWith { * @param f zipping-function from the input values to the output value * @param attributes optional attributes for this vertex */ - def create1[[#T1#], Out](f: function.Function1[[#T1#], Out]): Graph[FanInShape1[[#T1#], Out], Unit] = + def create1[[#T1#], Out](f: function.Function1[[#T1#], Out]): Graph[FanInShape1[[#T1#], Out], NotUsed] = scaladsl.ZipWith(f.apply _)# ] diff --git a/akka-stream/src/main/boilerplate/akka/stream/scaladsl/GraphApply.scala.template b/akka-stream/src/main/boilerplate/akka/stream/scaladsl/GraphApply.scala.template index 721ba2a11d..6af4c5ea84 100644 --- a/akka-stream/src/main/boilerplate/akka/stream/scaladsl/GraphApply.scala.template +++ b/akka-stream/src/main/boilerplate/akka/stream/scaladsl/GraphApply.scala.template @@ -3,6 +3,7 @@ */ package akka.stream.scaladsl +import akka.NotUsed import akka.stream.impl.StreamLayout import akka.stream.impl.StreamLayout.Module import akka.stream.{ Graph, Attributes, Shape } @@ -11,7 +12,7 @@ trait GraphApply { /** * Creates a new [[Graph]] by passing a [[GraphDSL.Builder]] to the given create function. */ - def create[S <: Shape]()(buildBlock: GraphDSL.Builder[Unit] ⇒ S): Graph[S, Unit] = { + def create[S <: Shape]()(buildBlock: GraphDSL.Builder[NotUsed] ⇒ S): Graph[S, NotUsed] = { val builder = new GraphDSL.Builder val s = buildBlock(builder) val mod = builder.module.nest().replaceShape(s) diff --git a/akka-stream/src/main/boilerplate/akka/stream/scaladsl/ZipWithApply.scala.template b/akka-stream/src/main/boilerplate/akka/stream/scaladsl/ZipWithApply.scala.template index 4afdec4342..4cad1f31a6 100644 --- a/akka-stream/src/main/boilerplate/akka/stream/scaladsl/ZipWithApply.scala.template +++ b/akka-stream/src/main/boilerplate/akka/stream/scaladsl/ZipWithApply.scala.template @@ -31,13 +31,22 @@ class ZipWith1[[#A1#], O] (zipper: ([#A1#]) ⇒ O) extends GraphStage[FanInShape ] override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { - var pending = 1 + var pending = ##0 // Without this field the completion signalling would take one extra pull var willShutDown = false private def pushAll(): Unit = { push(out, zipper([#grab(in0)#])) if (willShutDown) completeStage() + else { + [#pull(in0)# + ] + } + } + + override def preStart(): Unit = { + [#pull(in0)# + ] } [#setHandler(in0, new InHandler { @@ -56,17 +65,13 @@ class ZipWith1[[#A1#], O] (zipper: ([#A1#]) ⇒ O) extends GraphStage[FanInShape setHandler(out, new OutHandler { override def onPull(): Unit = { - pending = shape.inlets.size - if (willShutDown) completeStage() - else { - [#pull(in0)# - ] - } + pending += shape.inlets.size + if (pending == ##0) pushAll() } }) } - override def toString = "Zip" + override def toString = "ZipWith1" } # diff --git a/akka-stream/src/main/java/akka/stream/javadsl/AsPublisher.java b/akka-stream/src/main/java/akka/stream/javadsl/AsPublisher.java new file mode 100644 index 0000000000..aaba7caca7 --- /dev/null +++ b/akka-stream/src/main/java/akka/stream/javadsl/AsPublisher.java @@ -0,0 +1,8 @@ +/** + * Copyright (C) 2016 Typesafe Inc. 
+ */ +package akka.stream.javadsl; + +public enum AsPublisher { + WITH_FANOUT, WITHOUT_FANOUT +} diff --git a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala index a482f8224d..55d6e95a2f 100644 --- a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala +++ b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/Attributes.scala b/akka-stream/src/main/scala/akka/stream/Attributes.scala index db6091ca57..f895e731cb 100644 --- a/akka-stream/src/main/scala/akka/stream/Attributes.scala +++ b/akka-stream/src/main/scala/akka/stream/Attributes.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream @@ -143,6 +143,7 @@ final case class Attributes(attributeList: List[Attributes.Attribute] = Nil) { if (i.hasNext) i.next() match { case Name(n) ⇒ + // FIXME this URLEncode is a bug IMO, if that format is important then that is how it should be store in Name val nn = URLEncoder.encode(n, "UTF-8") if (buf ne null) concatNames(i, null, buf.append('-').append(nn)) else if (first ne null) { @@ -171,11 +172,12 @@ object Attributes { final case class Name(n: String) extends Attribute final case class InputBuffer(initial: Int, max: Int) extends Attribute final case class LogLevels(onElement: Logging.LogLevel, onFinish: Logging.LogLevel, onFailure: Logging.LogLevel) extends Attribute + final case object AsyncBoundary extends Attribute + object LogLevels { /** Use to disable logging on certain operations when configuring [[Attributes.LogLevels]] */ final val Off: Logging.LogLevel = Logging.levelFor("off").get } - final case object AsyncBoundary extends Attribute /** * INTERNAL API diff --git a/akka-stream/src/main/scala/akka/stream/Fusing.scala b/akka-stream/src/main/scala/akka/stream/Fusing.scala index fb676b6ba8..c7f67c978a 100644 --- a/akka-stream/src/main/scala/akka/stream/Fusing.scala +++ b/akka-stream/src/main/scala/akka/stream/Fusing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/Graph.scala b/akka-stream/src/main/scala/akka/stream/Graph.scala index ca64f92017..e2358df4ad 100644 --- a/akka-stream/src/main/scala/akka/stream/Graph.scala +++ b/akka-stream/src/main/scala/akka/stream/Graph.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/Materializer.scala b/akka-stream/src/main/scala/akka/stream/Materializer.scala index 056dc4f631..ff1187e580 100644 --- a/akka-stream/src/main/scala/akka/stream/Materializer.scala +++ b/akka-stream/src/main/scala/akka/stream/Materializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/OverflowStrategy.scala b/akka-stream/src/main/scala/akka/stream/OverflowStrategy.scala index 25f0fb5c89..5a4acc513b 100644 --- a/akka-stream/src/main/scala/akka/stream/OverflowStrategy.scala +++ b/akka-stream/src/main/scala/akka/stream/OverflowStrategy.scala @@ -1,50 +1,55 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream +import OverflowStrategies._ + /** - * Represents a strategy that decides how to deal with a buffer that is full but is about to receive a new element. + * Represents a strategy that decides how to deal with the buffer of a time-based stage + * that is full but is about to receive a new element. */ -sealed abstract class OverflowStrategy extends Serializable -sealed trait DelayOverflowStrategy extends Serializable +sealed abstract class DelayOverflowStrategy extends Serializable -private[akka] trait BaseOverflowStrategy { +final case class BufferOverflowException(msg: String) extends RuntimeException(msg) +/** + * Represents a strategy that decides how to deal with a buffer that is full but is + * about to receive a new element. + */ +sealed abstract class OverflowStrategy extends DelayOverflowStrategy +private[akka] object OverflowStrategies { /** * INTERNAL API */ - private[akka] case object DropHead extends OverflowStrategy with DelayOverflowStrategy - + private[akka] case object DropHead extends OverflowStrategy /** * INTERNAL API */ - private[akka] case object DropTail extends OverflowStrategy with DelayOverflowStrategy - + private[akka] case object DropTail extends OverflowStrategy /** * INTERNAL API */ - private[akka] case object DropBuffer extends OverflowStrategy with DelayOverflowStrategy - + private[akka] case object DropBuffer extends OverflowStrategy /** * INTERNAL API */ - private[akka] case object DropNew extends OverflowStrategy with DelayOverflowStrategy - + private[akka] case object DropNew extends OverflowStrategy /** * INTERNAL API */ - private[akka] case object Backpressure extends OverflowStrategy with DelayOverflowStrategy - + private[akka] case object Backpressure extends OverflowStrategy /** * INTERNAL API */ - private[akka] case object Fail extends OverflowStrategy with DelayOverflowStrategy { - final case class BufferOverflowException(msg: String) extends RuntimeException(msg) - } + private[akka] case object Fail extends OverflowStrategy + /** + * INTERNAL API + */ + private[akka] case object EmitEarly extends DelayOverflowStrategy } -object OverflowStrategy extends BaseOverflowStrategy { +object OverflowStrategy { /** * If the buffer is full when a new element arrives, drops the oldest element from the buffer to make space for * the new element. @@ -79,12 +84,7 @@ object OverflowStrategy extends BaseOverflowStrategy { def fail: OverflowStrategy = Fail } -object DelayOverflowStrategy extends BaseOverflowStrategy { - /** - * INTERNAL API - */ - private[akka] case object EmitEarly extends DelayOverflowStrategy - +object DelayOverflowStrategy { /** * If the buffer is full when a new element is available this strategy sends the next element downstream without waiting */ diff --git a/akka-stream/src/main/scala/akka/stream/Queue.scala b/akka-stream/src/main/scala/akka/stream/Queue.scala deleted file mode 100644 index 919edba4f3..0000000000 --- a/akka-stream/src/main/scala/akka/stream/Queue.scala +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright (C) 2015 Typesafe Inc. - */ -package akka.stream - -import scala.concurrent.Future - -/** - * This trait allows to have the queue as a data source for some stream. - */ -trait SourceQueue[T] { - - /** - * Method offers next element to a stream and returns future that: - * - competes with true if element is consumed by a stream - * - competes with false when stream dropped offered element - * - fails if stream is completed or cancelled.
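The hunk above flattens the strategy hierarchy: `OverflowStrategy` now extends `DelayOverflowStrategy`, the internal case objects move into `OverflowStrategies`, and `BufferOverflowException` becomes a top-level class in `akka.stream`. User code keeps using the factory methods; a small sketch with the `buffer` operator (the sizes, values and the `BufferDemo` object are arbitrary):

```scala
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, OverflowStrategy }
import akka.stream.scaladsl.{ Sink, Source }

import scala.concurrent.Future

object BufferDemo extends App {
  implicit val system = ActorSystem("buffer-demo")
  implicit val materializer = ActorMaterializer()

  // dropHead: when the 64-element buffer is full, the oldest buffered element is discarded.
  val kept: Future[Seq[Int]] =
    Source(1 to 1000).buffer(64, OverflowStrategy.dropHead).runWith(Sink.seq)

  // fail: a full buffer instead completes the stream with a BufferOverflowException.
  val strict: Future[Seq[Int]] =
    Source(1 to 1000).buffer(64, OverflowStrategy.fail).runWith(Sink.seq)
}
```

Because `OverflowStrategy` now extends `DelayOverflowStrategy`, the same values can also be passed to time-based stages that expect a `DelayOverflowStrategy`.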
- * - * @param elem element to send to a stream - */ - def offer(elem: T): Future[Boolean] -} - -/** - * Trait allows to have the queue as a sink for some stream. - * "SinkQueue" pulls data from stream with backpressure mechanism. - */ -trait SinkQueue[T] { - - /** - * Method pulls elements from stream and returns future that: - * - fails if stream is failed - * - completes with None in case if stream is completed - * - completes with `Some(element)` in case next element is available from stream. - */ - def pull(): Future[Option[T]] -} diff --git a/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala b/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala new file mode 100644 index 0000000000..26addfe1ed --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/QueueOfferResult.scala @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.stream + +sealed abstract class QueueOfferResult + +/** + * Contains the types that are used as return types for async callbacks to streams + */ +object QueueOfferResult { + + /** + * Type used to indicate that the stream successfully enqueued an element + */ + final case object Enqueued extends QueueOfferResult + + /** + * Type used to indicate that the stream dropped an element + */ + final case object Dropped extends QueueOfferResult + + /** + * Type used to indicate that the stream failed before or during the call to the stream + * @param cause the exception that the stream failed with + */ + final case class Failure(cause: Throwable) extends QueueOfferResult + + /** + * Type used to indicate that the stream completed before the call + */ + case object QueueClosed extends QueueOfferResult +} diff --git a/akka-stream/src/main/scala/akka/stream/Shape.scala b/akka-stream/src/main/scala/akka/stream/Shape.scala index f478505a62..43ab6bf33f 100644 --- a/akka-stream/src/main/scala/akka/stream/Shape.scala +++ b/akka-stream/src/main/scala/akka/stream/Shape.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/StreamLimitReachedException.scala b/akka-stream/src/main/scala/akka/stream/StreamLimitReachedException.scala index abffbb5b05..6eb8930258 100644 --- a/akka-stream/src/main/scala/akka/stream/StreamLimitReachedException.scala +++ b/akka-stream/src/main/scala/akka/stream/StreamLimitReachedException.scala @@ -1,3 +1,6 @@ +/** + * Copyright (C) 2015 Typesafe Inc. + */ package akka.stream class StreamLimitReachedException(val n: Long) extends RuntimeException(s"limit of $n reached") diff --git a/akka-stream/src/main/scala/akka/stream/StreamTcpException.scala b/akka-stream/src/main/scala/akka/stream/StreamTcpException.scala index 6fde9797cf..7a68d0edf2 100644 --- a/akka-stream/src/main/scala/akka/stream/StreamTcpException.scala +++ b/akka-stream/src/main/scala/akka/stream/StreamTcpException.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/Supervision.scala b/akka-stream/src/main/scala/akka/stream/Supervision.scala index c02a71f946..f2850b0b88 100644 --- a/akka-stream/src/main/scala/akka/stream/Supervision.scala +++ b/akka-stream/src/main/scala/akka/stream/Supervision.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc.
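Together with the deleted `akka.stream.Queue` traits above, the new `QueueOfferResult` changes the queue endpoints: `offer` on the materialized `SourceQueue` now answers with a `QueueOfferResult` instead of a plain `Boolean`. A sketch of both ends, using only calls that appear in the updated specs (the `QueueDemo` object and the printed messages are illustrative):

```scala
import akka.Done
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, OverflowStrategy, QueueOfferResult }
import akka.stream.scaladsl._

import scala.concurrent.Future

object QueueDemo extends App {
  implicit val system = ActorSystem("queue-demo")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // Producer side: offer() reports how each element was handled.
  val queue: SourceQueue[Int] =
    Source.queue[Int](16, OverflowStrategy.backpressure)
      .to(Sink.foreach[Int](println))
      .run()

  queue.offer(1).foreach {
    case QueueOfferResult.Enqueued    ⇒ println("enqueued")
    case QueueOfferResult.Dropped     ⇒ println("dropped")
    case QueueOfferResult.Failure(ex) ⇒ println(s"offer failed: $ex")
    case QueueOfferResult.QueueClosed ⇒ println("stream already completed")
  }

  val completed: Future[Done] = queue.watchCompletion()

  // Consumer side: pull() completes with Some(element), or None once the stream is done.
  val sinkQueue: SinkQueue[Int] = Source(1 to 3).runWith(Sink.queue[Int]())
  val next: Future[Option[Int]] = sinkQueue.pull()
}
```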
*/ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/ThrottleMode.scala b/akka-stream/src/main/scala/akka/stream/ThrottleMode.scala index f3c65023b7..58300f5ad3 100644 --- a/akka-stream/src/main/scala/akka/stream/ThrottleMode.scala +++ b/akka-stream/src/main/scala/akka/stream/ThrottleMode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/Transformer.scala b/akka-stream/src/main/scala/akka/stream/Transformer.scala index ea2a46e887..414ca2059e 100644 --- a/akka-stream/src/main/scala/akka/stream/Transformer.scala +++ b/akka-stream/src/main/scala/akka/stream/Transformer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala index 535628d7f4..dad5520190 100644 --- a/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala +++ b/akka-stream/src/main/scala/akka/stream/actor/ActorPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.actor diff --git a/akka-stream/src/main/scala/akka/stream/actor/ActorSubscriber.scala b/akka-stream/src/main/scala/akka/stream/actor/ActorSubscriber.scala index 25c66987b3..2013ce03f0 100644 --- a/akka-stream/src/main/scala/akka/stream/actor/ActorSubscriber.scala +++ b/akka-stream/src/main/scala/akka/stream/actor/ActorSubscriber.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.actor diff --git a/akka-stream/src/main/scala/akka/stream/extra/Implicits.scala b/akka-stream/src/main/scala/akka/stream/extra/Implicits.scala index f1accb8562..b42577e84b 100644 --- a/akka-stream/src/main/scala/akka/stream/extra/Implicits.scala +++ b/akka-stream/src/main/scala/akka/stream/extra/Implicits.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.extra diff --git a/akka-stream/src/main/scala/akka/stream/extra/Timed.scala b/akka-stream/src/main/scala/akka/stream/extra/Timed.scala index 58d2a1a806..53da9b85d0 100644 --- a/akka-stream/src/main/scala/akka/stream/extra/Timed.scala +++ b/akka-stream/src/main/scala/akka/stream/extra/Timed.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.extra diff --git a/akka-stream/src/main/scala/akka/stream/impl/AcknowledgePublisher.scala b/akka-stream/src/main/scala/akka/stream/impl/AcknowledgePublisher.scala deleted file mode 100644 index 1f7ed0e1e1..0000000000 --- a/akka-stream/src/main/scala/akka/stream/impl/AcknowledgePublisher.scala +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Copyright (C) 2015 Typesafe Inc. 
- */ -package akka.stream.impl - -import akka.actor.{ ActorRef, Props } -import akka.stream.OverflowStrategy -import akka.stream.OverflowStrategy._ -import akka.stream.actor.ActorPublisherMessage.Request -import akka.stream.impl.AcknowledgePublisher.{ Rejected, Ok } - -/** - * INTERNAL API - */ -private[akka] object AcknowledgePublisher { - def props(bufferSize: Int, overflowStrategy: OverflowStrategy) = - Props(new AcknowledgePublisher(bufferSize, overflowStrategy)) - - case class Ok() - case class Rejected() -} - -/** - * INTERNAL API - */ -private[akka] class AcknowledgePublisher(bufferSize: Int, overflowStrategy: OverflowStrategy) - extends ActorRefSourceActor(bufferSize, overflowStrategy) { - - var backpressedElem: Option[ActorRef] = None - - override def requestElem: Receive = { - case _: Request ⇒ - // totalDemand is tracked by super - if (bufferSize != 0) - while (totalDemand > 0L && !buffer.isEmpty) { - //if buffer is full - sent ack message to sender in case of Backpressure mode - if (buffer.isFull) backpressedElem match { - case Some(ref) ⇒ - ref ! Ok(); backpressedElem = None - case None ⇒ //do nothing - } - onNext(buffer.dequeue()) - } - } - - override def receiveElem: Receive = { - case elem if isActive ⇒ - if (totalDemand > 0L) { - onNext(elem) - sendAck(true) - } else if (bufferSize == 0) { - log.debug("Dropping element because there is no downstream demand: [{}]", elem) - sendAck(false) - } else if (!buffer.isFull) - enqueueAndSendAck(elem) - else (overflowStrategy: @unchecked) match { - case DropHead ⇒ - log.debug("Dropping the head element because buffer is full and overflowStrategy is: [DropHead]") - buffer.dropHead() - enqueueAndSendAck(elem) - case DropTail ⇒ - log.debug("Dropping the tail element because buffer is full and overflowStrategy is: [DropTail]") - buffer.dropTail() - enqueueAndSendAck(elem) - case DropBuffer ⇒ - log.debug("Dropping all the buffered elements because buffer is full and overflowStrategy is: [DropBuffer]") - buffer.clear() - enqueueAndSendAck(elem) - case DropNew ⇒ - log.debug("Dropping the new element because buffer is full and overflowStrategy is: [DropNew]") - sendAck(false) - case Fail ⇒ - log.error("Failing because buffer is full and overflowStrategy is: [Fail]") - onErrorThenStop(new Fail.BufferOverflowException(s"Buffer overflow (max capacity was: $bufferSize)!")) - case Backpressure ⇒ - log.debug("Backpressuring because buffer is full and overflowStrategy is: [Backpressure]") - sendAck(false) //does not allow to send more than buffer size - } - } - - def enqueueAndSendAck(elem: Any): Unit = { - buffer.enqueue(elem) - if (buffer.isFull && overflowStrategy == Backpressure) backpressedElem = Some(sender) - else sendAck(true) - } - - def sendAck(isOk: Boolean): Unit = { - val msg = if (isOk) Ok() else Rejected() - context.sender() ! msg - } - -} diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala index 4b5dbb9848..db581e1860 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorMaterializerImpl.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.impl import java.util.concurrent.atomic.{ AtomicBoolean, AtomicLong } import java.{ util ⇒ ju } +import akka.NotUsed import akka.actor._ import akka.event.Logging import akka.dispatch.Dispatchers @@ -134,7 +135,7 @@ private[akka] case class ActorMaterializerImpl(system: ActorSystem, assignPort(tls.plainIn, FanIn.SubInput[Any](impl, SslTlsCipherActor.UserIn)) assignPort(tls.cipherIn, FanIn.SubInput[Any](impl, SslTlsCipherActor.TransportIn)) - matVal.put(atomic, ()) + matVal.put(atomic, NotUsed) case graph: GraphModule ⇒ matGraph(graph, effectiveAttributes, matVal) diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorProcessor.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorProcessor.scala index 329e73c0d6..dd4fb3ccf6 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorProcessor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorProcessor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorPublisher.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorPublisher.scala index 15104af243..fd181e4b35 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorPublisher.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorRefBackpressureSinkStage.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorRefBackpressureSinkStage.scala index 8e7aa10f32..92b74da6a0 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorRefBackpressureSinkStage.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorRefBackpressureSinkStage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl @@ -7,7 +7,7 @@ import java.util import akka.actor._ import akka.dispatch.sysmsg.{ DeathWatchNotification, SystemMessage, Watch } -import akka.stream.stage.GraphStageLogic.StageActorRef +import akka.stream.stage.GraphStageLogic.StageActor import akka.stream.{ Inlet, SinkShape, ActorMaterializer, Attributes } import akka.stream.Attributes.InputBuffer import akka.stream.stage._ @@ -28,7 +28,7 @@ private[akka] class ActorRefBackpressureSinkStage[In](ref: ActorRef, onInitMessa override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { - implicit var self: StageActorRef = _ + implicit def self: ActorRef = stageActor.ref val buffer: util.Deque[In] = new util.ArrayDeque[In]() var acknowledgementReceived = false @@ -46,8 +46,7 @@ private[akka] class ActorRefBackpressureSinkStage[In](ref: ActorRef, onInitMessa override def preStart() = { setKeepGoing(true) - self = getStageActorRef(receive) - self.watch(ref) + getStageActor(receive).watch(ref) ref ! onInitMessage pull(in) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSinkActor.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSinkActor.scala index faaec3722c..70a70478f2 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSinkActor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSinkActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSourceActor.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSourceActor.scala index 1e9c35ed4e..c64a6e8c09 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSourceActor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSourceActor.scala @@ -1,19 +1,20 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl import akka.actor.ActorLogging import akka.actor.Props import akka.actor.Status -import akka.stream.OverflowStrategy +import akka.stream.OverflowStrategies._ +import akka.stream.{ BufferOverflowException, OverflowStrategy, OverflowStrategies } /** * INTERNAL API */ private[akka] object ActorRefSourceActor { def props(bufferSize: Int, overflowStrategy: OverflowStrategy) = { - require(overflowStrategy != OverflowStrategy.Backpressure, "Backpressure overflowStrategy not supported") + require(overflowStrategy != OverflowStrategies.Backpressure, "Backpressure overflowStrategy not supported") Props(new ActorRefSourceActor(bufferSize, overflowStrategy)) } } @@ -58,7 +59,7 @@ private[akka] class ActorRefSourceActor(bufferSize: Int, overflowStrategy: Overf log.debug("Dropping element because there is no downstream demand: [{}]", elem) else if (!buffer.isFull) buffer.enqueue(elem) - else (overflowStrategy: @unchecked) match { + else overflowStrategy match { case DropHead ⇒ log.debug("Dropping the head element because buffer is full and overflowStrategy is: [DropHead]") buffer.dropHead() @@ -76,7 +77,7 @@ private[akka] class ActorRefSourceActor(bufferSize: Int, overflowStrategy: Overf log.debug("Dropping the new element because buffer is full and overflowStrategy is: [DropNew]") case Fail ⇒ log.error("Failing because buffer is full and overflowStrategy is: [Fail]") - onErrorThenStop(new Fail.BufferOverflowException(s"Buffer overflow (max capacity was: $bufferSize)!")) + onErrorThenStop(new BufferOverflowException(s"Buffer overflow (max capacity was: $bufferSize)!")) case Backpressure ⇒ // there is a precondition check in Source.actorRefSource factory method log.debug("Backpressuring because buffer is full and overflowStrategy is: [Backpressure]") diff --git a/akka-stream/src/main/scala/akka/stream/impl/BoundedBuffer.scala b/akka-stream/src/main/scala/akka/stream/impl/BoundedBuffer.scala index ab50babe0b..b0216f09c6 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/BoundedBuffer.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/BoundedBuffer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/CompletedPublishers.scala b/akka-stream/src/main/scala/akka/stream/impl/CompletedPublishers.scala index 411c5ec6ec..8a4de0b7c3 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/CompletedPublishers.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/CompletedPublishers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/ConstantFun.scala b/akka-stream/src/main/scala/akka/stream/impl/ConstantFun.scala index c06ca93c87..cd152ba336 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ConstantFun.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ConstantFun.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl @@ -20,4 +20,8 @@ private[akka] object ConstantFun { def javaIdentityFunction[T]: JFun[T, T] = JavaIdentityFunction.asInstanceOf[JFun[T, T]] def scalaIdentityFunction[T]: T ⇒ T = conforms + + val zeroLong = (_: Any) ⇒ 0L + + val oneLong = (_: Any) ⇒ 1L } diff --git a/akka-stream/src/main/scala/akka/stream/impl/Emit.scala b/akka-stream/src/main/scala/akka/stream/impl/Emit.scala deleted file mode 100644 index f757d6910e..0000000000 --- a/akka-stream/src/main/scala/akka/stream/impl/Emit.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright (C) 2014 Typesafe Inc. - */ -package akka.stream.impl - -import scala.collection.immutable - -/** - * INTERNAL API - */ -private[akka] trait Emit { this: ActorProcessorImpl with Pump ⇒ - - // TODO performance improvement: mutable buffer? - var emits = immutable.Seq.empty[Any] - - // Save previous phase we should return to in a var to avoid allocation - private var phaseAfterFlush: TransferPhase = _ - - // Enters flushing phase if there are emits pending - def emitAndThen(andThen: TransferPhase): Unit = - if (emits.nonEmpty) { - phaseAfterFlush = andThen - nextPhase(emitting) - } else nextPhase(andThen) - - // Emits all pending elements, then returns to savedPhase - private val emitting = TransferPhase(primaryOutputs.NeedsDemand) { () ⇒ - primaryOutputs.enqueueOutputElement(emits.head) - emits = emits.tail - if (emits.isEmpty) nextPhase(phaseAfterFlush) - } - -} diff --git a/akka-stream/src/main/scala/akka/stream/impl/ExposedPublisherReceive.scala b/akka-stream/src/main/scala/akka/stream/impl/ExposedPublisherReceive.scala index 134e37300a..f38612eb6f 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ExposedPublisherReceive.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ExposedPublisherReceive.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/FanIn.scala b/akka-stream/src/main/scala/akka/stream/impl/FanIn.scala index 974e657ff9..895a29667d 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FanIn.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FanIn.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/FanOut.scala b/akka-stream/src/main/scala/akka/stream/impl/FanOut.scala index 03f4eb6ec1..8d985f8cda 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FanOut.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FanOut.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/FixedSizeBuffer.scala b/akka-stream/src/main/scala/akka/stream/impl/FixedSizeBuffer.scala index 8ac3dcbe7c..ae53622daa 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FixedSizeBuffer.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FixedSizeBuffer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/FlowModule.scala b/akka-stream/src/main/scala/akka/stream/impl/FlowModule.scala index 673c6dd674..72238ed791 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/FlowModule.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/FlowModule.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/GroupByProcessorImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/GroupByProcessorImpl.scala index 33f89c898c..62384e8d32 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/GroupByProcessorImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/GroupByProcessorImpl.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl @@ -72,7 +72,7 @@ private[akka] class GroupByProcessorImpl(settings: ActorMaterializerSettings, va if (keyToSubstreamOutput.size == maxSubstreams) throw new IllegalStateException(s"cannot open substream for key '$key': too many substreams open") val substreamOutput = createSubstreamOutput() - val substreamFlow = Source.fromPublisher(substreamOutput) + val substreamFlow = Source.fromPublisher[Any](substreamOutput) primaryOutputs.enqueueOutputElement(substreamFlow) keyToSubstreamOutput(key) = substreamOutput nextPhase(dispatchToSubstream(elem, substreamOutput)) diff --git a/akka-stream/src/main/scala/akka/stream/impl/Messages.scala b/akka-stream/src/main/scala/akka/stream/impl/Messages.scala index 02b09ddd20..a42d0ae5f9 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Messages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Messages.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/Modules.scala b/akka-stream/src/main/scala/akka/stream/impl/Modules.scala index 2180eb0523..e6db823860 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Modules.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Modules.scala @@ -1,20 +1,16 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl -import java.util.concurrent.atomic.AtomicInteger - +import akka.NotUsed import akka.actor._ import akka.stream._ -import akka.stream.impl.AcknowledgePublisher.{ Ok, Rejected } import akka.stream.impl.StreamLayout.Module -import akka.util.Timeout import org.reactivestreams._ import scala.annotation.unchecked.uncheckedVariance -import scala.concurrent.duration.{ FiniteDuration, _ } -import scala.concurrent.{ Future, Promise } +import scala.concurrent.Promise import scala.language.postfixOps /** @@ -67,10 +63,10 @@ private[akka] final class SubscriberSource[Out](val attributes: Attributes, shap * that mediate the flow of elements downstream and the propagation of * back-pressure upstream. 
*/ -private[akka] final class PublisherSource[Out](p: Publisher[Out], val attributes: Attributes, shape: SourceShape[Out]) extends SourceModule[Out, Unit](shape) { - override def create(context: MaterializationContext) = (p, ()) +private[akka] final class PublisherSource[Out](p: Publisher[Out], val attributes: Attributes, shape: SourceShape[Out]) extends SourceModule[Out, NotUsed](shape) { + override def create(context: MaterializationContext) = (p, NotUsed) - override protected def newInstance(shape: SourceShape[Out]): SourceModule[Out, Unit] = new PublisherSource[Out](p, attributes, shape) + override protected def newInstance(shape: SourceShape[Out]): SourceModule[Out, NotUsed] = new PublisherSource[Out](p, attributes, shape) override def withAttributes(attr: Attributes): Module = new PublisherSource[Out](p, attr, amendShape(attr)) } @@ -121,32 +117,3 @@ private[akka] final class ActorRefSource[Out]( override def withAttributes(attr: Attributes): Module = new ActorRefSource(bufferSize, overflowStrategy, attr, amendShape(attr)) } - -/** - * INTERNAL API - */ -private[akka] final class AcknowledgeSource[Out](bufferSize: Int, overflowStrategy: OverflowStrategy, - val attributes: Attributes, shape: SourceShape[Out], - timeout: FiniteDuration = 5 seconds) - extends SourceModule[Out, SourceQueue[Out]](shape) { - - override def create(context: MaterializationContext) = { - import akka.pattern.ask - val ref = ActorMaterializer.downcast(context.materializer).actorOf(context, - AcknowledgePublisher.props(bufferSize, overflowStrategy)) - implicit val t = Timeout(timeout) - - (akka.stream.actor.ActorPublisher[Out](ref), new SourceQueue[Out] { - implicit val ctx = context.materializer.executionContext - override def offer(out: Out): Future[Boolean] = (ref ? out).map { - case Ok() ⇒ true - case Rejected() ⇒ false - } - }) - } - - override protected def newInstance(shape: SourceShape[Out]): SourceModule[Out, SourceQueue[Out]] = - new AcknowledgeSource[Out](bufferSize, overflowStrategy, attributes, shape, timeout) - override def withAttributes(attr: Attributes): Module = - new AcknowledgeSource(bufferSize, overflowStrategy, attr, amendShape(attr), timeout) -} diff --git a/akka-stream/src/main/scala/akka/stream/impl/ReactiveStreamsCompliance.scala b/akka-stream/src/main/scala/akka/stream/impl/ReactiveStreamsCompliance.scala index f7464f50b0..2aa3ada98e 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ReactiveStreamsCompliance.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ReactiveStreamsCompliance.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/ResizableMultiReaderRingBuffer.scala b/akka-stream/src/main/scala/akka/stream/impl/ResizableMultiReaderRingBuffer.scala index 4f4c86c481..251677c78b 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ResizableMultiReaderRingBuffer.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ResizableMultiReaderRingBuffer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/SeqActorName.scala b/akka-stream/src/main/scala/akka/stream/impl/SeqActorName.scala index b088e0b95b..8790e0bca5 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/SeqActorName.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/SeqActorName.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/SinkholeSubscriber.scala b/akka-stream/src/main/scala/akka/stream/impl/SinkholeSubscriber.scala index 563af923b0..fdd9e97f22 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/SinkholeSubscriber.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/SinkholeSubscriber.scala @@ -1,9 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl import java.util.concurrent.atomic.AtomicReference +import akka.Done + import scala.concurrent.Promise import org.reactivestreams.{ Subscriber, Subscription } @@ -11,7 +13,7 @@ import org.reactivestreams.{ Subscriber, Subscription } * INTERNAL API */ -private[akka] final class SinkholeSubscriber[T](whenComplete: Promise[Unit]) extends Subscriber[T] { +private[akka] final class SinkholeSubscriber[T](whenComplete: Promise[Done]) extends Subscriber[T] { private[this] var running: Boolean = false override def onSubscribe(sub: Subscription): Unit = { @@ -28,7 +30,7 @@ private[akka] final class SinkholeSubscriber[T](whenComplete: Promise[Unit]) ext whenComplete.tryFailure(cause) } - override def onComplete(): Unit = whenComplete.trySuccess(()) + override def onComplete(): Unit = whenComplete.trySuccess(Done) override def onNext(element: T): Unit = ReactiveStreamsCompliance.requireNonNullElement(element) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala index 2bc7da30ed..1d3553f374 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala @@ -1,21 +1,25 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl import java.util.concurrent.atomic.AtomicReference - +import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Props } import akka.stream.Attributes.InputBuffer import akka.stream._ import akka.stream.impl.StreamLayout.Module -import akka.stream.stage.{ AsyncCallback, GraphStageLogic, GraphStageWithMaterializedValue, InHandler } +import akka.stream.stage._ import org.reactivestreams.{ Publisher, Subscriber } - import scala.annotation.unchecked.uncheckedVariance import scala.concurrent.{ Future, Promise } import scala.language.postfixOps import scala.util.{ Failure, Success, Try } +import akka.stream.scaladsl.SinkQueue +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ +import scala.compat.java8.OptionConverters._ +import java.util.Optional /** * INTERNAL API @@ -93,15 +97,15 @@ private[akka] final class FanoutPublisherSink[In]( * Attaches a subscriber to this stream which will just discard all received * elements. 
*/ -private[akka] final class SinkholeSink(val attributes: Attributes, shape: SinkShape[Any]) extends SinkModule[Any, Future[Unit]](shape) { +private[akka] final class SinkholeSink(val attributes: Attributes, shape: SinkShape[Any]) extends SinkModule[Any, Future[Done]](shape) { override def create(context: MaterializationContext) = { val effectiveSettings = ActorMaterializer.downcast(context.materializer).effectiveSettings(context.effectiveAttributes) - val p = Promise[Unit]() + val p = Promise[Done]() (new SinkholeSubscriber[Any](p), p.future) } - override protected def newInstance(shape: SinkShape[Any]): SinkModule[Any, Future[Unit]] = new SinkholeSink(attributes, shape) + override protected def newInstance(shape: SinkShape[Any]): SinkModule[Any, Future[Done]] = new SinkholeSink(attributes, shape) override def withAttributes(attr: Attributes): Module = new SinkholeSink(attr, amendShape(attr)) override def toString: String = "SinkholeSink" } @@ -110,11 +114,11 @@ private[akka] final class SinkholeSink(val attributes: Attributes, shape: SinkSh * INTERNAL API * Attaches a subscriber to this stream. */ -private[akka] final class SubscriberSink[In](subscriber: Subscriber[In], val attributes: Attributes, shape: SinkShape[In]) extends SinkModule[In, Unit](shape) { +private[akka] final class SubscriberSink[In](subscriber: Subscriber[In], val attributes: Attributes, shape: SinkShape[In]) extends SinkModule[In, NotUsed](shape) { - override def create(context: MaterializationContext) = (subscriber, ()) + override def create(context: MaterializationContext) = (subscriber, NotUsed) - override protected def newInstance(shape: SinkShape[In]): SinkModule[In, Unit] = new SubscriberSink[In](subscriber, attributes, shape) + override protected def newInstance(shape: SinkShape[In]): SinkModule[In, NotUsed] = new SubscriberSink[In](subscriber, attributes, shape) override def withAttributes(attr: Attributes): Module = new SubscriberSink[In](subscriber, attr, amendShape(attr)) override def toString: String = "SubscriberSink" } @@ -123,9 +127,9 @@ private[akka] final class SubscriberSink[In](subscriber: Subscriber[In], val att * INTERNAL API * A sink that immediately cancels its upstream upon materialization. 
*/ -private[akka] final class CancelSink(val attributes: Attributes, shape: SinkShape[Any]) extends SinkModule[Any, Unit](shape) { - override def create(context: MaterializationContext): (Subscriber[Any], Unit) = (new CancellingSubscriber[Any], ()) - override protected def newInstance(shape: SinkShape[Any]): SinkModule[Any, Unit] = new CancelSink(attributes, shape) +private[akka] final class CancelSink(val attributes: Attributes, shape: SinkShape[Any]) extends SinkModule[Any, NotUsed](shape) { + override def create(context: MaterializationContext): (Subscriber[Any], NotUsed) = (new CancellingSubscriber[Any], NotUsed) + override protected def newInstance(shape: SinkShape[Any]): SinkModule[Any, NotUsed] = new CancelSink(attributes, shape) override def withAttributes(attr: Attributes): Module = new CancelSink(attr, amendShape(attr)) override def toString: String = "CancelSink" } @@ -152,17 +156,17 @@ private[akka] final class ActorSubscriberSink[In](props: Props, val attributes: */ private[akka] final class ActorRefSink[In](ref: ActorRef, onCompleteMessage: Any, val attributes: Attributes, - shape: SinkShape[In]) extends SinkModule[In, Unit](shape) { + shape: SinkShape[In]) extends SinkModule[In, NotUsed](shape) { override def create(context: MaterializationContext) = { val actorMaterializer = ActorMaterializer.downcast(context.materializer) val effectiveSettings = actorMaterializer.effectiveSettings(context.effectiveAttributes) val subscriberRef = actorMaterializer.actorOf(context, ActorRefSinkActor.props(ref, effectiveSettings.maxInputBufferSize, onCompleteMessage)) - (akka.stream.actor.ActorSubscriber[In](subscriberRef), ()) + (akka.stream.actor.ActorSubscriber[In](subscriberRef), NotUsed) } - override protected def newInstance(shape: SinkShape[In]): SinkModule[In, Unit] = + override protected def newInstance(shape: SinkShape[In]): SinkModule[In, NotUsed] = new ActorRefSink[In](ref, onCompleteMessage, attributes, shape) override def withAttributes(attr: Attributes): Module = new ActorRefSink[In](ref, onCompleteMessage, attr, amendShape(attr)) @@ -242,11 +246,7 @@ private[akka] final class HeadOptionStage[T] extends GraphStageWithMaterializedV * INTERNAL API */ private[akka] class QueueSink[T]() extends GraphStageWithMaterializedValue[SinkShape[T], SinkQueue[T]] { - trait RequestElementCallback[E] { - val requestElement = new AtomicReference[AnyRef](Nil) - } - - type Requested[E] = Promise[Option[T]] + type Requested[E] = Promise[Option[E]] val in = Inlet[T]("queueSink.in") override val shape: SinkShape[T] = SinkShape.of(in) @@ -260,15 +260,17 @@ private[akka] class QueueSink[T]() extends GraphStageWithMaterializedValue[SinkS val buffer = FixedSizeBuffer[Received[T]](maxBuffer + 1) var currentRequest: Option[Requested[T]] = None - val stageLogic = new GraphStageLogic(shape) with RequestElementCallback[Requested[T]] { + val stageLogic = new GraphStageLogic(shape) with CallbackWrapper[Requested[T]] { override def preStart(): Unit = { setKeepGoing(true) - val list = requestElement.getAndSet(callback.invoke _).asInstanceOf[List[Requested[T]]] - list.reverse.foreach(callback.invoke) + initCallback(callback.invoke) pull(in) } + override def postStop(): Unit = stopCallback(promise ⇒ + promise.failure(new IllegalStateException("Stream is terminated. 
QueueSink is detached"))) + private val callback: AsyncCallback[Requested[T]] = getAsyncCallback(promise ⇒ currentRequest match { case Some(_) ⇒ @@ -310,19 +312,15 @@ private[akka] class QueueSink[T]() extends GraphStageWithMaterializedValue[SinkS (stageLogic, new SinkQueue[T] { override def pull(): Future[Option[T]] = { - val ref = stageLogic.requestElement val p = Promise[Option[T]] - ref.get() match { - case l: List[_] ⇒ - if (!ref.compareAndSet(l, p :: l)) - ref.get() match { - case _: List[_] ⇒ throw new IllegalStateException("Concurrent call of SinkQueue.pull() is detected") - case f: Function1[_, _] ⇒ f.asInstanceOf[Requested[T] ⇒ Unit](p) - } - case f: Function1[_, _] ⇒ f.asInstanceOf[Requested[T] ⇒ Unit](p) - } + stageLogic.invoke(p) p.future } }) } } + +private[akka] final class SinkQueueAdapter[T](delegate: SinkQueue[T]) extends akka.stream.javadsl.SinkQueue[T] { + import akka.dispatch.ExecutionContexts.{ sameThreadExecutionContext ⇒ same } + def pull(): CompletionStage[Optional[T]] = delegate.pull().map(_.asJava)(same).toJava +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/Sources.scala b/akka-stream/src/main/scala/akka/stream/impl/Sources.scala new file mode 100644 index 0000000000..fd3d4ae0e1 --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/impl/Sources.scala @@ -0,0 +1,135 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package akka.stream.impl + +import akka.stream.OverflowStrategies._ +import akka.stream._ +import akka.stream.stage._ +import scala.concurrent.{ Future, Promise } +import akka.stream.scaladsl.SourceQueue +import akka.Done +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ + +/** + * INTERNAL API + */ +private[akka] class QueueSource[T](maxBuffer: Int, overflowStrategy: OverflowStrategy) extends GraphStageWithMaterializedValue[SourceShape[T], SourceQueue[T]] { + type Offered = Promise[QueueOfferResult] + + val out = Outlet[T]("queueSource.out") + override val shape: SourceShape[T] = SourceShape.of(out) + val completion = Promise[Done] + + override def createLogicAndMaterializedValue(inheritedAttributes: Attributes) = { + val stageLogic = new GraphStageLogic(shape) with CallbackWrapper[(T, Offered)] { + val buffer = if (maxBuffer == 0) null else FixedSizeBuffer[T](maxBuffer) + var pendingOffer: Option[(T, Offered)] = None + var pulled = false + + override def preStart(): Unit = initCallback(callback.invoke) + override def postStop(): Unit = stopCallback { + case (elem, promise) ⇒ promise.failure(new IllegalStateException("Stream is terminated. 
SourceQueue is detached")) + } + + private def enqueueAndSuccess(elem: T, promise: Offered): Unit = { + buffer.enqueue(elem) + promise.success(QueueOfferResult.Enqueued) + } + + private def bufferElem(elem: T, promise: Offered): Unit = { + if (!buffer.isFull) { + enqueueAndSuccess(elem, promise) + } else overflowStrategy match { + case DropHead ⇒ + buffer.dropHead() + enqueueAndSuccess(elem, promise) + case DropTail ⇒ + buffer.dropTail() + enqueueAndSuccess(elem, promise) + case DropBuffer ⇒ + buffer.clear() + enqueueAndSuccess(elem, promise) + case DropNew ⇒ + promise.success(QueueOfferResult.Dropped) + case Fail ⇒ + val bufferOverflowException = new BufferOverflowException(s"Buffer overflow (max capacity was: $maxBuffer)!") + promise.success(QueueOfferResult.Failure(bufferOverflowException)) + completion.failure(bufferOverflowException) + failStage(bufferOverflowException) + case Backpressure ⇒ + pendingOffer match { + case Some(_) ⇒ + promise.failure(new IllegalStateException("You have to wait for previous offer to be resolved to send another request")) + case None ⇒ + pendingOffer = Some((elem, promise)) + } + } + } + + private val callback: AsyncCallback[(T, Offered)] = getAsyncCallback(tuple ⇒ { + val (elem, promise) = tuple + + if (maxBuffer != 0) { + bufferElem(elem, promise) + if (pulled) { + push(out, buffer.dequeue()) + pulled = false + } + } else if (pulled) { + push(out, elem) + pulled = false + promise.success(QueueOfferResult.Enqueued) + } else pendingOffer = Some(tuple) + }) + + setHandler(out, new OutHandler { + override def onDownstreamFinish(): Unit = { + pendingOffer match { + case Some((elem, promise)) ⇒ + promise.success(QueueOfferResult.QueueClosed) + pendingOffer = None + case None ⇒ // do nothing + } + completion.success(Done) + completeStage() + } + + override def onPull(): Unit = { + if (maxBuffer == 0) + pendingOffer match { + case Some((elem, promise)) ⇒ + push(out, elem) + promise.success(QueueOfferResult.Enqueued) + pendingOffer = None + case None ⇒ pulled = true + } + else if (!buffer.isEmpty) { + push(out, buffer.dequeue()) + pendingOffer match { + case Some((elem, promise)) ⇒ + enqueueAndSuccess(elem, promise) + pendingOffer = None + case None ⇒ //do nothing + } + } else pulled = true + } + }) + } + + (stageLogic, new SourceQueue[T] { + override def watchCompletion() = completion.future + override def offer(element: T): Future[QueueOfferResult] = { + val p = Promise[QueueOfferResult]() + stageLogic.invoke((element, p)) + p.future + } + }) + } +} + +private[akka] final class SourceQueueAdapter[T](delegate: SourceQueue[T]) extends akka.stream.javadsl.SourceQueue[T] { + def offer(elem: T): CompletionStage[QueueOfferResult] = delegate.offer(elem).toJava + def watchCompletion(): CompletionStage[Done] = delegate.watchCompletion().toJava +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/Stages.scala b/akka-stream/src/main/scala/akka/stream/impl/Stages.scala index 088ed69981..f9881314de 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Stages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Stages.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl @@ -25,14 +25,17 @@ private[stream] object Stages { val IODispatcher = ActorAttributes.Dispatcher("akka.stream.default-blocking-io-dispatcher") val fused = name("fused") + val materializedValueSource = name("matValueSource") val map = name("map") val log = name("log") val filter = name("filter") + val filterNot = name("filterNot") val collect = name("collect") val recover = name("recover") val mapAsync = name("mapAsync") val mapAsyncUnordered = name("mapAsyncUnordered") val grouped = name("grouped") + val groupedWithin = name("groupedWithin") val limit = name("limit") val limitWeighted = name("limitWeighted") val sliding = name("sliding") @@ -42,11 +45,15 @@ private[stream] object Stages { val dropWhile = name("dropWhile") val scan = name("scan") val fold = name("fold") + val reduce = name("reduce") val intersperse = name("intersperse") val buffer = name("buffer") val conflate = name("conflate") + val batch = name("batch") + val batchWeighted = name("batchWeighted") val expand = name("expand") val mapConcat = name("mapConcat") + val detacher = name("detacher") val groupBy = name("groupBy") val prefixAndTail = name("prefixAndTail") val split = name("split") @@ -57,6 +64,7 @@ private[stream] object Stages { val merge = name("merge") val mergePreferred = name("mergePreferred") + val flattenMerge = name("flattenMerge") val broadcast = name("broadcast") val balance = name("balance") val zip = name("zip") @@ -65,7 +73,9 @@ private[stream] object Stages { val repeat = name("repeat") val unfold = name("unfold") val unfoldAsync = name("unfoldAsync") - val unfoldInf = name("unfoldInf") + val delay = name("delay") and inputBuffer(16, 16) + + val terminationWatcher = name("terminationWatcher") val publisherSource = name("publisherSource") val iterableSource = name("iterableSource") @@ -80,7 +90,7 @@ private[stream] object Stages { val subscriberSource = name("subscriberSource") val actorPublisherSource = name("actorPublisherSource") val actorRefSource = name("actorRefSource") - val acknowledgeSource = name("acknowledgeSource") + val queueSource = name("queueSource") val inputStreamSource = name("inputStreamSource") and IODispatcher val outputStreamSource = name("outputStreamSource") and IODispatcher val fileSource = name("fileSource") and IODispatcher @@ -199,10 +209,6 @@ private[stream] object Stages { override def create(attr: Attributes): Stage[In, Out] = fusing.Conflate(seed, aggregate, supervision(attr)) } - final case class Expand[In, Out, Seed](seed: In ⇒ Seed, extrapolate: Seed ⇒ (Out, Seed), attributes: Attributes = expand) extends SymbolicStage[In, Out] { - override def create(attr: Attributes): Stage[In, Out] = fusing.Expand(seed, extrapolate) - } - final case class MapConcat[In, Out](f: In ⇒ immutable.Iterable[Out], attributes: Attributes = mapConcat) extends SymbolicStage[In, Out] { override def create(attr: Attributes): Stage[In, Out] = fusing.MapConcat(f, supervision(attr)) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala index b3766d87ec..3c52cd29ee 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl import java.util.concurrent.atomic.{ AtomicInteger, AtomicReference } import java.{ util ⇒ ju } +import akka.NotUsed import akka.stream.impl.MaterializerSession.MaterializationPanic import akka.stream.impl.StreamLayout.Module import akka.stream.impl.fusing.GraphStages.MaterializedValueSource @@ -676,7 +677,7 @@ private[stream] abstract class MaterializerSession(val topLevel: StreamLayout.Mo case Atomic(m) ⇒ matVal.get(m) case Combine(f, d1, d2) ⇒ f(resolveMaterialized(d1, matVal, indent + " "), resolveMaterialized(d2, matVal, indent + " ")) case Transform(f, d) ⇒ f(resolveMaterialized(d, matVal, indent + " ")) - case Ignore ⇒ () + case Ignore ⇒ NotUsed } if (MaterializerSession.Debug) println(indent + s"result = $ret") matValSrc.remove(matNode) match { diff --git a/akka-stream/src/main/scala/akka/stream/impl/StreamOfStreamProcessors.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamOfStreamProcessors.scala index c659035bd3..f227447d47 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamOfStreamProcessors.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamOfStreamProcessors.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl @@ -224,204 +224,3 @@ private[akka] abstract class MultiStreamOutputProcessor(_settings: ActorMaterial override def activeReceive: Receive = primaryInputs.subreceive orElse primaryOutputs.subreceive orElse outputSubstreamManagement } -/** - * INTERNAL API - */ -private[akka] object TwoStreamInputProcessor { - class OtherActorSubscriber[T](val impl: ActorRef) extends Subscriber[T] { - override def onError(cause: Throwable): Unit = { - ReactiveStreamsCompliance.requireNonNullException(cause) - impl ! OtherStreamOnError(cause) - } - override def onComplete(): Unit = impl ! OtherStreamOnComplete - override def onNext(element: T): Unit = { - ReactiveStreamsCompliance.requireNonNullElement(element) - impl ! OtherStreamOnNext(element) - } - override def onSubscribe(subscription: Subscription): Unit = { - ReactiveStreamsCompliance.requireNonNullSubscription(subscription) - impl ! 
OtherStreamOnSubscribe(subscription) - } - } - - case object OtherStreamOnComplete extends DeadLetterSuppression - final case class OtherStreamOnNext(element: Any) extends DeadLetterSuppression - final case class OtherStreamOnSubscribe(subscription: Subscription) extends DeadLetterSuppression - final case class OtherStreamOnError(ex: Throwable) extends DeadLetterSuppression -} - -/** - * INTERNAL API - */ -private[akka] abstract class TwoStreamInputProcessor(_settings: ActorMaterializerSettings, val other: Publisher[Any]) - extends ActorProcessorImpl(_settings) { - import akka.stream.impl.TwoStreamInputProcessor._ - - val secondaryInputs: Inputs = new BatchingInputBuffer(settings.initialInputBufferSize, this) { - override val subreceive: SubReceive = new SubReceive(waitingForUpstream) - - override def inputOnError(e: Throwable): Unit = TwoStreamInputProcessor.this.onError(e) - - override def waitingForUpstream: Receive = { - case OtherStreamOnComplete ⇒ onComplete() - case OtherStreamOnSubscribe(subscription) ⇒ onSubscribe(subscription) - case OtherStreamOnError(e) ⇒ TwoStreamInputProcessor.this.onError(e) - } - - override def upstreamRunning: Receive = { - case OtherStreamOnNext(element) ⇒ enqueueInputElement(element) - case OtherStreamOnComplete ⇒ onComplete() - case OtherStreamOnError(e) ⇒ TwoStreamInputProcessor.this.onError(e) - } - override protected def completed: Actor.Receive = { - case OtherStreamOnSubscribe(_) ⇒ throw ActorPublisher.NormalShutdownReason - } - } - - override def activeReceive: Receive = - secondaryInputs.subreceive orElse primaryInputs.subreceive orElse primaryOutputs.subreceive - - other.subscribe(new OtherActorSubscriber(self)) - - override def pumpFinished(): Unit = { - secondaryInputs.cancel() - super.pumpFinished() - } - -} - -/** - * INTERNAL API - */ -private[akka] object MultiStreamInputProcessor { - case class SubstreamKey(id: Long) - - class SubstreamSubscriber[T](val impl: ActorRef, key: SubstreamKey) extends AtomicReference[Subscription] with Subscriber[T] { - override def onError(cause: Throwable): Unit = { - ReactiveStreamsCompliance.requireNonNullException(cause) - impl ! SubstreamOnError(key, cause) - } - override def onComplete(): Unit = impl ! SubstreamOnComplete(key) - override def onNext(element: T): Unit = { - ReactiveStreamsCompliance.requireNonNullElement(element) - impl ! SubstreamOnNext(key, element) - } - override def onSubscribe(subscription: Subscription): Unit = { - ReactiveStreamsCompliance.requireNonNullSubscription(subscription) - if (compareAndSet(null, subscription)) impl ! 
SubstreamStreamOnSubscribe(key, subscription) - else subscription.cancel() - } - } - - case class SubstreamOnComplete(key: SubstreamKey) extends DeadLetterSuppression with NoSerializationVerificationNeeded - case class SubstreamOnNext(key: SubstreamKey, element: Any) extends DeadLetterSuppression with NoSerializationVerificationNeeded - case class SubstreamOnError(key: SubstreamKey, e: Throwable) extends DeadLetterSuppression with NoSerializationVerificationNeeded - case class SubstreamStreamOnSubscribe(key: SubstreamKey, subscription: Subscription) extends DeadLetterSuppression with NoSerializationVerificationNeeded - - class SubstreamInput(val key: SubstreamKey, bufferSize: Int, processor: MultiStreamInputProcessorLike, pump: Pump) extends BatchingInputBuffer(bufferSize, pump) { - // Not driven directly - override val subreceive = new SubReceive(Actor.emptyBehavior) - - def substreamOnComplete(): Unit = onComplete() - def substreamOnSubscribe(subscription: Subscription): Unit = onSubscribe(subscription) - def substreamOnError(e: Throwable): Unit = onError(e) - def substreamOnNext(elem: Any): Unit = enqueueInputElement(elem) - - override protected def inputOnError(e: Throwable): Unit = { - super.inputOnError(e) - processor.invalidateSubstreamInput(key, e) - } - } - -} - -/** - * INTERNAL API - */ -private[akka] trait MultiStreamInputProcessorLike extends Pump { this: Actor ⇒ - - import MultiStreamInputProcessor._ - - protected def nextId(): Long - protected def inputBufferSize: Int - - private val substreamInputs = collection.mutable.Map.empty[SubstreamKey, SubstreamInput] - private val waitingForOnSubscribe = collection.mutable.Map.empty[SubstreamKey, SubstreamSubscriber[Any]] - - val inputSubstreamManagement: Receive = { - case SubstreamStreamOnSubscribe(key, subscription) ⇒ - substreamInputs(key).substreamOnSubscribe(subscription) - waitingForOnSubscribe -= key - case SubstreamOnNext(key, element) ⇒ - substreamInputs(key).substreamOnNext(element) - case SubstreamOnComplete(key) ⇒ - substreamInputs(key).substreamOnComplete() - substreamInputs -= key - case SubstreamOnError(key, e) ⇒ - substreamInputs(key).substreamOnError(e) - } - - def createSubstreamInput(): SubstreamInput = { - val key = SubstreamKey(nextId()) - val inputs = new SubstreamInput(key, inputBufferSize, this, this) - substreamInputs(key) = inputs - inputs - } - - def createAndSubscribeSubstreamInput(p: Publisher[Any]): SubstreamInput = { - val inputs = createSubstreamInput() - val sub = new SubstreamSubscriber[Any](self, inputs.key) - waitingForOnSubscribe(inputs.key) = sub - p.subscribe(sub) - inputs - } - - def invalidateSubstreamInput(substream: SubstreamKey, e: Throwable): Unit = { - substreamInputs(substream).cancel() - substreamInputs -= substream - pump() - } - - protected def failInputs(e: Throwable): Unit = { - cancelWaitingForOnSubscribe() - substreamInputs.values foreach (_.cancel()) - } - - protected def finishInputs(): Unit = { - cancelWaitingForOnSubscribe() - substreamInputs.values foreach (_.cancel()) - } - - private def cancelWaitingForOnSubscribe(): Unit = - waitingForOnSubscribe.valuesIterator.foreach { sub ⇒ - sub.getAndSet(CancelledSubscription) match { - case null ⇒ // we were first - case subscription ⇒ - // SubstreamOnSubscribe is still in flight and will not arrive - subscription.cancel() - } - } - -} - -/** - * INTERNAL API - */ -private[akka] abstract class MultiStreamInputProcessor(_settings: ActorMaterializerSettings) extends ActorProcessorImpl(_settings) with MultiStreamInputProcessorLike { - 
private var _nextId = 0L - protected def nextId(): Long = { _nextId += 1; _nextId } - - override protected val inputBufferSize = _settings.initialInputBufferSize - - override protected def fail(e: Throwable) = { - failInputs(e) - super.fail(e) - } - - override def pumpFinished() = { - finishInputs() - super.pumpFinished() - } - - override def activeReceive = primaryInputs.subreceive orElse primaryOutputs.subreceive orElse inputSubstreamManagement -} diff --git a/akka-stream/src/main/scala/akka/stream/impl/StreamSubscriptionTimeout.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamSubscriptionTimeout.scala index e8804e9155..2094d7c24a 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamSubscriptionTimeout.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamSubscriptionTimeout.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/SubFlowImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/SubFlowImpl.scala index 3dd63c0d4c..de40baca43 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/SubFlowImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/SubFlowImpl.scala @@ -1,21 +1,22 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl +import akka.NotUsed import akka.stream._ import akka.stream.scaladsl._ import language.higherKinds object SubFlowImpl { trait MergeBack[In, F[+_]] { - def apply[T](f: Flow[In, T, Unit], breadth: Int): F[T] + def apply[T](f: Flow[In, T, NotUsed], breadth: Int): F[T] } } -class SubFlowImpl[In, Out, Mat, F[+_], C](val subFlow: Flow[In, Out, Unit], +class SubFlowImpl[In, Out, Mat, F[+_], C](val subFlow: Flow[In, Out, NotUsed], mergeBackFunction: SubFlowImpl.MergeBack[In, F], - finishFunction: Sink[In, Unit] ⇒ C) + finishFunction: Sink[In, NotUsed] ⇒ C) extends SubFlow[Out, Mat, F, C] { override def deprecatedAndThen[U](op: Stages.StageModule): SubFlow[U, Mat, F, C] = diff --git a/akka-stream/src/main/scala/akka/stream/impl/SubscriberManagement.scala b/akka-stream/src/main/scala/akka/stream/impl/SubscriberManagement.scala index 472925a140..1db9aaba58 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/SubscriberManagement.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/SubscriberManagement.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/Throttle.scala b/akka-stream/src/main/scala/akka/stream/impl/Throttle.scala index 797fff8650..c259dafff7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Throttle.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Throttle.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl @@ -20,6 +20,9 @@ private[stream] class Throttle[T](cost: Int, costCalculation: (T) ⇒ Int, mode: ThrottleMode) extends SimpleLinearGraphStage[T] { + require(cost > 0, "cost must be > 0") + require(per.toMillis > 0, "per time must be > 0") + require(!(mode == ThrottleMode.Enforcing && maximumBurst < 0), "maximumBurst must be > 0 in Enforcing mode") override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { var willStop = false diff --git a/akka-stream/src/main/scala/akka/stream/impl/Timers.scala b/akka-stream/src/main/scala/akka/stream/impl/Timers.scala index 039d6cc5cf..f032d8f545 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Timers.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Timers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/Transfer.scala b/akka-stream/src/main/scala/akka/stream/impl/Transfer.scala index fe0a8634a8..8ba68aa169 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Transfer.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Transfer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl diff --git a/akka-stream/src/main/scala/akka/stream/impl/Unfold.scala b/akka-stream/src/main/scala/akka/stream/impl/Unfold.scala index 5a873fe54f..372fba2d53 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/Unfold.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/Unfold.scala @@ -1,12 +1,13 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.stream.impl.Stages.DefaultAttributes import akka.stream.stage.{ OutHandler, GraphStageLogic, GraphStage } import akka.stream._ -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.Future import scala.util.{ Failure, Success, Try } /** @@ -15,7 +16,7 @@ import scala.util.{ Failure, Success, Try } private[akka] final class Unfold[S, E](s: S, f: S ⇒ Option[(S, E)]) extends GraphStage[SourceShape[E]] { val out: Outlet[E] = Outlet("Unfold.out") override val shape: SourceShape[E] = SourceShape(out) - + override def initialAttributes: Attributes = DefaultAttributes.unfold override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { private[this] var state = s @@ -38,7 +39,7 @@ private[akka] final class Unfold[S, E](s: S, f: S ⇒ Option[(S, E)]) extends Gr private[akka] final class UnfoldAsync[S, E](s: S, f: S ⇒ Future[Option[(S, E)]]) extends GraphStage[SourceShape[E]] { val out: Outlet[E] = Outlet("UnfoldAsync.out") override val shape: SourceShape[E] = SourceShape(out) - + override def initialAttributes: Attributes = DefaultAttributes.unfoldAsync override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { private[this] var state = s diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala index 5bb63b7a3e..a41f8c8fd8 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl.fusing @@ -269,12 +269,12 @@ private[stream] object ActorGraphInterpreter { } def exposedPublisher(publisher: ActorPublisher[Any]): Unit = { + exposedPublisher = publisher upstreamFailed match { case _: Some[_] ⇒ publisher.shutdown(upstreamFailed) case _ ⇒ if (upstreamCompleted) publisher.shutdown(None) - else exposedPublisher = publisher } } @@ -532,7 +532,9 @@ private[stream] class ActorGraphInterpreter(_initial: GraphInterpreterShell) ext if (shell.isInitialized) { // yes, this steals another shell’s Resume, but that’s okay because extra ones will just not do anything finishShellRegistration() - } else tryInit(shell) + } else if (!tryInit(shell)) { + if (activeInterpreters.isEmpty) finishShellRegistration() + } } override def preStart(): Unit = { @@ -564,5 +566,9 @@ private[stream] class ActorGraphInterpreter(_initial: GraphInterpreterShell) ext } } - override def postStop(): Unit = activeInterpreters.foreach(_.tryAbort(AbruptTerminationException(self))) + override def postStop(): Unit = { + val ex = AbruptTerminationException(self) + activeInterpreters.foreach(_.tryAbort(ex)) + newShells.foreach(s ⇒ if (tryInit(s)) s.tryAbort(ex)) + } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/Fusing.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/Fusing.scala index 185f9ccb09..9af78b8837 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/Fusing.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/Fusing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing @@ -504,6 +504,7 @@ private[stream] object Fusing { private def removeMapping[T](orig: T, map: ju.Map[T, List[T]]): T = map.remove(orig) match { case null ⇒ null.asInstanceOf[T] + case Nil ⇒ throw new IllegalStateException("mappings corrupted") case x :: Nil ⇒ x case x :: xs ⇒ map.put(orig, xs) diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala index 45215b6202..41d5e8ddb6 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala index ea70aaf273..4ccc35bf5b 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphStages.scala @@ -1,13 +1,15 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl.fusing +import akka.Done import java.util.concurrent.atomic.AtomicBoolean import akka.actor.Cancellable import akka.dispatch.ExecutionContexts import akka.event.Logging import akka.stream._ +import akka.stream.scaladsl._ import akka.stream.impl.Stages.DefaultAttributes import akka.stream.stage._ import scala.concurrent.{ Future, Promise } @@ -41,14 +43,14 @@ object GraphStages { /** * INTERNAL API */ - private[stream] abstract class SimpleLinearGraphStage[T] extends GraphStage[FlowShape[T, T]] { + private[akka] abstract class SimpleLinearGraphStage[T] extends GraphStage[FlowShape[T, T]] { val in = Inlet[T](Logging.simpleName(this) + ".in") val out = Outlet[T](Logging.simpleName(this) + ".out") override val shape = FlowShape(in, out) } object Identity extends SimpleLinearGraphStage[Any] { - override def initialAttributes = Attributes.name("identityOp") + override def initialAttributes = DefaultAttributes.identityOp override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { setHandler(in, new InHandler { @@ -65,14 +67,16 @@ object GraphStages { def identity[T] = Identity.asInstanceOf[SimpleLinearGraphStage[T]] - private class Detacher[T] extends GraphStage[FlowShape[T, T]] { - val in = Inlet[T]("in") - val out = Outlet[T]("out") - override def initialAttributes = Attributes.name("Detacher") + /** + * INTERNAL API + */ + private[stream] final class Detacher[T] extends GraphStage[FlowShape[T, T]] { + val in = Inlet[T]("Detacher.in") + val out = Outlet[T]("Detacher.out") + override def initialAttributes = DefaultAttributes.detacher override val shape = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { - var initialized = false setHandler(in, new InHandler { override def onPush(): Unit = { @@ -105,14 +109,115 @@ object GraphStages { private val _detacher = new Detacher[Any] def detacher[T]: GraphStage[FlowShape[T, T]] = _detacher.asInstanceOf[GraphStage[FlowShape[T, T]]] + final class Breaker(callback: Breaker.Operation ⇒ Unit) { + import Breaker._ + def complete(): Unit = callback(Complete) + def cancel(): Unit = callback(Cancel) + def fail(ex: Throwable): Unit = callback(Fail(ex)) + def completeAndCancel(): Unit = callback(CompleteAndCancel) + def failAndCancel(ex: Throwable): Unit = callback(FailAndCancel(ex)) + } + + object Breaker extends GraphStageWithMaterializedValue[FlowShape[Any, Any], Future[Breaker]] { + sealed trait Operation + case object Complete extends Operation + case object Cancel extends Operation + case class Fail(ex: Throwable) extends Operation + case object CompleteAndCancel extends Operation + case class FailAndCancel(ex: Throwable) extends Operation + + override val initialAttributes = Attributes.name("breaker") + override val shape = FlowShape(Inlet[Any]("breaker.in"), Outlet[Any]("breaker.out")) + + override def createLogicAndMaterializedValue(attr: Attributes) = { + val promise = Promise[Breaker] + + val logic = new GraphStageLogic(shape) { + + passAlong(shape.in, shape.out) + setHandler(shape.out, eagerTerminateOutput) + + override def preStart(): Unit = { + pull(shape.in) + promise.success(new Breaker(getAsyncCallback[Operation] { + case Complete ⇒ complete(shape.out) + case Cancel ⇒ cancel(shape.in) + case Fail(ex) ⇒ fail(shape.out, ex) + case CompleteAndCancel ⇒ completeStage() + case FailAndCancel(ex) ⇒ failStage(ex) + }.invoke)) + } + } + + (logic, promise.future) + } + } + + def breaker[T]: Graph[FlowShape[T, T], 
Future[Breaker]] = Breaker.asInstanceOf[Graph[FlowShape[T, T], Future[Breaker]]] + + object BidiBreaker extends GraphStageWithMaterializedValue[BidiShape[Any, Any, Any, Any], Future[Breaker]] { + import Breaker._ + + override val initialAttributes = Attributes.name("breaker") + override val shape = BidiShape( + Inlet[Any]("breaker.in1"), Outlet[Any]("breaker.out1"), + Inlet[Any]("breaker.in2"), Outlet[Any]("breaker.out2")) + + override def createLogicAndMaterializedValue(attr: Attributes) = { + val promise = Promise[Breaker] + + val logic = new GraphStageLogic(shape) { + + setHandler(shape.in1, new InHandler { + override def onPush(): Unit = push(shape.out1, grab(shape.in1)) + override def onUpstreamFinish(): Unit = complete(shape.out1) + override def onUpstreamFailure(ex: Throwable): Unit = fail(shape.out1, ex) + }) + setHandler(shape.in2, new InHandler { + override def onPush(): Unit = push(shape.out2, grab(shape.in2)) + override def onUpstreamFinish(): Unit = complete(shape.out2) + override def onUpstreamFailure(ex: Throwable): Unit = fail(shape.out2, ex) + }) + setHandler(shape.out1, new OutHandler { + override def onPull(): Unit = pull(shape.in1) + override def onDownstreamFinish(): Unit = cancel(shape.in1) + }) + setHandler(shape.out2, new OutHandler { + override def onPull(): Unit = pull(shape.in2) + override def onDownstreamFinish(): Unit = cancel(shape.in2) + }) + + override def preStart(): Unit = { + promise.success(new Breaker(getAsyncCallback[Operation] { + case Complete ⇒ + complete(shape.out1) + complete(shape.out2) + case Cancel ⇒ + cancel(shape.in1) + cancel(shape.in2) + case Fail(ex) ⇒ + fail(shape.out1, ex) + fail(shape.out2, ex) + case CompleteAndCancel ⇒ completeStage() + case FailAndCancel(ex) ⇒ failStage(ex) + }.invoke)) + } + } + + (logic, promise.future) + } + } + + def bidiBreaker[T1, T2]: Graph[BidiShape[T1, T1, T2, T2], Future[Breaker]] = BidiBreaker.asInstanceOf[Graph[BidiShape[T1, T1, T2, T2], Future[Breaker]]] + private object TickSource { class TickSourceCancellable(cancelled: AtomicBoolean) extends Cancellable { - private val cancelPromise = Promise[Unit]() + private val cancelPromise = Promise[Done]() - def cancelFuture: Future[Unit] = cancelPromise.future + def cancelFuture: Future[Done] = cancelPromise.future override def cancel(): Boolean = { - if (!isCancelled) cancelPromise.trySuccess(()) + if (!isCancelled) cancelPromise.trySuccess(Done) true } @@ -120,13 +225,50 @@ object GraphStages { } } - class TickSource[T](initialDelay: FiniteDuration, interval: FiniteDuration, tick: T) + private object TerminationWatcher extends GraphStageWithMaterializedValue[FlowShape[Any, Any], Future[Done]] { + val in = Inlet[Any]("terminationWatcher.in") + val out = Outlet[Any]("terminationWatcher.out") + override val shape = FlowShape(in, out) + override def initialAttributes: Attributes = DefaultAttributes.terminationWatcher + + override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[Done]) = { + val finishPromise = Promise[Done]() + + (new GraphStageLogic(shape) { + setHandler(in, new InHandler { + override def onPush(): Unit = push(out, grab(in)) + + override def onUpstreamFinish(): Unit = { + finishPromise.success(Done) + completeStage() + } + + override def onUpstreamFailure(ex: Throwable): Unit = { + finishPromise.failure(ex) + failStage(ex) + } + }) + setHandler(out, new OutHandler { + override def onPull(): Unit = pull(in) + override def onDownstreamFinish(): Unit = { + finishPromise.success(Done) + completeStage() + 
} + }) + }, finishPromise.future) + } + + override def toString = "TerminationWatcher" + } + + def terminationWatcher[T]: GraphStageWithMaterializedValue[FlowShape[T, T], Future[Done]] = + TerminationWatcher.asInstanceOf[GraphStageWithMaterializedValue[FlowShape[T, T], Future[Done]]] + + final class TickSource[T](initialDelay: FiniteDuration, interval: FiniteDuration, tick: T) extends GraphStageWithMaterializedValue[SourceShape[T], Cancellable] { - - val out = Outlet[T]("TimerSource.out") - override def initialAttributes = Attributes.name("TickSource") - override val shape = SourceShape(out) - + override val shape = SourceShape(Outlet[T]("TickSource.out")) + val out = shape.out + override def initialAttributes: Attributes = DefaultAttributes.tickSource override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Cancellable) = { import TickSource._ @@ -144,9 +286,7 @@ object GraphStages { cancellable.cancelFuture.onComplete(_ ⇒ callback.invoke(()))(interpreter.materializer.executionContext) } - setHandler(out, new OutHandler { - override def onPull() = () // Do nothing - }) + setHandler(out, eagerTerminateOutput) override protected def onTimer(timerKey: Any) = if (isAvailable(out)) push(out, tick) @@ -165,9 +305,9 @@ object GraphStages { * * This source is not reusable, it is only created internally. */ - private[stream] class MaterializedValueSource[T](val computation: MaterializedValueNode, val out: Outlet[T]) extends GraphStage[SourceShape[T]] { + private[stream] final class MaterializedValueSource[T](val computation: MaterializedValueNode, val out: Outlet[T]) extends GraphStage[SourceShape[T]] { def this(computation: MaterializedValueNode) = this(computation, Outlet[T]("matValue")) - override def initialAttributes: Attributes = Attributes.name("matValueSource") + override def initialAttributes: Attributes = DefaultAttributes.materializedValueSource override val shape = SourceShape(out) private val promise = Promise[T] @@ -183,10 +323,11 @@ object GraphStages { } } - override def toString: String = s"MatValSrc($computation)" + override def toString: String = s"MaterializedValueSource($computation)" } - private[stream] class SingleSource[T](val elem: T) extends GraphStage[SourceShape[T]] { + private[stream] final class SingleSource[T](val elem: T) extends GraphStage[SourceShape[T]] { + override def initialAttributes: Attributes = DefaultAttributes.singleSource ReactiveStreamsCompliance.requireNonNullElement(elem) val out = Outlet[T]("single.out") val shape = SourceShape(out) @@ -220,4 +361,27 @@ object GraphStages { } override def toString: String = "FutureSource" } + + /** + * INTERNAL API. + * + * Fusing graphs that have cycles involving FanIn stages might lead to deadlocks if + * demand is not carefully managed. + * + * This means that FanIn stages need to early pull every relevant input on startup. + * This can either be implemented inside the stage itself, or this method can be used, + * which adds a detacher stage to every input. 
+ */ + private[stream] def withDetachedInputs[T](stage: GraphStage[UniformFanInShape[T, T]]) = + GraphDSL.create() { implicit builder ⇒ + import GraphDSL.Implicits._ + val concat = builder.add(stage) + val ds = concat.inSeq.map { inlet ⇒ + val detacher = builder.add(GraphStages.detacher[T]) + detacher ~> inlet + detacher.in + } + UniformFanInShape(concat.out, ds: _*) + } + } diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/IteratorInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/IteratorInterpreter.scala index 6d23909c6a..fdc7b4bca6 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/IteratorInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/IteratorInterpreter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl.fusing diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala index e9d79b8def..a2706ce4d0 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala @@ -1,12 +1,13 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl.fusing import akka.event.Logging.LogLevel import akka.event.{ LogSource, Logging, LoggingAdapter } import akka.stream.Attributes.{ InputBuffer, LogLevels } -import akka.stream.DelayOverflowStrategy.EmitEarly +import akka.stream.impl.Stages.DefaultAttributes +import akka.stream.OverflowStrategies._ import akka.stream.impl.fusing.GraphStages.SimpleLinearGraphStage import akka.stream.impl.{ FixedSizeBuffer, BoundedBuffer, ReactiveStreamsCompliance } import akka.stream.stage._ @@ -19,6 +20,7 @@ import scala.util.control.NonFatal import scala.util.{ Failure, Success, Try } import akka.stream.ActorAttributes.SupervisionStrategy import scala.concurrent.duration.{ FiniteDuration, _ } +import akka.stream.impl.Stages.DefaultAttributes /** * INTERNAL API @@ -241,6 +243,9 @@ private[akka] final case class Fold[In, Out](zero: Out, f: (Out, In) ⇒ Out, de * INTERNAL API */ final case class Intersperse[T](start: Option[T], inject: T, end: Option[T]) extends GraphStage[FlowShape[T, T]] { + ReactiveStreamsCompliance.requireNonNullElement(inject) + if (start.isDefined) ReactiveStreamsCompliance.requireNonNullElement(start.get) + if (end.isDefined) ReactiveStreamsCompliance.requireNonNullElement(end.get) private val in = Inlet[T]("in") private val out = Outlet[T]("out") @@ -367,8 +372,6 @@ private[akka] final case class Sliding[T](n: Int, step: Int) extends PushPullSta */ private[akka] final case class Buffer[T](size: Int, overflowStrategy: OverflowStrategy) extends DetachedStage[T, T] { - import OverflowStrategy._ - private val buffer = FixedSizeBuffer[T](size) override def onPush(elem: T, ctx: DetachedContext[T]): UpstreamDirective = @@ -389,8 +392,8 @@ private[akka] final case class Buffer[T](size: Int, overflowStrategy: OverflowSt if (buffer.isEmpty) ctx.finish() else ctx.absorbTermination() - val enqueueAction: (DetachedContext[T], T) ⇒ UpstreamDirective = { - (overflowStrategy: @unchecked) match { + val enqueueAction: (DetachedContext[T], T) ⇒ UpstreamDirective = + overflowStrategy match { case DropHead ⇒ (ctx, elem) ⇒ if (buffer.isFull) buffer.dropHead() buffer.enqueue(elem) @@ -411,13 +414,13 @@ private[akka] final case class Buffer[T](size: Int, overflowStrategy: OverflowSt if (buffer.isFull) 
ctx.holdUpstream() else ctx.pull() case Fail ⇒ (ctx, elem) ⇒ - if (buffer.isFull) ctx.fail(new Fail.BufferOverflowException(s"Buffer overflow (max capacity was: $size)!")) + if (buffer.isFull) ctx.fail(new BufferOverflowException(s"Buffer overflow (max capacity was: $size)!")) else { buffer.enqueue(elem) ctx.pull() } } - } + } /** @@ -476,47 +479,131 @@ private[akka] final case class Conflate[In, Out](seed: In ⇒ Out, aggregate: (O /** * INTERNAL API */ -private[akka] final case class Expand[In, Out, Seed](seed: In ⇒ Seed, extrapolate: Seed ⇒ (Out, Seed)) extends DetachedStage[In, Out] { - private var s: Seed = _ - private var started: Boolean = false - private var expanded: Boolean = false +private[akka] final case class Batch[In, Out](max: Long, costFn: In ⇒ Long, seed: In ⇒ Out, aggregate: (Out, In) ⇒ Out) + extends GraphStage[FlowShape[In, Out]] { - override def onPush(elem: In, ctx: DetachedContext[Out]): UpstreamDirective = { - s = seed(elem) - started = true - expanded = false - if (ctx.isHoldingDownstream) { - val (emit, newS) = extrapolate(s) - s = newS - expanded = true - ctx.pushAndPull(emit) - } else ctx.holdUpstream() - } + val in = Inlet[In]("Batch.in") + val out = Outlet[Out]("Batch.out") - override def onPull(ctx: DetachedContext[Out]): DownstreamDirective = { - if (ctx.isFinishing) { - if (!started) ctx.finish() - else ctx.pushAndFinish(extrapolate(s)._1) - } else if (!started) ctx.holdDownstream() - else { - val (emit, newS) = extrapolate(s) - s = newS - expanded = true - if (ctx.isHoldingUpstream) ctx.pushAndPull(emit) - else ctx.push(emit) + override val shape: FlowShape[In, Out] = FlowShape.of(in, out) + + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + private var agg: Out = null.asInstanceOf[Out] + private var left: Long = max + private var pending: In = null.asInstanceOf[In] + + private def flush(): Unit = { + push(out, agg) + left = max + if (pending != null) { + val elem = pending + agg = seed(elem) + left -= costFn(elem) + pending = null.asInstanceOf[In] + } else { + agg = null.asInstanceOf[Out] + } } + override def preStart() = pull(in) + + setHandler(in, new InHandler { + + override def onPush(): Unit = { + val elem = grab(in) + val cost = costFn(elem) + if (agg == null) { + left -= cost + agg = seed(elem) + } else if (left < cost) { + pending = elem + } else { + left -= cost + agg = aggregate(agg, elem) + } + + if (isAvailable(out)) flush() + if (pending == null) pull(in) + } + + override def onUpstreamFinish(): Unit = { + if (agg == null) completeStage() + } + }) + + setHandler(out, new OutHandler { + + override def onPull(): Unit = { + if (agg == null) { + if (isClosed(in)) completeStage() + else if (!hasBeenPulled(in)) pull(in) + } else if (isClosed(in)) { + push(out, agg) + if (pending == null) completeStage() + else { + agg = seed(pending) + pending = null.asInstanceOf[In] + } + } else { + flush() + if (!hasBeenPulled(in)) pull(in) + } + } + }) } +} - override def onUpstreamFinish(ctx: DetachedContext[Out]): TerminationDirective = { - if (expanded) ctx.finish() - else ctx.absorbTermination() +/** + * INTERNAL API + */ +private[akka] final class Expand[In, Out](extrapolate: In ⇒ Iterator[Out]) extends GraphStage[FlowShape[In, Out]] { + private val in = Inlet[In]("expand.in") + private val out = Outlet[Out]("expand.out") + + override def initialAttributes = DefaultAttributes.expand + override val shape = FlowShape(in, out) + + override def createLogic(attr: Attributes) = new GraphStageLogic(shape) { 
+ private var iterator: Iterator[Out] = Iterator.empty + private var expanded = false + + override def preStart(): Unit = pull(in) + + setHandler(in, new InHandler { + override def onPush(): Unit = { + iterator = extrapolate(grab(in)) + if (iterator.hasNext) { + if (isAvailable(out)) { + expanded = true + pull(in) + push(out, iterator.next()) + } else expanded = false + } else pull(in) + } + override def onUpstreamFinish(): Unit = { + if (iterator.hasNext && !expanded) () // need to wait + else completeStage() + } + }) + + setHandler(out, new OutHandler { + override def onPull(): Unit = { + if (iterator.hasNext) { + if (expanded == false) { + expanded = true + if (isClosed(in)) { + push(out, iterator.next()) + completeStage() + } else { + // expand needs to pull first to be “fair” when upstream is not actually slow + pull(in) + push(out, iterator.next()) + } + } else push(out, iterator.next()) + } + } + }) } - - override def decide(t: Throwable): Supervision.Directive = Supervision.Stop - - override def restart(): Expand[In, Out, Seed] = - throw new UnsupportedOperationException("Expand doesn't support restart") } /** @@ -538,15 +625,14 @@ private[akka] final case class MapAsync[In, Out](parallelism: Int, f: In ⇒ Fut private val in = Inlet[In]("in") private val out = Outlet[Out]("out") - override def initialAttributes = Attributes.name("MapAsync") + override def initialAttributes = DefaultAttributes.mapAsync override val shape = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) { override def toString = s"MapAsync.Logic(buffer=$buffer)" - val decider = - inheritedAttributes.getAttribute(classOf[SupervisionStrategy]) - .map(_.decider).getOrElse(Supervision.stoppingDecider) + //FIXME Put Supervision.stoppingDecider as a SupervisionStrategy on DefaultAttributes.mapAsync? 
+ val decider = inheritedAttributes.getAttribute(classOf[SupervisionStrategy]).map(_.decider).getOrElse(Supervision.stoppingDecider) val buffer = new BoundedBuffer[Holder[Try[Out]]](parallelism) def todo = buffer.used @@ -617,7 +703,7 @@ private[akka] final case class MapAsyncUnordered[In, Out](parallelism: Int, f: I private val in = Inlet[In]("in") private val out = Outlet[Out]("out") - override def initialAttributes = Attributes.name("MapAsyncUnordered") + override def initialAttributes = DefaultAttributes.mapAsyncUnordered override val shape = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) { @@ -782,10 +868,13 @@ private[stream] object TimerKeys { case object GroupedWithinTimerKey } -private[stream] class GroupedWithin[T](n: Int, d: FiniteDuration) extends GraphStage[FlowShape[T, immutable.Seq[T]]] { +private[stream] final class GroupedWithin[T](n: Int, d: FiniteDuration) extends GraphStage[FlowShape[T, immutable.Seq[T]]] { + require(n > 0, "n must be greater than 0") + require(d > Duration.Zero) + val in = Inlet[T]("in") val out = Outlet[immutable.Seq[T]]("out") - override def initialAttributes = Attributes.name("GroupedWithin") + override def initialAttributes = DefaultAttributes.groupedWithin val shape = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { @@ -854,17 +943,23 @@ private[stream] class GroupedWithin[T](n: Int, d: FiniteDuration) extends GraphS } } -private[stream] class Delay[T](d: FiniteDuration, strategy: DelayOverflowStrategy) extends SimpleLinearGraphStage[T] { - +private[stream] final class Delay[T](d: FiniteDuration, strategy: DelayOverflowStrategy) extends SimpleLinearGraphStage[T] { + private[this] def timerName = "DelayedTimer" + override def initialAttributes: Attributes = DefaultAttributes.delay override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { - val size = inheritedAttributes.getAttribute(classOf[InputBuffer], InputBuffer(16, 16)).max + val size = + inheritedAttributes.getAttribute(classOf[InputBuffer]) match { + case None ⇒ throw new IllegalStateException(s"Couldn't find InputBuffer Attribute for $this") + case Some(InputBuffer(min, max)) ⇒ max + } + val buffer = FixedSizeBuffer[(Long, T)](size) // buffer has pairs timestamp with upstream element - val timerName = "DelayedTimer" var willStop = false setHandler(in, handler = new InHandler { + //FIXME rewrite into distinct strategy functions to avoid matching on strategy for every input when full override def onPush(): Unit = { - if (buffer.isFull) (strategy: @unchecked) match { + if (buffer.isFull) strategy match { case EmitEarly ⇒ if (!isTimerActive(timerName)) push(out, buffer.dequeue()._2) @@ -872,24 +967,24 @@ private[stream] class Delay[T](d: FiniteDuration, strategy: DelayOverflowStrateg cancelTimer(timerName) onTimer(timerName) } - case DelayOverflowStrategy.DropHead ⇒ + case DropHead ⇒ buffer.dropHead() grabAndPull(true) - case DelayOverflowStrategy.DropTail ⇒ + case DropTail ⇒ buffer.dropTail() grabAndPull(true) - case DelayOverflowStrategy.DropNew ⇒ + case DropNew ⇒ grab(in) if (!isTimerActive(timerName)) scheduleOnce(timerName, d) - case DelayOverflowStrategy.DropBuffer ⇒ + case DropBuffer ⇒ buffer.clear() grabAndPull(true) - case DelayOverflowStrategy.Fail ⇒ - failStage(new DelayOverflowStrategy.Fail.BufferOverflowException(s"Buffer overflow for delay combinator (max capacity was: $size)!")) - case 
DelayOverflowStrategy.Backpressure ⇒ throw new IllegalStateException("Delay buffer must never overflow in Backpressure mode") + case Fail ⇒ + failStage(new BufferOverflowException(s"Buffer overflow for delay combinator (max capacity was: $size)!")) + case Backpressure ⇒ throw new IllegalStateException("Delay buffer must never overflow in Backpressure mode") } else { - grabAndPull(strategy != DelayOverflowStrategy.Backpressure || buffer.size < size - 1) + grabAndPull(strategy != Backpressure || buffer.size < size - 1) if (!isTimerActive(timerName)) scheduleOnce(timerName, d) } } @@ -932,7 +1027,7 @@ private[stream] class Delay[T](d: FiniteDuration, strategy: DelayOverflowStrateg override def toString = "Delay" } -private[stream] class TakeWithin[T](timeout: FiniteDuration) extends SimpleLinearGraphStage[T] { +private[stream] final class TakeWithin[T](timeout: FiniteDuration) extends SimpleLinearGraphStage[T] { override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { setHandler(in, new InHandler { @@ -952,7 +1047,7 @@ private[stream] class TakeWithin[T](timeout: FiniteDuration) extends SimpleLinea override def toString = "TakeWithin" } -private[stream] class DropWithin[T](timeout: FiniteDuration) extends SimpleLinearGraphStage[T] { +private[stream] final class DropWithin[T](timeout: FiniteDuration) extends SimpleLinearGraphStage[T] { override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { private var allow = false @@ -974,3 +1069,38 @@ private[stream] class DropWithin[T](timeout: FiniteDuration) extends SimpleLinea override def toString = "DropWithin" } + +/** + * INTERNAL API + */ +private[stream] final class Reduce[T](f: (T, T) ⇒ T) extends SimpleLinearGraphStage[T] { + override def initialAttributes: Attributes = DefaultAttributes.reduce + + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + override def toString = s"Reduce.Logic(aggregator=$aggregator)" + var aggregator: T = _ + + setHandler(in, new InHandler { + override def onPush(): Unit = { + aggregator = grab(in) + pull(in) + setHandler(in, rest) + } + }) + def rest = new InHandler { + override def onPush(): Unit = { + aggregator = f(aggregator, grab(in)) + pull(in) + } + override def onUpstreamFinish(): Unit = { + push(out, aggregator) + completeStage() + } + } + + setHandler(out, new OutHandler { + override def onPull(): Unit = pull(in) + }) + } + override def toString = "Reduce" +} diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala index 923f3b1a31..f21b54b5cd 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala @@ -1,17 +1,17 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl.fusing import java.util.concurrent.atomic.AtomicReference +import akka.NotUsed import akka.stream._ +import akka.stream.impl.Stages.DefaultAttributes import akka.stream.impl.SubscriptionTimeoutException import akka.stream.stage._ import akka.stream.scaladsl._ import akka.stream.actor.ActorSubscriberMessage -import akka.stream.actor.ActorSubscriberMessage._ import akka.stream.actor.ActorPublisherMessage -import akka.stream.actor.ActorPublisherMessage._ import java.{ util ⇒ ju } import scala.collection.immutable import scala.concurrent._ @@ -30,7 +30,7 @@ final class FlattenMerge[T, M](breadth: Int) extends GraphStage[FlowShape[Graph[ private val in = Inlet[Graph[SourceShape[T], M]]("flatten.in") private val out = Outlet[T]("flatten.out") - override def initialAttributes = Attributes.name("FlattenMerge") + override def initialAttributes = DefaultAttributes.flattenMerge override val shape = FlowShape(in, out) override def createLogic(attr: Attributes) = new GraphStageLogic(shape) { @@ -106,12 +106,12 @@ final class FlattenMerge[T, M](breadth: Int) extends GraphStage[FlowShape[Graph[ /** * INTERNAL API */ -final class PrefixAndTail[T](n: Int) extends GraphStage[FlowShape[T, (immutable.Seq[T], Source[T, Unit])]] { +final class PrefixAndTail[T](n: Int) extends GraphStage[FlowShape[T, (immutable.Seq[T], Source[T, NotUsed])]] { val in: Inlet[T] = Inlet("PrefixAndTail.in") - val out: Outlet[(immutable.Seq[T], Source[T, Unit])] = Outlet("PrefixAndTail.out") - override val shape: FlowShape[T, (immutable.Seq[T], Source[T, Unit])] = FlowShape(in, out) + val out: Outlet[(immutable.Seq[T], Source[T, NotUsed])] = Outlet("PrefixAndTail.out") + override val shape: FlowShape[T, (immutable.Seq[T], Source[T, NotUsed])] = FlowShape(in, out) - override def initialAttributes = Attributes.name("PrefixAndTail") + override def initialAttributes = DefaultAttributes.prefixAndTail private final class PrefixAndTailLogic(_shape: Shape) extends TimerGraphStageLogic(_shape) with OutHandler with InHandler { @@ -142,7 +142,7 @@ final class PrefixAndTail[T](n: Int) extends GraphStage[FlowShape[T, (immutable. } } - private def openSubstream(): Source[T, Unit] = { + private def openSubstream(): Source[T, NotUsed] = { val timeout = ActorMaterializer.downcast(interpreter.materializer).settings.subscriptionTimeoutSettings.timeout tailSource = new SubSourceOutlet[T]("TailSource") tailSource.setHandler(subHandler) @@ -214,17 +214,17 @@ object Split { /** Splits after the current element. The current element will be the last element in the current substream. 
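   * (An element for which the predicate holds still belongs to the current substream; the next element then opens a new substream.)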
*/ case object SplitAfter extends SplitDecision - def when[T](p: T ⇒ Boolean): Graph[FlowShape[T, Source[T, Unit]], Unit] = new Split(Split.SplitBefore, p) - def after[T](p: T ⇒ Boolean): Graph[FlowShape[T, Source[T, Unit]], Unit] = new Split(Split.SplitAfter, p) + def when[T](p: T ⇒ Boolean): Graph[FlowShape[T, Source[T, NotUsed]], NotUsed] = new Split(Split.SplitBefore, p) + def after[T](p: T ⇒ Boolean): Graph[FlowShape[T, Source[T, NotUsed]], NotUsed] = new Split(Split.SplitAfter, p) } /** * INERNAL API */ -final class Split[T](decision: Split.SplitDecision, p: T ⇒ Boolean) extends GraphStage[FlowShape[T, Source[T, Unit]]] { +final class Split[T](decision: Split.SplitDecision, p: T ⇒ Boolean) extends GraphStage[FlowShape[T, Source[T, NotUsed]]] { val in: Inlet[T] = Inlet("Split.in") - val out: Outlet[Source[T, Unit]] = Outlet("Split.out") - override val shape: FlowShape[T, Source[T, Unit]] = FlowShape(in, out) + val out: Outlet[Source[T, NotUsed]] = Outlet("Split.out") + override val shape: FlowShape[T, Source[T, NotUsed]] = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogic(shape) { import Split._ @@ -373,7 +373,9 @@ final class Split[T](decision: Split.SplitDecision, p: T ⇒ Boolean) extends Gr * INTERNAL API */ object SubSink { - val RequestOne = Request(1) // No need to frivolously allocate these + sealed trait Command + case object RequestOne extends Command + case object Cancel extends Command } /** @@ -388,30 +390,30 @@ final class SubSink[T](name: String, externalCallback: ActorSubscriberMessage override def initialAttributes = Attributes.name(s"SubSink($name)") override val shape = SinkShape(in) - val status = new AtomicReference[AnyRef] + private val status = new AtomicReference[AnyRef] def pullSubstream(): Unit = status.get match { case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(RequestOne) case null ⇒ if (!status.compareAndSet(null, RequestOne)) - status.get.asInstanceOf[ActorPublisherMessage ⇒ Unit](RequestOne) + status.get.asInstanceOf[Command ⇒ Unit](RequestOne) } def cancelSubstream(): Unit = status.get match { case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(Cancel) case x ⇒ // a potential RequestOne is overwritten if (!status.compareAndSet(x, Cancel)) - status.get.asInstanceOf[ActorPublisherMessage ⇒ Unit](Cancel) + status.get.asInstanceOf[Command ⇒ Unit](Cancel) } override def createLogic(attr: Attributes) = new GraphStageLogic(shape) with InHandler { setHandler(in, this) - override def onPush(): Unit = externalCallback(OnNext(grab(in))) - override def onUpstreamFinish(): Unit = externalCallback(OnComplete) - override def onUpstreamFailure(ex: Throwable): Unit = externalCallback(OnError(ex)) + override def onPush(): Unit = externalCallback(ActorSubscriberMessage.OnNext(grab(in))) + override def onUpstreamFinish(): Unit = externalCallback(ActorSubscriberMessage.OnComplete) + override def onUpstreamFailure(ex: Throwable): Unit = externalCallback(ActorSubscriberMessage.OnError(ex)) - @tailrec private def setCB(cb: AsyncCallback[ActorPublisherMessage]): Unit = { + @tailrec private def setCB(cb: AsyncCallback[Command]): Unit = { status.get match { case null ⇒ if (!status.compareAndSet(null, cb)) setCB(cb) @@ -427,7 +429,7 @@ final class SubSink[T](name: String, externalCallback: ActorSubscriberMessage } override def preStart(): Unit = { - val ourOwnCallback = getAsyncCallback[ActorPublisherMessage] { + val ourOwnCallback = getAsyncCallback[Command] { case RequestOne ⇒ tryPull(in) case Cancel ⇒ 
completeStage() case _ ⇒ throw new IllegalStateException("Bug") @@ -450,7 +452,7 @@ object SubSource { private[akka] def kill[T, M](s: Source[T, M]): Unit = { s.module match { case GraphStageModule(_, _, stage: SubSource[_]) ⇒ - stage.externalCallback.invoke(Cancel) + stage.externalCallback.invoke(SubSink.Cancel) case pub: PublisherSource[_] ⇒ pub.create(null)._1.subscribe(new CancellingSubscriber) case m ⇒ @@ -465,7 +467,7 @@ object SubSource { /** * INTERNAL API */ -final class SubSource[T](name: String, private[fusing] val externalCallback: AsyncCallback[ActorPublisherMessage]) +final class SubSource[T](name: String, private[fusing] val externalCallback: AsyncCallback[SubSink.Command]) extends GraphStage[SourceShape[T]] { import SubSink._ @@ -473,48 +475,48 @@ final class SubSource[T](name: String, private[fusing] val externalCallback: Asy override def initialAttributes = Attributes.name(s"SubSource($name)") override val shape: SourceShape[T] = SourceShape(out) - val status = new AtomicReference[AnyRef] + private val status = new AtomicReference[AnyRef] def pushSubstream(elem: T): Unit = status.get match { - case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(OnNext(elem)) + case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(ActorSubscriberMessage.OnNext(elem)) case _ ⇒ throw new IllegalStateException("cannot push to uninitialized substream") } def completeSubstream(): Unit = status.get match { - case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(OnComplete) + case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(ActorSubscriberMessage.OnComplete) case null ⇒ - if (!status.compareAndSet(null, OnComplete)) - status.get.asInstanceOf[AsyncCallback[Any]].invoke(OnComplete) + if (!status.compareAndSet(null, ActorSubscriberMessage.OnComplete)) + status.get.asInstanceOf[AsyncCallback[Any]].invoke(ActorSubscriberMessage.OnComplete) } def failSubstream(ex: Throwable): Unit = status.get match { - case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(OnError(ex)) + case f: AsyncCallback[Any] @unchecked ⇒ f.invoke(ActorSubscriberMessage.OnError(ex)) case null ⇒ - val failure = OnError(ex) + val failure = ActorSubscriberMessage.OnError(ex) if (!status.compareAndSet(null, failure)) status.get.asInstanceOf[AsyncCallback[Any]].invoke(failure) } def timeout(d: FiniteDuration): Boolean = - status.compareAndSet(null, OnError(new SubscriptionTimeoutException(s"Substream Source has not been materialized in $d"))) + status.compareAndSet(null, ActorSubscriberMessage.OnError(new SubscriptionTimeoutException(s"Substream Source has not been materialized in $d"))) override def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) with OutHandler { setHandler(out, this) @tailrec private def setCB(cb: AsyncCallback[ActorSubscriberMessage]): Unit = { status.get match { - case null ⇒ if (!status.compareAndSet(null, cb)) setCB(cb) - case OnComplete ⇒ completeStage() - case OnError(ex) ⇒ failStage(ex) - case _: AsyncCallback[_] ⇒ failStage(new IllegalStateException("Substream Source cannot be materialized more than once")) + case null ⇒ if (!status.compareAndSet(null, cb)) setCB(cb) + case ActorSubscriberMessage.OnComplete ⇒ completeStage() + case ActorSubscriberMessage.OnError(ex) ⇒ failStage(ex) + case _: AsyncCallback[_] ⇒ failStage(new IllegalStateException("Substream Source cannot be materialized more than once")) } } override def preStart(): Unit = { val ourOwnCallback = getAsyncCallback[ActorSubscriberMessage] { - case OnComplete ⇒ completeStage() - case OnError(ex) ⇒ failStage(ex) - case OnNext(elem) ⇒ 
push(out, elem.asInstanceOf[T]) + case ActorSubscriberMessage.OnComplete ⇒ completeStage() + case ActorSubscriberMessage.OnError(ex) ⇒ failStage(ex) + case ActorSubscriberMessage.OnNext(elem) ⇒ push(out, elem.asInstanceOf[T]) } setCB(ourOwnCallback) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/FilePublisher.scala b/akka-stream/src/main/scala/akka/stream/impl/io/FilePublisher.scala index 993d73c9a5..54c54da084 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/FilePublisher.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/FilePublisher.scala @@ -1,23 +1,26 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io -import java.io.{ File, RandomAccessFile } +import java.io.File import java.nio.ByteBuffer import java.nio.channels.FileChannel +import akka.Done import akka.actor.{ Deploy, ActorLogging, DeadLetterSuppression, Props } import akka.stream.actor.ActorPublisherMessage +import akka.stream.io.IOResult import akka.util.ByteString import scala.annotation.tailrec import scala.concurrent.Promise +import scala.util.{ Failure, Success } import scala.util.control.NonFatal /** INTERNAL API */ private[akka] object FilePublisher { - def props(f: File, completionPromise: Promise[Long], chunkSize: Int, initialBuffer: Int, maxBuffer: Int) = { + def props(f: File, completionPromise: Promise[IOResult], chunkSize: Int, initialBuffer: Int, maxBuffer: Int) = { require(chunkSize > 0, s"chunkSize must be > 0 (was $chunkSize)") require(initialBuffer > 0, s"initialBuffer must be > 0 (was $initialBuffer)") require(maxBuffer >= initialBuffer, s"maxBuffer must be >= initialBuffer (was $maxBuffer)") @@ -26,12 +29,13 @@ private[akka] object FilePublisher { .withDeploy(Deploy.local) } - private final case object Continue extends DeadLetterSuppression + private case object Continue extends DeadLetterSuppression + val Read = java.util.Collections.singleton(java.nio.file.StandardOpenOption.READ) } /** INTERNAL API */ -private[akka] final class FilePublisher(f: File, bytesReadPromise: Promise[Long], chunkSize: Int, initialBuffer: Int, maxBuffer: Int) +private[akka] final class FilePublisher(f: File, completionPromise: Promise[IOResult], chunkSize: Int, initialBuffer: Int, maxBuffer: Int) extends akka.stream.actor.ActorPublisher[ByteString] with ActorLogging { import FilePublisher._ @@ -41,13 +45,11 @@ private[akka] final class FilePublisher(f: File, bytesReadPromise: Promise[Long] var readBytesTotal = 0L var availableChunks: Vector[ByteString] = Vector.empty // TODO possibly resign read-ahead-ing and make fusable as Stage - private var raf: RandomAccessFile = _ private var chan: FileChannel = _ override def preStart() = { try { - raf = new RandomAccessFile(f, "r") // best way to express this in JDK6, OpenOption are available since JDK7 - chan = raf.getChannel + chan = FileChannel.open(f.toPath, FilePublisher.Read) } catch { case ex: Exception ⇒ onErrorThenStop(ex) @@ -80,7 +82,7 @@ private[akka] final class FilePublisher(f: File, bytesReadPromise: Promise[Long] } /** BLOCKING I/O READ */ - @tailrec final def readAhead(maxChunks: Int, chunks: Vector[ByteString]): Vector[ByteString] = + @tailrec def readAhead(maxChunks: Int, chunks: Vector[ByteString]): Vector[ByteString] = if (chunks.size <= maxChunks && isActive) { (try chan.read(buf) catch { case NonFatal(ex) ⇒ onErrorThenStop(ex); Int.MinValue }) match { case -1 ⇒ // EOF @@ -98,13 +100,18 @@ private[akka] final class FilePublisher(f: File, bytesReadPromise: Promise[Long] } 
} else chunks - private final def eofEncountered: Boolean = eofReachedAtOffset != Long.MinValue + private def eofEncountered: Boolean = eofReachedAtOffset != Long.MinValue override def postStop(): Unit = { super.postStop() - bytesReadPromise.trySuccess(readBytesTotal) - try if (chan ne null) chan.close() - finally if (raf ne null) raf.close() + try { + if (chan ne null) chan.close() + } catch { + case ex: Exception ⇒ + completionPromise.success(IOResult(readBytesTotal, Failure(ex))) + } + + completionPromise.trySuccess(IOResult(readBytesTotal, Success(Done))) } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/FileSubscriber.scala b/akka-stream/src/main/scala/akka/stream/impl/io/FileSubscriber.scala index b282e524e3..5f7a2a6a67 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/FileSubscriber.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/FileSubscriber.scala @@ -1,49 +1,52 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io -import java.io.{ File, RandomAccessFile } +import java.io.File import java.nio.channels.FileChannel +import java.util.Collections +import akka.Done import akka.actor.{ Deploy, ActorLogging, Props } +import akka.stream.io.IOResult import akka.stream.actor.{ ActorSubscriberMessage, WatermarkRequestStrategy } import akka.util.ByteString import scala.concurrent.Promise +import scala.util.{ Failure, Success } /** INTERNAL API */ private[akka] object FileSubscriber { - def props(f: File, completionPromise: Promise[Long], bufSize: Int, append: Boolean) = { + def props(f: File, completionPromise: Promise[IOResult], bufSize: Int, append: Boolean) = { require(bufSize > 0, "buffer size must be > 0") Props(classOf[FileSubscriber], f, completionPromise, bufSize, append).withDeploy(Deploy.local) } + import java.nio.file.StandardOpenOption._ + val Write = Collections.singleton(WRITE) + val Append = Collections.singleton(APPEND) } /** INTERNAL API */ -private[akka] class FileSubscriber(f: File, bytesWrittenPromise: Promise[Long], bufSize: Int, append: Boolean) +private[akka] class FileSubscriber(f: File, completionPromise: Promise[IOResult], bufSize: Int, append: Boolean) extends akka.stream.actor.ActorSubscriber with ActorLogging { override protected val requestStrategy = WatermarkRequestStrategy(highWatermark = bufSize) - private var raf: RandomAccessFile = _ private var chan: FileChannel = _ private var bytesWritten: Long = 0 override def preStart(): Unit = try { - raf = new RandomAccessFile(f, "rw") // best way to express this in JDK6, OpenOption are available since JDK7 - chan = raf.getChannel - - // manually supporting appending to files - in Java 7 we could use OpenModes: FileChannel.open(f, openOptions.asJava) - if (append) chan.position(chan.size()) + val openOptions = if (append) FileSubscriber.Append else FileSubscriber.Write + chan = FileChannel.open(f.toPath, openOptions) super.preStart() } catch { case ex: Exception ⇒ - bytesWrittenPromise.failure(ex) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) cancel() } @@ -53,12 +56,13 @@ private[akka] class FileSubscriber(f: File, bytesWrittenPromise: Promise[Long], bytesWritten += chan.write(bytes.asByteBuffer) } catch { case ex: Exception ⇒ - bytesWrittenPromise.failure(ex) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) cancel() } - case ActorSubscriberMessage.OnError(cause) ⇒ - log.error(cause, "Tearing down FileSink({}) due to upstream error", f.getAbsolutePath) + case 
ActorSubscriberMessage.OnError(ex) ⇒ + log.error(ex, "Tearing down FileSink({}) due to upstream error", f.getAbsolutePath) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) context.stop(self) case ActorSubscriberMessage.OnComplete ⇒ @@ -66,16 +70,20 @@ private[akka] class FileSubscriber(f: File, bytesWrittenPromise: Promise[Long], chan.force(true) } catch { case ex: Exception ⇒ - bytesWrittenPromise.failure(ex) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) } context.stop(self) } override def postStop(): Unit = { - bytesWrittenPromise.trySuccess(bytesWritten) + try { + if (chan ne null) chan.close() + } catch { + case ex: Exception ⇒ + completionPromise.success(IOResult(bytesWritten, Failure(ex))) + } - if (chan ne null) chan.close() - if (raf ne null) raf.close() + completionPromise.trySuccess(IOResult(bytesWritten, Success(Done))) super.postStop() } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala b/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala index c7fad7964c..245c12a957 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala @@ -1,9 +1,10 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io import java.io.{ File, OutputStream } +import akka.stream.io.IOResult import akka.stream.impl.SinkModule import akka.stream.impl.StreamLayout.Module import akka.stream.impl.Stages.DefaultAttributes.IODispatcher @@ -18,21 +19,21 @@ import scala.concurrent.{ Future, Promise } * (creating it before hand if necessary). */ private[akka] final class FileSink(f: File, append: Boolean, val attributes: Attributes, shape: SinkShape[ByteString]) - extends SinkModule[ByteString, Future[Long]](shape) { + extends SinkModule[ByteString, Future[IOResult]](shape) { override def create(context: MaterializationContext) = { val materializer = ActorMaterializer.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) - val bytesWrittenPromise = Promise[Long]() - val props = FileSubscriber.props(f, bytesWrittenPromise, settings.maxInputBufferSize, append) + val ioResultPromise = Promise[IOResult]() + val props = FileSubscriber.props(f, ioResultPromise, settings.maxInputBufferSize, append) val dispatcher = context.effectiveAttributes.get[Dispatcher](IODispatcher).dispatcher val ref = materializer.actorOf(context, props.withDispatcher(dispatcher)) - (akka.stream.actor.ActorSubscriber[ByteString](ref), bytesWrittenPromise.future) + (akka.stream.actor.ActorSubscriber[ByteString](ref), ioResultPromise.future) } - override protected def newInstance(shape: SinkShape[ByteString]): SinkModule[ByteString, Future[Long]] = + override protected def newInstance(shape: SinkShape[ByteString]): SinkModule[ByteString, Future[IOResult]] = new FileSink(f, append, attributes, shape) override def withAttributes(attr: Attributes): Module = @@ -44,26 +45,26 @@ private[akka] final class FileSink(f: File, append: Boolean, val attributes: Att * Creates simple synchronous (Java 6 compatible) Sink which writes all incoming elements to the given file * (creating it before hand if necessary). 
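 * When `autoFlush` is enabled the underlying OutputStream is flushed after every written chunk.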
*/ -private[akka] final class OutputStreamSink(createOutput: () ⇒ OutputStream, val attributes: Attributes, shape: SinkShape[ByteString]) - extends SinkModule[ByteString, Future[Long]](shape) { +private[akka] final class OutputStreamSink(createOutput: () ⇒ OutputStream, val attributes: Attributes, shape: SinkShape[ByteString], autoFlush: Boolean) + extends SinkModule[ByteString, Future[IOResult]](shape) { override def create(context: MaterializationContext) = { val materializer = ActorMaterializer.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) - val bytesWrittenPromise = Promise[Long]() + val ioResultPromise = Promise[IOResult]() val os = createOutput() // if it fails, we fail the materialization - val props = OutputStreamSubscriber.props(os, bytesWrittenPromise, settings.maxInputBufferSize) + val props = OutputStreamSubscriber.props(os, ioResultPromise, settings.maxInputBufferSize, autoFlush) val ref = materializer.actorOf(context, props) - (akka.stream.actor.ActorSubscriber[ByteString](ref), bytesWrittenPromise.future) + (akka.stream.actor.ActorSubscriber[ByteString](ref), ioResultPromise.future) } - override protected def newInstance(shape: SinkShape[ByteString]): SinkModule[ByteString, Future[Long]] = - new OutputStreamSink(createOutput, attributes, shape) + override protected def newInstance(shape: SinkShape[ByteString]): SinkModule[ByteString, Future[IOResult]] = + new OutputStreamSink(createOutput, attributes, shape, autoFlush) override def withAttributes(attr: Attributes): Module = - new OutputStreamSink(createOutput, attr, amendShape(attr)) + new OutputStreamSink(createOutput, attr, amendShape(attr), autoFlush) } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala index 2880364714..b116c84da3 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/IOSources.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io @@ -7,6 +7,7 @@ import java.io.{ File, InputStream } import akka.stream._ import akka.stream.ActorAttributes.Dispatcher +import akka.stream.io.IOResult import akka.stream.impl.StreamLayout.Module import akka.stream.impl.Stages.DefaultAttributes.IODispatcher import akka.stream.impl.{ ErrorPublisher, SourceModule } @@ -19,23 +20,23 @@ import scala.concurrent.{ Future, Promise } * Creates simple synchronous (Java 6 compatible) Source backed by the given file. 
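 * Materializes a [[Future]] of [[IOResult]] carrying the number of bytes read and the completion status.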
*/ private[akka] final class FileSource(f: File, chunkSize: Int, val attributes: Attributes, shape: SourceShape[ByteString]) - extends SourceModule[ByteString, Future[Long]](shape) { + extends SourceModule[ByteString, Future[IOResult]](shape) { require(chunkSize > 0, "chunkSize must be greater than 0") override def create(context: MaterializationContext) = { // FIXME rewrite to be based on GraphStage rather than dangerous downcasts val materializer = ActorMaterializer.downcast(context.materializer) val settings = materializer.effectiveSettings(context.effectiveAttributes) - val bytesReadPromise = Promise[Long]() - val props = FilePublisher.props(f, bytesReadPromise, chunkSize, settings.initialInputBufferSize, settings.maxInputBufferSize) + val ioResultPromise = Promise[IOResult]() + val props = FilePublisher.props(f, ioResultPromise, chunkSize, settings.initialInputBufferSize, settings.maxInputBufferSize) val dispatcher = context.effectiveAttributes.get[Dispatcher](IODispatcher).dispatcher val ref = materializer.actorOf(context, props.withDispatcher(dispatcher)) - (akka.stream.actor.ActorPublisher[ByteString](ref), bytesReadPromise.future) + (akka.stream.actor.ActorPublisher[ByteString](ref), ioResultPromise.future) } - override protected def newInstance(shape: SourceShape[ByteString]): SourceModule[ByteString, Future[Long]] = + override protected def newInstance(shape: SourceShape[ByteString]): SourceModule[ByteString, Future[IOResult]] = new FileSource(f, chunkSize, attributes, shape) override def withAttributes(attr: Attributes): Module = @@ -47,28 +48,28 @@ private[akka] final class FileSource(f: File, chunkSize: Int, val attributes: At * Source backed by the given input stream. */ private[akka] final class InputStreamSource(createInputStream: () ⇒ InputStream, chunkSize: Int, val attributes: Attributes, shape: SourceShape[ByteString]) - extends SourceModule[ByteString, Future[Long]](shape) { + extends SourceModule[ByteString, Future[IOResult]](shape) { override def create(context: MaterializationContext) = { val materializer = ActorMaterializer.downcast(context.materializer) - val bytesReadPromise = Promise[Long]() + val ioResultPromise = Promise[IOResult]() val pub = try { val is = createInputStream() // can throw, i.e. 
FileNotFound - val props = InputStreamPublisher.props(is, bytesReadPromise, chunkSize) + val props = InputStreamPublisher.props(is, ioResultPromise, chunkSize) val ref = materializer.actorOf(context, props) akka.stream.actor.ActorPublisher[ByteString](ref) } catch { case ex: Exception ⇒ - bytesReadPromise.failure(ex) + ioResultPromise.failure(ex) ErrorPublisher(ex, attributes.nameOrDefault("inputStreamSource")).asInstanceOf[Publisher[ByteString]] } - (pub, bytesReadPromise.future) + (pub, ioResultPromise.future) } - override protected def newInstance(shape: SourceShape[ByteString]): SourceModule[ByteString, Future[Long]] = + override protected def newInstance(shape: SourceShape[ByteString]): SourceModule[ByteString, Future[IOResult]] = new InputStreamSource(createInputStream, chunkSize, attributes, shape) override def withAttributes(attr: Attributes): Module = diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamPublisher.scala b/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamPublisher.scala index 7300f9652f..c4e93264a2 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamPublisher.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamPublisher.scala @@ -1,23 +1,26 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io import java.io.InputStream +import akka.Done import akka.actor.{ Deploy, ActorLogging, DeadLetterSuppression, Props } import akka.io.DirectByteBufferPool import akka.stream.actor.ActorPublisherMessage +import akka.stream.io.IOResult import akka.util.ByteString import akka.util.ByteString.ByteString1C import scala.annotation.tailrec import scala.concurrent.Promise +import scala.util.{ Failure, Success } /** INTERNAL API */ private[akka] object InputStreamPublisher { - def props(is: InputStream, completionPromise: Promise[Long], chunkSize: Int): Props = { + def props(is: InputStream, completionPromise: Promise[IOResult], chunkSize: Int): Props = { require(chunkSize > 0, s"chunkSize must be > 0 (was $chunkSize)") Props(classOf[InputStreamPublisher], is, completionPromise, chunkSize).withDeploy(Deploy.local) @@ -27,7 +30,7 @@ private[akka] object InputStreamPublisher { } /** INTERNAL API */ -private[akka] class InputStreamPublisher(is: InputStream, bytesReadPromise: Promise[Long], chunkSize: Int) +private[akka] class InputStreamPublisher(is: InputStream, completionPromise: Promise[IOResult], chunkSize: Int) extends akka.stream.actor.ActorPublisher[ByteString] with ActorLogging { @@ -73,8 +76,14 @@ private[akka] class InputStreamPublisher(is: InputStream, bytesReadPromise: Prom override def postStop(): Unit = { super.postStop() - bytesReadPromise.trySuccess(readBytesTotal) - if (is ne null) is.close() + try { + if (is ne null) is.close() + } catch { + case ex: Exception ⇒ + completionPromise.success(IOResult(readBytesTotal, Failure(ex))) + } + + completionPromise.trySuccess(IOResult(readBytesTotal, Success(Done))) } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamSinkStage.scala b/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamSinkStage.scala index d5cf4a3f32..0578e21aa7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamSinkStage.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/InputStreamSinkStage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.impl.io diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala index c4c1a82157..accacb2d01 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSourceStage.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSubscriber.scala b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSubscriber.scala index a82f875019..aad2c10caf 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSubscriber.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/OutputStreamSubscriber.scala @@ -1,27 +1,30 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io import java.io.OutputStream +import akka.Done import akka.actor.{ Deploy, ActorLogging, Props } import akka.stream.actor.{ ActorSubscriberMessage, WatermarkRequestStrategy } +import akka.stream.io.IOResult import akka.util.ByteString import scala.concurrent.Promise +import scala.util.{ Failure, Success } /** INTERNAL API */ private[akka] object OutputStreamSubscriber { - def props(os: OutputStream, completionPromise: Promise[Long], bufSize: Int) = { + def props(os: OutputStream, completionPromise: Promise[IOResult], bufSize: Int, autoFlush: Boolean) = { require(bufSize > 0, "buffer size must be > 0") - Props(classOf[OutputStreamSubscriber], os, completionPromise, bufSize).withDeploy(Deploy.local) + Props(classOf[OutputStreamSubscriber], os, completionPromise, bufSize, autoFlush).withDeploy(Deploy.local) } } /** INTERNAL API */ -private[akka] class OutputStreamSubscriber(os: OutputStream, bytesWrittenPromise: Promise[Long], bufSize: Int) +private[akka] class OutputStreamSubscriber(os: OutputStream, completionPromise: Promise[IOResult], bufSize: Int, autoFlush: Boolean) extends akka.stream.actor.ActorSubscriber with ActorLogging { @@ -35,14 +38,16 @@ private[akka] class OutputStreamSubscriber(os: OutputStream, bytesWrittenPromise // blocking write os.write(bytes.toArray) bytesWritten += bytes.length + if (autoFlush) os.flush() } catch { case ex: Exception ⇒ - bytesWrittenPromise.failure(ex) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) cancel() } - case ActorSubscriberMessage.OnError(cause) ⇒ - log.error(cause, "Tearing down OutputStreamSink due to upstream error, wrote bytes: {}", bytesWritten) + case ActorSubscriberMessage.OnError(ex) ⇒ + log.error(ex, "Tearing down OutputStreamSink due to upstream error, wrote bytes: {}", bytesWritten) + completionPromise.success(IOResult(bytesWritten, Failure(ex))) context.stop(self) case ActorSubscriberMessage.OnComplete ⇒ @@ -51,9 +56,14 @@ private[akka] class OutputStreamSubscriber(os: OutputStream, bytesWrittenPromise } override def postStop(): Unit = { - bytesWrittenPromise.trySuccess(bytesWritten) + try { + if (os ne null) os.close() + } catch { + case ex: Exception ⇒ + completionPromise.success(IOResult(bytesWritten, Failure(ex))) + } - if (os ne null) os.close() + completionPromise.trySuccess(IOResult(bytesWritten, Success(Done))) super.postStop() } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/SslTlsCipherActor.scala b/akka-stream/src/main/scala/akka/stream/impl/io/SslTlsCipherActor.scala index 
323e08c853..e8f567ea05 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/SslTlsCipherActor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/SslTlsCipherActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.impl.io @@ -14,7 +14,7 @@ import akka.stream.impl.FanIn.InputBunch import akka.stream.impl.FanOut.OutputBunch import akka.stream.impl._ import akka.util.ByteString -import com.typesafe.sslconfig.akka.{ AkkaSSLConfig, SSLEngineConfigurator } +import com.typesafe.sslconfig.akka.AkkaSSLConfig import scala.annotation.tailrec import akka.stream.io._ @@ -158,16 +158,15 @@ private[akka] class SslTlsCipherActor(settings: ActorMaterializerSettings, applySessionParameters(firstSession) def applySessionParameters(params: NegotiateNewSession): Unit = { - import params._ - enabledCipherSuites foreach (cs ⇒ engine.setEnabledCipherSuites(cs.toArray)) - enabledProtocols foreach (p ⇒ engine.setEnabledProtocols(p.toArray)) - clientAuth match { + params.enabledCipherSuites foreach (cs ⇒ engine.setEnabledCipherSuites(cs.toArray)) + params.enabledProtocols foreach (p ⇒ engine.setEnabledProtocols(p.toArray)) + params.clientAuth match { case Some(ClientAuth.None) ⇒ engine.setNeedClientAuth(false) case Some(ClientAuth.Want) ⇒ engine.setWantClientAuth(true) case Some(ClientAuth.Need) ⇒ engine.setNeedClientAuth(true) - case None ⇒ // do nothing + case _ ⇒ // do nothing } - sslParameters foreach (p ⇒ engine.setSSLParameters(p)) + params.sslParameters foreach (p ⇒ engine.setSSLParameters(p)) engine.beginHandshake() lastHandshakeStatus = engine.getHandshakeStatus diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala index 48c7de3a34..789c49211a 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.impl.io @@ -16,7 +16,7 @@ import akka.stream.impl.ReactiveStreamsCompliance import akka.stream.impl.fusing.GraphStages.detacher import akka.stream.scaladsl.Tcp.{ OutgoingConnection, ServerBinding } import akka.stream.scaladsl.{ BidiFlow, Flow, Tcp ⇒ StreamTcp } -import akka.stream.stage.GraphStageLogic.StageActorRef +import akka.stream.stage.GraphStageLogic.StageActor import akka.stream.stage._ import akka.util.ByteString @@ -48,12 +48,12 @@ private[stream] class ConnectionSourceStage(val tcpManager: ActorRef, val bindingPromise = Promise[ServerBinding] val logic = new TimerGraphStageLogic(shape) { - implicit var self: StageActorRef = _ + implicit def self: ActorRef = stageActor.ref var listener: ActorRef = _ var unbindPromise = Promise[Unit]() override def preStart(): Unit = { - self = getStageActorRef(receive) + getStageActor(receive) tcpManager ! Tcp.Bind(self, endpoint, backlog, options, pullMode = true) } @@ -63,7 +63,7 @@ private[stream] class ConnectionSourceStage(val tcpManager: ActorRef, msg match { case Bound(localAddress) ⇒ listener = sender - self.watch(listener) + stageActor.watch(listener) if (isAvailable(out)) listener ! ResumeAccepting(1) val target = self bindingPromise.success(ServerBinding(localAddress)(() ⇒ { target ! 
Unbind; unbindPromise.future })) @@ -118,7 +118,7 @@ private[stream] class ConnectionSourceStage(val tcpManager: ActorRef, private def tryUnbind(): Unit = { if (listener ne null) { - self.unwatch(listener) + stageActor.unwatch(listener) setKeepGoing(true) listener ! Unbind } @@ -169,7 +169,7 @@ private[stream] object TcpConnectionStage { * easier to maintain and understand. */ class TcpStreamLogic(val shape: FlowShape[ByteString, ByteString], val role: TcpRole) extends GraphStageLogic(shape) { - implicit private var self: StageActorRef = _ + implicit def self: ActorRef = stageActor.ref private def bytesIn = shape.in private def bytesOut = shape.out @@ -185,14 +185,12 @@ private[stream] object TcpConnectionStage { role match { case Inbound(conn, _) ⇒ setHandler(bytesOut, readHandler) - self = getStageActorRef(connected) connection = conn - self.watch(connection) + getStageActor(connected).watch(connection) connection ! Register(self, keepOpenOnPeerClosed = true, useResumeWriting = false) pull(bytesIn) case ob @ Outbound(manager, cmd, _, _) ⇒ - self = getStageActorRef(connecting(ob)) - self.watch(manager) + getStageActor(connecting(ob)).watch(manager) manager ! cmd } } @@ -207,9 +205,9 @@ private[stream] object TcpConnectionStage { role.asInstanceOf[Outbound].localAddressPromise.success(c.localAddress) connection = sender setHandler(bytesOut, readHandler) - self.unwatch(ob.manager) - self = getStageActorRef(connected) - self.watch(connection) + stageActor.unwatch(ob.manager) + stageActor.become(connected) + stageActor.watch(connection) connection ! Register(self, keepOpenOnPeerClosed = true, useResumeWriting = false) if (isAvailable(bytesOut)) connection ! ResumeReading pull(bytesIn) @@ -263,11 +261,13 @@ private[stream] object TcpConnectionStage { // (or half-close is turned off) if (isClosed(bytesOut) || !role.halfClose) connection ! Close // We still read, so we only close the write side - else connection ! ConfirmedClose + else if (connection != null) connection ! ConfirmedClose + else completeStage() } override def onUpstreamFailure(ex: Throwable): Unit = { - connection ! Abort + if (connection != null) connection ! Abort + else failStage(ex) } }) diff --git a/akka-stream/src/main/scala/akka/stream/io/ByteStringParser.scala b/akka-stream/src/main/scala/akka/stream/io/ByteStringParser.scala index 64817557d6..d034c3dcbc 100644 --- a/akka-stream/src/main/scala/akka/stream/io/ByteStringParser.scala +++ b/akka-stream/src/main/scala/akka/stream/io/ByteStringParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.io @@ -19,30 +19,35 @@ abstract class ByteStringParser[T] extends GraphStage[FlowShape[ByteString, T]] final override val shape = FlowShape(bytesIn, objOut) class ParsingLogic extends GraphStageLogic(shape) { + var pullOnParserRequest = false override def preStart(): Unit = pull(bytesIn) setHandler(objOut, eagerTerminateOutput) private var buffer = ByteString.empty private var current: ParseStep[T] = FinishedParser + private var acceptUpstreamFinish: Boolean = true final protected def startWith(step: ParseStep[T]): Unit = current = step @tailrec private def doParse(): Unit = if (buffer.nonEmpty) { + val reader = new ByteReader(buffer) val cont = try { - val reader = new ByteReader(buffer) - val (elem, next) = current.parse(reader) - emit(objOut, elem) - if (next == FinishedParser) { + val parseResult = current.parse(reader) + acceptUpstreamFinish = parseResult.acceptUpstreamFinish + parseResult.result.map(emit(objOut, _)) + if (parseResult.nextStep == FinishedParser) { completeStage() false } else { buffer = reader.remainingData - current = next + current = parseResult.nextStep true } } catch { case NeedMoreData ⇒ + acceptUpstreamFinish = false + if (current.canWorkWithPartialData) buffer = reader.remainingData pull(bytesIn) false } @@ -51,11 +56,12 @@ abstract class ByteStringParser[T] extends GraphStage[FlowShape[ByteString, T]] setHandler(bytesIn, new InHandler { override def onPush(): Unit = { + pullOnParserRequest = false buffer ++= grab(bytesIn) doParse() } override def onUpstreamFinish(): Unit = - if (buffer.isEmpty) completeStage() + if (buffer.isEmpty && acceptUpstreamFinish) completeStage() else current.onTruncation() }) } @@ -63,13 +69,28 @@ abstract class ByteStringParser[T] extends GraphStage[FlowShape[ByteString, T]] object ByteStringParser { + /** + * @param result - parser can return some element for downstream or return None if no element was generated + * @param nextStep - next parser + * @param acceptUpstreamFinish - if true - stream will complete when received `onUpstreamFinish`, if "false" + * - onTruncation will be called + */ + case class ParseResult[+T](result: Option[T], + nextStep: ParseStep[T], + acceptUpstreamFinish: Boolean = true) + trait ParseStep[+T] { - def parse(reader: ByteReader): (T, ParseStep[T]) + /** + * Must return true when NeedMoreData will clean buffer. If returns false - next pulled + * data will be appended to existing data in buffer + */ + def canWorkWithPartialData: Boolean = false + def parse(reader: ByteReader): ParseResult[T] def onTruncation(): Unit = throw new IllegalStateException("truncated data in ByteStringParser") } object FinishedParser extends ParseStep[Nothing] { - def parse(reader: ByteReader) = + override def parse(reader: ByteReader) = throw new IllegalStateException("no initial parser installed: you must use startWith(...)") } @@ -83,6 +104,7 @@ object ByteStringParser { def remainingSize: Int = input.size - off def currentOffset: Int = off + def remainingData: ByteString = input.drop(off) def fromStartToHere: ByteString = input.take(off) diff --git a/akka-stream/src/main/scala/akka/stream/io/Framing.scala b/akka-stream/src/main/scala/akka/stream/io/Framing.scala index 46452294b9..09ce0bc93e 100644 --- a/akka-stream/src/main/scala/akka/stream/io/Framing.scala +++ b/akka-stream/src/main/scala/akka/stream/io/Framing.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.io import java.nio.ByteOrder +import akka.NotUsed import akka.stream.scaladsl.{ Keep, BidiFlow, Flow } import akka.stream.stage._ import akka.util.{ ByteIterator, ByteStringBuilder, ByteString } @@ -30,7 +31,7 @@ object Framing { * exceeded this Flow will fail the stream. * @return */ - def delimiter(delimiter: ByteString, maximumFrameLength: Int, allowTruncation: Boolean = false): Flow[ByteString, ByteString, Unit] = + def delimiter(delimiter: ByteString, maximumFrameLength: Int, allowTruncation: Boolean = false): Flow[ByteString, ByteString, NotUsed] = Flow[ByteString].transform(() ⇒ new DelimiterFramingStage(delimiter, maximumFrameLength, allowTruncation)) .named("delimiterFraming") @@ -52,7 +53,7 @@ object Framing { def lengthField(fieldLength: Int, fieldOffset: Int = 0, maximumFrameLength: Int, - byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN): Flow[ByteString, ByteString, Unit] = { + byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN): Flow[ByteString, ByteString, NotUsed] = { require(fieldLength >= 1 && fieldLength <= 4, "Length field length must be 1, 2, 3 or 4.") Flow[ByteString].transform(() ⇒ new LengthFieldFramingStage(fieldLength, fieldOffset, maximumFrameLength, byteOrder)) .named("lengthFieldFraming") @@ -77,7 +78,7 @@ object Framing { * included in this limit. * @return */ - def simpleFramingProtocol(maximumMessageLength: Int): BidiFlow[ByteString, ByteString, ByteString, ByteString, Unit] = { + def simpleFramingProtocol(maximumMessageLength: Int): BidiFlow[ByteString, ByteString, ByteString, ByteString, NotUsed] = { val decoder = lengthField(4, 0, maximumMessageLength + 4, ByteOrder.BIG_ENDIAN).map(_.drop(4)) val encoder = Flow[ByteString].transform(() ⇒ new PushStage[ByteString, ByteString] { diff --git a/akka-stream/src/main/scala/akka/stream/io/IOResult.scala b/akka-stream/src/main/scala/akka/stream/io/IOResult.scala new file mode 100644 index 0000000000..c65e2d4e32 --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/io/IOResult.scala @@ -0,0 +1,36 @@ +/** + * Copyright (C) 2016 Typesafe Inc. + */ +package akka.stream.io + +import akka.Done +import scala.util.{ Failure, Success, Try } + +/** + * Holds a result of an IO operation. + * + * @param count Numeric value depending on context, for example IO operations performed or bytes processed. + * @param status Status of the result. Can be either [[akka.Done]] or an exception. + */ +final case class IOResult private[stream] (count: Long, status: Try[Done]) { + + /** + * Java API: Numeric value depending on context, for example IO operations performed or bytes processed. + */ + def getCount: Long = count + + /** + * Java API: Indicates whether IO operation completed successfully or not. + */ + def wasSuccessful: Boolean = status.isSuccess + + /** + * Java API: If the IO operation resulted in an error, returns the corresponding [[Throwable]] + * or throws [[UnsupportedOperationException]] otherwise. + */ + def getError: Throwable = status match { + case Failure(t) ⇒ t + case Success(_) ⇒ throw new UnsupportedOperationException("IO operation was successfull.") + } + +} diff --git a/akka-stream/src/main/scala/akka/stream/io/SslTls.scala b/akka-stream/src/main/scala/akka/stream/io/SslTls.scala index 04ec83a536..913bdbae91 100644 --- a/akka-stream/src/main/scala/akka/stream/io/SslTls.scala +++ b/akka-stream/src/main/scala/akka/stream/io/SslTls.scala @@ -1,19 +1,21 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.io import java.lang.{ Integer ⇒ jInteger } import java.security.Principal +import java.util.Optional -import akka.japi +import akka.{ NotUsed, japi } import akka.stream._ import akka.stream.impl.StreamLayout.Module import akka.util.ByteString import javax.net.ssl._ + import scala.annotation.varargs import scala.collection.immutable -import java.security.cert.Certificate +import scala.compat.java8.OptionConverters /** * Stream cipher support based upon JSSE. @@ -53,8 +55,8 @@ import java.security.cert.Certificate */ object SslTls { - type ScalaFlow = scaladsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SslTlsInbound, Unit] - type JavaFlow = javadsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SslTlsInbound, Unit] + type ScalaFlow = scaladsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SslTlsInbound, NotUsed] + type JavaFlow = javadsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SslTlsInbound, NotUsed] /** * Scala API: create a StreamTls [[akka.stream.scaladsl.BidiFlow]]. The @@ -106,8 +108,8 @@ object SslTls { * The SSLEngine may use this information e.g. when an endpoint identification algorithm was * configured using [[SSLParameters.setEndpointIdentificationAlgorithm]]. */ - def create(sslContext: SSLContext, firstSession: NegotiateNewSession, role: Role, hostInfo: japi.Option[japi.Pair[String, jInteger]], closing: Closing): JavaFlow = - new javadsl.BidiFlow(apply(sslContext, firstSession, role, closing, hostInfo.asScala.map(e ⇒ (e.first, e.second)))) + def create(sslContext: SSLContext, firstSession: NegotiateNewSession, role: Role, hostInfo: Optional[japi.Pair[String, jInteger]], closing: Closing): JavaFlow = + new javadsl.BidiFlow(apply(sslContext, firstSession, role, closing, OptionConverters.toScala(hostInfo).map(e ⇒ (e.first, e.second)))) /** * INTERNAL API. @@ -147,7 +149,7 @@ object SslTls { } /** - * This object holds simple wrapping [[BidiFlow]] implementations that can + * This object holds simple wrapping [[akka.stream.scaladsl.BidiFlow]] implementations that can * be used instead of [[SslTls]] when no encryption is desired. The flows will * just adapt the message protocol by wrapping into [[SessionBytes]] and * unwrapping [[SendBytes]]. 
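// A minimal usage sketch of the placebo stage after the Unit → NotUsed change above
// (illustrative only, not part of this patch; `PlaceboExample` and `echo` are assumed example names):

import akka.NotUsed
import akka.stream.io.{ SessionBytes, SslTlsOutbound, SslTlsPlacebo }
import akka.stream.scaladsl.Flow
import akka.util.ByteString

object PlaceboExample {
  // any plain ByteString flow can be joined through the placebo "TLS" stage
  val echo: Flow[ByteString, ByteString, NotUsed] = Flow[ByteString]
  val joined: Flow[SslTlsOutbound, SessionBytes, NotUsed] = SslTlsPlacebo.forScala.join(echo)
}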
@@ -156,13 +158,13 @@ object SslTlsPlacebo { // this constructs a session for (invalid) protocol SSL_NULL_WITH_NULL_NULL private[akka] val dummySession = SSLContext.getDefault.createSSLEngine.getSession - val forScala: scaladsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SessionBytes, Unit] = + val forScala: scaladsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SessionBytes, NotUsed] = scaladsl.BidiFlow.fromGraph(scaladsl.GraphDSL.create() { implicit b ⇒ val top = b.add(scaladsl.Flow[SslTlsOutbound].collect { case SendBytes(bytes) ⇒ bytes }) val bottom = b.add(scaladsl.Flow[ByteString].map(SessionBytes(dummySession, _))) BidiShape.fromFlows(top, bottom) }) - val forJava: javadsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SessionBytes, Unit] = + val forJava: javadsl.BidiFlow[SslTlsOutbound, ByteString, ByteString, SessionBytes, NotUsed] = new javadsl.BidiFlow(forScala) } @@ -380,6 +382,10 @@ sealed trait SslTlsOutbound * - `enabledProtocols` will be passed to `SSLEngine::setEnabledProtocols()` * - `clientAuth` will be passed to `SSLEngine::setWantClientAuth()` or `SSLEngine.setNeedClientAuth()`, respectively * - `sslParameters` will be passed to `SSLEngine::setSSLParameters()` + * + * Please note that passing `clientAuth = None` means that no change is done + * on client authentication requirements while `clientAuth = Some(ClientAuth.None)` + * switches off client authentication. */ case class NegotiateNewSession( enabledCipherSuites: Option[immutable.Seq[String]], @@ -416,6 +422,7 @@ object NegotiateNewSession extends NegotiateNewSession(None, None, None, None) { * settings unchanged). */ def withDefaults = this + } /** @@ -431,8 +438,7 @@ case class SendBytes(bytes: ByteString) extends SslTlsOutbound * and verifies them if provided, and `None` disables peer certificate * verification. * - * See the documentation for `SSLEngine::setWantClientAuth` for more - * information. + * See the documentation for `SSLEngine::setWantClientAuth` for more information. */ sealed abstract class ClientAuth object ClientAuth { diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala index 490932f5f0..3dd91761ab 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/BidiFlow.scala @@ -1,14 +1,21 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.javadsl +import akka.NotUsed import akka.japi.function import akka.stream._ import scala.concurrent.duration.FiniteDuration object BidiFlow { + + private[this] val _identity: BidiFlow[Object, Object, Object, Object, NotUsed] = + BidiFlow.fromFlows(Flow.of(classOf[Object]), Flow.of(classOf[Object])) + + def identity[A, B]: BidiFlow[A, A, B, B, NotUsed] = _identity.asInstanceOf[BidiFlow[A, A, B, B, NotUsed]] + /** * A graph with the shape of a BidiFlow logically is a BidiFlow, this method makes * it so also in type. @@ -46,7 +53,7 @@ object BidiFlow { } /** - * Wraps two Flows to create a ''BidiFlow''. The materialized value of the resulting BidiFlow is Unit. + * Wraps two Flows to create a ''BidiFlow''. The materialized value of the resulting BidiFlow is NotUsed. 
* * {{{ * +----------------------------+ @@ -65,14 +72,14 @@ object BidiFlow { */ def fromFlows[I1, O1, I2, O2, M1, M2]( flow1: Graph[FlowShape[I1, O1], M1], - flow2: Graph[FlowShape[I2, O2], M2]): BidiFlow[I1, O1, I2, O2, Unit] = + flow2: Graph[FlowShape[I2, O2], M2]): BidiFlow[I1, O1, I2, O2, NotUsed] = new BidiFlow(scaladsl.BidiFlow.fromFlows(flow1, flow2)) /** * Create a BidiFlow where the top and bottom flows are just one simple mapping * stage each, expressed by the two functions. */ - def fromFunctions[I1, O1, I2, O2](top: function.Function[I1, O1], bottom: function.Function[I2, O2]): BidiFlow[I1, O1, I2, O2, Unit] = + def fromFunctions[I1, O1, I2, O2](top: function.Function[I1, O1], bottom: function.Function[I2, O2]): BidiFlow[I1, O1, I2, O2, NotUsed] = new BidiFlow(scaladsl.BidiFlow.fromFunctions(top.apply _, bottom.apply _)) /** @@ -84,7 +91,7 @@ object BidiFlow { * every second in one direction, but no elements are flowing in the other direction. I.e. this stage considers * the *joint* frequencies of the elements in both directions. */ - def bidirectionalIdleTimeout[I, O](timeout: FiniteDuration): BidiFlow[I, I, O, O, Unit] = + def bidirectionalIdleTimeout[I, O](timeout: FiniteDuration): BidiFlow[I, I, O, O, NotUsed] = new BidiFlow(scaladsl.BidiFlow.bidirectionalIdleTimeout(timeout)) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala b/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala index 35349c70f4..6299133654 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala @@ -1,15 +1,14 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.javadsl import java.io.{ InputStream, OutputStream, File } - import akka.japi.function import akka.stream.{ scaladsl, javadsl, ActorAttributes } +import akka.stream.io.IOResult import akka.util.ByteString - -import scala.concurrent.Future +import java.util.concurrent.CompletionStage /** * Factories to create sinks and sources from files @@ -21,20 +20,22 @@ object FileIO { * Overwrites existing files, if you want to append to an existing file use [[#file(File, Boolean)]] and * pass in `true` as the Boolean argument. * - * Materializes a [[Future]] that will be completed with the size of the file (in bytes) at the streams completion. + * Materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * * @param f The file to write to */ - def toFile(f: File): javadsl.Sink[ByteString, Future[java.lang.Long]] = toFile(f, append = false) + def toFile(f: File): javadsl.Sink[ByteString, CompletionStage[IOResult]] = toFile(f, append = false) /** * Creates a Sink that writes incoming [[ByteString]] elements to the given file and either overwrites * or appends to it. * - * Materializes a [[Future]] that will be completed with the size of the file (in bytes) at the streams completion. + * Materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. 
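A short Java sketch of the new materialized type: copying one file to another now yields a `CompletionStage<IOResult>` rather than a `Future<Long>`. The file names and the ActorSystem/materializer setup are illustrative only:

```java
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.io.IOResult;
import akka.stream.javadsl.FileIO;
import java.io.File;
import java.util.concurrent.CompletionStage;

public class FileIOSketch {
  public static void main(String[] args) {
    ActorSystem system = ActorSystem.create("fileio-sketch");
    Materializer mat = ActorMaterializer.create(system);

    // Read in.txt and write it to out.txt; runWith keeps the sink's materialized value.
    CompletionStage<IOResult> result =
        FileIO.fromFile(new File("in.txt"))
            .runWith(FileIO.toFile(new File("out.txt")), mat);

    result.thenAccept(r -> System.out.println("IO completed: " + r))
          .thenRun(system::terminate);
  }
}
```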
* * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. @@ -42,8 +43,8 @@ object FileIO { * @param f The file to write to * @param append Whether or not the file should be overwritten or appended to */ - def toFile(f: File, append: Boolean): javadsl.Sink[ByteString, Future[java.lang.Long]] = - new Sink(scaladsl.FileIO.toFile(f, append)).asInstanceOf[javadsl.Sink[ByteString, Future[java.lang.Long]]] + def toFile(f: File, append: Boolean): javadsl.Sink[ByteString, CompletionStage[IOResult]] = + new Sink(scaladsl.FileIO.toFile(f, append).toCompletionStage()) /** * Creates a Source from a Files contents. @@ -53,9 +54,10 @@ object FileIO { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. + * It materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * and a possible exception if IO operation was not completed successfully. */ - def fromFile(f: File): javadsl.Source[ByteString, Future[java.lang.Long]] = fromFile(f, 8192) + def fromFile(f: File): javadsl.Source[ByteString, CompletionStage[IOResult]] = fromFile(f, 8192) /** * Creates a synchronous (Java 6 compatible) Source from a Files contents. @@ -65,9 +67,10 @@ object FileIO { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. + * It materializes a [[java.util.concurrent.CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * and a possible exception if IO operation was not completed successfully. */ - def fromFile(f: File, chunkSize: Int): javadsl.Source[ByteString, Future[java.lang.Long]] = - new Source(scaladsl.FileIO.fromFile(f, chunkSize)).asInstanceOf[Source[ByteString, Future[java.lang.Long]]] + def fromFile(f: File, chunkSize: Int): javadsl.Source[ByteString, CompletionStage[IOResult]] = + new Source(scaladsl.FileIO.fromFile(f, chunkSize).toCompletionStage()) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala index 6a1a7c1c12..4e2573831b 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.javadsl +import akka.{ NotUsed, Done } import akka.event.LoggingAdapter import akka.japi.{ function, Pair } import akka.stream.impl.Timers.{ DelayInitial, IdleInject } @@ -12,19 +13,20 @@ import akka.stream.stage.Stage import org.reactivestreams.Processor import scala.annotation.unchecked.uncheckedVariance import scala.collection.immutable -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ object Flow { private[this] val _identity = new javadsl.Flow(scaladsl.Flow[Any]) /** Create a `Flow` which can process elements of type `T`. */ - def create[T](): javadsl.Flow[T, T, Unit] = fromGraph(scaladsl.Flow[T]) + def create[T](): javadsl.Flow[T, T, NotUsed] = fromGraph(scaladsl.Flow[T]) - def fromProcessor[I, O](processorFactory: function.Creator[Processor[I, O]]): javadsl.Flow[I, O, Unit] = + def fromProcessor[I, O](processorFactory: function.Creator[Processor[I, O]]): javadsl.Flow[I, O, NotUsed] = new Flow(scaladsl.Flow.fromProcessor(() ⇒ processorFactory.create())) def fromProcessorMat[I, O, Mat](processorFactory: function.Creator[Pair[Processor[I, O], Mat]]): javadsl.Flow[I, O, Mat] = @@ -34,7 +36,7 @@ object Flow { }) /** Create a `Flow` which can process elements of type `T`. */ - def of[T](clazz: Class[T]): javadsl.Flow[T, T, Unit] = create[T]() + def of[T](clazz: Class[T]): javadsl.Flow[T, T, NotUsed] = create[T]() /** * A graph with the shape of a flow logically is a flow, this method makes it so also in type. @@ -49,7 +51,7 @@ object Flow { /** * Helper to create `Flow` from a `Sink`and a `Source`. */ - def fromSinkAndSource[I, O](sink: Graph[SinkShape[I], _], source: Graph[SourceShape[O], _]): Flow[I, O, Unit] = + def fromSinkAndSource[I, O](sink: Graph[SinkShape[I], _], source: Graph[SourceShape[O], _]): Flow[I, O, NotUsed] = new Flow(scaladsl.Flow.fromSinkAndSourceMat(sink, source)(scaladsl.Keep.none)) /** @@ -321,22 +323,22 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the + * as they pass through this processing step. The function returns a `CompletionStage` and the * value of that future will be emitted downstreams. As many futures as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. 
* - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream * backpressures or the first future is not completed @@ -347,29 +349,29 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Flow[In, T, Mat] = - new Flow(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] = + new Flow(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the + * as they pass through this processing step. The function returns a `CompletionStage` and the * value of that future will be emitted downstreams. As many futures as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures * returned by `f` might be emitted in a different order). * - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures * @@ -379,8 +381,8 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Flow[In, T, Mat] = - new Flow(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] = + new Flow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. @@ -554,6 +556,22 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends def fold[T](zero: T)(f: function.Function2[T, Out, T]): javadsl.Flow[In, T, Mat] = new Flow(delegate.fold(zero)(f.apply)) + /** + * Similar to `fold` but uses first element as zero element. + * Applies the given function towards its current and next value, + * yielding the next current value. 
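A minimal sketch of `mapAsync` under the new signature: the user function hands back a `CompletionStage` (here a `CompletableFuture` that is already completed), which the stage converts to a Scala `Future` internally. The ActorSystem setup and the doubling function are only for illustration:

```java
import akka.NotUsed;
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.Flow;
import akka.stream.javadsl.Source;
import java.util.concurrent.CompletableFuture;

public class MapAsyncSketch {
  public static void main(String[] args) {
    ActorSystem system = ActorSystem.create("mapasync-sketch");
    Materializer mat = ActorMaterializer.create(system);

    // Up to 4 CompletionStages may be in flight; results are emitted in upstream order.
    Flow<Integer, Integer, NotUsed> doubling =
        Flow.of(Integer.class)
            .mapAsync(4, i -> CompletableFuture.completedFuture(i * 2));

    Source.range(1, 5)
        .via(doubling)
        .runForeach(System.out::println, mat) // materializes a CompletionStage<Done>
        .thenRun(system::terminate);
  }
}
```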
+ * + * '''Emits when''' upstream completes + * + * '''Backpressures when''' downstream backpressures + * + * '''Completes when''' upstream completes + * + * '''Cancels when''' downstream cancels + */ + def reduce(f: function.Function2[Out, Out, Out @uncheckedVariance]): javadsl.Flow[In, Out, Mat] = + new Flow(delegate.reduce(f.apply)) + /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] * injects a separator between a List's elements. @@ -802,6 +820,8 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * '''Cancels when''' downstream cancels * + * see also [[Flow.batch]] [[Flow.batchWeighted]] + * * @param seed Provides the first state for a conflated value using the first unconsumed element as a start * @param aggregate Takes the currently aggregated value and the current pending element to produce a new aggregate * @@ -809,6 +829,62 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends def conflate[S](seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Flow[In, S, Mat] = new Flow(delegate.conflate(seed.apply)(aggregate.apply)) + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might store received elements in + * an array up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * '''Emits when''' downstream stops backpressuring and there is an aggregated element available + * + * '''Backpressures when''' there are `max` batched elements and 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[Flow.conflate]], [[Flow.batchWeighted]] + * + * @param max maximum number of elements to batch before backpressuring upstream (must be positive non-zero) + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new aggregate + */ + def batch[S](max: Long, seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Flow[In, S, Mat] = + new Flow(delegate.batch(max, seed.apply)(aggregate.apply)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might concatenate `ByteString` + * elements up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * Batching will apply for all elements, even if a single element cost is greater than the total allowed limit. + * In this case, previous batched elements will be emitted, then the "heavy" element will be emitted (after + * being applied with the `seed` function) without batching further elements with it, and then the rest of the + * incoming elements are batched. 
+   *
+   * '''Emits when''' downstream stops backpressuring and there is a batched element available
+   *
+   * '''Backpressures when''' there are `max` weighted batched elements + 1 pending element and downstream backpressures
+   *
+   * '''Completes when''' upstream completes and there is no batched/pending element waiting
+   *
+   * '''Cancels when''' downstream cancels
+   *
+   * See also [[Flow.conflate]], [[Flow.batch]]
+   *
+   * @param max maximum weight of elements to batch before backpressuring upstream (must be positive non-zero)
+   * @param costFn a function to compute a single element weight
+   * @param seed Provides the first state for a batched value using the first unconsumed element as a start
+   * @param aggregate Takes the currently batched value and the current pending element to produce a new batch
+   */
+  def batchWeighted[S](max: Long, costFn: function.Function[Out, Long], seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Flow[In, S, Mat] =
+    new Flow(delegate.batchWeighted(max, costFn.apply, seed.apply)(aggregate.apply))
+
   /**
    * Allows a faster downstream to progress independently of a slower publisher by extrapolating elements from an older
    * element until new element comes from the upstream. For example an expand step might repeat the last element for
@@ -823,7 +899,7 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends
    *
    * '''Emits when''' downstream stops backpressuring
    *
-   * '''Backpressures when''' downstream backpressures
+   * '''Backpressures when''' downstream backpressures or iterator runs empty
    *
    * '''Completes when''' upstream completes
    *
@@ -833,11 +909,8 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends
    * @param extrapolate Takes the current extrapolation state to produce an output element and the next extrapolation
    *                    state.
    */
-  def expand[S, U](seed: function.Function[Out, S], extrapolate: function.Function[S, akka.japi.Pair[U, S]]): javadsl.Flow[In, U, Mat] =
-    new Flow(delegate.expand(seed(_))(s ⇒ {
-      val p = extrapolate(s)
-      (p.first, p.second)
-    }))
+  def expand[U](extrapolate: function.Function[Out, java.util.Iterator[U]]): javadsl.Flow[In, U, Mat] =
+    new Flow(delegate.expand(in ⇒ extrapolate(in).asScala))
 
   /**
    * Adds a fixed size buffer in the flow that allows to store elements from a faster upstream until it becomes full.
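The new `batch` operator introduced above can be sketched from Java as follows; the running sum of integers is an arbitrary seed/aggregate pair and the limit of 10 is made up:

```java
import akka.NotUsed;
import akka.stream.javadsl.Flow;

public class BatchSketch {
  // While downstream is slow, aggregate up to 10 incoming integers into their sum;
  // batchWeighted works the same way but weighs each element with a cost function.
  static final Flow<Integer, Integer, NotUsed> summing =
      Flow.of(Integer.class)
          .batch(10L, i -> i, (sum, i) -> sum + i);
}
```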
@@ -890,7 +963,7 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends * * '''Cancels when''' downstream cancels or substream cancels */ - def prefixAndTail(n: Int): javadsl.Flow[In, akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, Unit]], Mat] = + def prefixAndTail(n: Int): javadsl.Flow[In, akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, NotUsed]], Mat] = new Flow(delegate.prefixAndTail(n).map { case (taken, tail) ⇒ akka.japi.Pair(taken.asJava, tail.asJava) }) /** @@ -1314,7 +1387,7 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends def zipMat[T, M, M2](that: Graph[SourceShape[T], M], matF: function.Function2[Mat, M, M2]): javadsl.Flow[In, Out @uncheckedVariance Pair T, M2] = this.viaMat(Flow.fromGraph(GraphDSL.create(that, - new function.Function2[GraphDSL.Builder[M], SourceShape[T], FlowShape[Out, Out @ uncheckedVariance Pair T]] { + new function.Function2[GraphDSL.Builder[M], SourceShape[T], FlowShape[Out, Out @uncheckedVariance Pair T]] { def apply(b: GraphDSL.Builder[M], s: SourceShape[T]): FlowShape[Out, Out @uncheckedVariance Pair T] = { val zip: FanInShape2[Out, T, Out Pair T] = b.add(Zip.create[Out, T]) b.from(s).toInlet(zip.in1) @@ -1484,6 +1557,15 @@ final class Flow[-In, +Out, +Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends */ def detach: javadsl.Flow[In, Out, Mat] = new Flow(delegate.detach) + /** + * Materializes to `Future[Done]` that completes on getting termination message. + * The Future completes with success when received complete message from upstream or cancel + * from downstream. It fails with the same error when received error message from + * downstream. + */ + def watchTermination[M]()(matF: function.Function2[Mat, CompletionStage[Done], M]): javadsl.Flow[In, Out, M] = + new Flow(delegate.watchTermination()((left, right) => matF(left, right.toJava))) + /** * Delays the initial element by the specified duration. * diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala index e8a49ab2a8..6958159cfe 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.javadsl +import akka.NotUsed import akka.stream._ -import akka.japi.Pair +import akka.japi.{ Pair, function } import scala.annotation.unchecked.uncheckedVariance import akka.stream.impl.ConstantFun @@ -25,13 +26,13 @@ object Merge { /** * Create a new `Merge` stage with the specified output type. */ - def create[T](inputPorts: Int): Graph[UniformFanInShape[T, T], Unit] = + def create[T](inputPorts: Int): Graph[UniformFanInShape[T, T], NotUsed] = scaladsl.Merge(inputPorts) /** * Create a new `Merge` stage with the specified output type. */ - def create[T](clazz: Class[T], inputPorts: Int): Graph[UniformFanInShape[T, T], Unit] = create(inputPorts) + def create[T](clazz: Class[T], inputPorts: Int): Graph[UniformFanInShape[T, T], NotUsed] = create(inputPorts) /** * Create a new `Merge` stage with the specified output type. 
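The `watchTermination` combinator added above can be used from Java roughly like this; keeping only the completion signal as the materialized value via `Keep.right` is one common choice, not the only one:

```java
import akka.Done;
import akka.NotUsed;
import akka.stream.javadsl.Flow;
import akka.stream.javadsl.Keep;
import java.util.concurrent.CompletionStage;

public class WatchTerminationSketch {
  // Materialize the flow to a CompletionStage<Done> that completes when the stream
  // terminates (normally or with a failure).
  static final Flow<String, String, CompletionStage<Done>> watched =
      Flow.of(String.class)
          .watchTermination(Keep.<NotUsed, CompletionStage<Done>>right());
}
```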
@@ -39,7 +40,7 @@ object Merge { * @param eagerComplete set to true in order to make this stage eagerly * finish as soon as one of its inputs completes */ - def create[T](inputPorts: Int, eagerComplete: Boolean): Graph[UniformFanInShape[T, T], Unit] = + def create[T](inputPorts: Int, eagerComplete: Boolean): Graph[UniformFanInShape[T, T], NotUsed] = scaladsl.Merge(inputPorts, eagerComplete = eagerComplete) /** @@ -48,7 +49,7 @@ object Merge { * @param eagerComplete set to true in order to make this stage eagerly * finish as soon as one of its inputs completes */ - def create[T](clazz: Class[T], inputPorts: Int, eagerComplete: Boolean): Graph[UniformFanInShape[T, T], Unit] = + def create[T](clazz: Class[T], inputPorts: Int, eagerComplete: Boolean): Graph[UniformFanInShape[T, T], NotUsed] = create(inputPorts, eagerComplete) } @@ -69,13 +70,13 @@ object MergePreferred { /** * Create a new `MergePreferred` stage with the specified output type. */ - def create[T](secondaryPorts: Int): Graph[scaladsl.MergePreferred.MergePreferredShape[T], Unit] = + def create[T](secondaryPorts: Int): Graph[scaladsl.MergePreferred.MergePreferredShape[T], NotUsed] = scaladsl.MergePreferred(secondaryPorts) /** * Create a new `MergePreferred` stage with the specified output type. */ - def create[T](clazz: Class[T], secondaryPorts: Int): Graph[scaladsl.MergePreferred.MergePreferredShape[T], Unit] = create(secondaryPorts) + def create[T](clazz: Class[T], secondaryPorts: Int): Graph[scaladsl.MergePreferred.MergePreferredShape[T], NotUsed] = create(secondaryPorts) /** * Create a new `MergePreferred` stage with the specified output type. @@ -83,7 +84,7 @@ object MergePreferred { * @param eagerComplete set to true in order to make this stage eagerly * finish as soon as one of its inputs completes */ - def create[T](secondaryPorts: Int, eagerComplete: Boolean): Graph[scaladsl.MergePreferred.MergePreferredShape[T], Unit] = + def create[T](secondaryPorts: Int, eagerComplete: Boolean): Graph[scaladsl.MergePreferred.MergePreferredShape[T], NotUsed] = scaladsl.MergePreferred(secondaryPorts, eagerComplete = eagerComplete) /** @@ -92,7 +93,7 @@ object MergePreferred { * @param eagerComplete set to true in order to make this stage eagerly * finish as soon as one of its inputs completes */ - def create[T](clazz: Class[T], secondaryPorts: Int, eagerComplete: Boolean): Graph[scaladsl.MergePreferred.MergePreferredShape[T], Unit] = + def create[T](clazz: Class[T], secondaryPorts: Int, eagerComplete: Boolean): Graph[scaladsl.MergePreferred.MergePreferredShape[T], NotUsed] = create(secondaryPorts, eagerComplete) } @@ -118,7 +119,7 @@ object Broadcast { * @param outputCount number of output ports * @param eagerCancel if true, broadcast cancels upstream if any of its downstreams cancel. */ - def create[T](outputCount: Int, eagerCancel: Boolean): Graph[UniformFanOutShape[T, T], Unit] = + def create[T](outputCount: Int, eagerCancel: Boolean): Graph[UniformFanOutShape[T, T], NotUsed] = scaladsl.Broadcast(outputCount, eagerCancel = eagerCancel) /** @@ -126,12 +127,46 @@ object Broadcast { * * @param outputCount number of output ports */ - def create[T](outputCount: Int): Graph[UniformFanOutShape[T, T], Unit] = create(outputCount, eagerCancel = false) + def create[T](outputCount: Int): Graph[UniformFanOutShape[T, T], NotUsed] = create(outputCount, eagerCancel = false) /** * Create a new `Broadcast` stage with the specified input type. 
   */
-  def create[T](clazz: Class[T], outputCount: Int): Graph[UniformFanOutShape[T, T], Unit] = create(outputCount)
+  def create[T](clazz: Class[T], outputCount: Int): Graph[UniformFanOutShape[T, T], NotUsed] = create(outputCount)
+
+}
+
+/**
+ * Fan-out the stream to several streams, emitting an incoming upstream element to one downstream consumer according
+ * to the partitioner function applied to the element.
+ *
+ * '''Emits when''' all of the outputs stop backpressuring and there is an input element available
+ *
+ * '''Backpressures when''' one of the outputs backpressures
+ *
+ * '''Completes when''' upstream completes
+ *
+ * '''Cancels when'''
+ *   one of the downstreams cancels
+ */
+object Partition {
+  /**
+   * Create a new `Partition` stage with the specified input type.
+   *
+   * @param outputCount number of output ports
+   * @param partitioner function deciding which output each element will be targeted at
+   */
+  def create[T](outputCount: Int, partitioner: function.Function[T, Int]): Graph[UniformFanOutShape[T, T], NotUsed] =
+    scaladsl.Partition(outputCount, partitioner = (t: T) ⇒ partitioner.apply(t))
+
+  /**
+   * Create a new `Partition` stage with the specified input type.
+   *
+   * @param outputCount number of output ports
+   * @param partitioner function deciding which output each element will be targeted at
+   */
+  def create[T](clazz: Class[T], outputCount: Int, partitioner: function.Function[T, Int]): Graph[UniformFanOutShape[T, T], NotUsed] =
+    create(outputCount, partitioner)
 }
 
@@ -155,19 +190,19 @@ object Balance {
    * @param waitForAllDownstreams if `true` it will not start emitting
    * elements to downstream outputs until all of them have requested at least one element
    */
-  def create[T](outputCount: Int, waitForAllDownstreams: Boolean): Graph[UniformFanOutShape[T, T], Unit] =
+  def create[T](outputCount: Int, waitForAllDownstreams: Boolean): Graph[UniformFanOutShape[T, T], NotUsed] =
     scaladsl.Balance(outputCount, waitForAllDownstreams)
 
   /**
    * Create a new `Balance` stage with the specified input type.
    */
-  def create[T](outputCount: Int): Graph[UniformFanOutShape[T, T], Unit] =
+  def create[T](outputCount: Int): Graph[UniformFanOutShape[T, T], NotUsed] =
     create(outputCount, waitForAllDownstreams = false)
 
   /**
    * Create a new `Balance` stage with the specified input type.
    */
-  def create[T](clazz: Class[T], outputCount: Int): Graph[UniformFanOutShape[T, T], Unit] =
+  def create[T](clazz: Class[T], outputCount: Int): Graph[UniformFanOutShape[T, T], NotUsed] =
     create(outputCount)
 
   /**
@@ -176,7 +211,7 @@ object Balance {
    * @param waitForAllDownstreams if `true` it will not start emitting
    * elements to downstream outputs until all of them have requested at least one element
    */
-  def create[T](clazz: Class[T], outputCount: Int, waitForAllDownstreams: Boolean): Graph[UniformFanOutShape[T, T], Unit] =
+  def create[T](clazz: Class[T], outputCount: Int, waitForAllDownstreams: Boolean): Graph[UniformFanOutShape[T, T], NotUsed] =
     create(outputCount, waitForAllDownstreams)
 }
 
@@ -201,7 +236,7 @@ object Zip {
    * Create a new `Zip` stage with the specified input types and zipping-function
    * which creates `akka.japi.Pair`s.
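A small sketch of the new `Partition` stage from Java; routing even numbers to output 0 and odd numbers to output 1 is an arbitrary partitioner chosen for the example, and wiring the outputs up (which needs the GraphDSL) is omitted:

```java
import akka.NotUsed;
import akka.stream.Graph;
import akka.stream.UniformFanOutShape;
import akka.stream.javadsl.Partition;

public class PartitionSketch {
  // Two output ports; the partitioner returns the index of the port each element goes to.
  static final Graph<UniformFanOutShape<Integer, Integer>, NotUsed> evenOdd =
      Partition.create(Integer.class, 2, i -> i % 2 == 0 ? 0 : 1);
}
```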
*/ - def create[A, B]: Graph[FanInShape2[A, B, A Pair B], Unit] = + def create[A, B]: Graph[FanInShape2[A, B, A Pair B], NotUsed] = ZipWith.create(_toPair.asInstanceOf[Function2[A, B, A Pair B]]) private[this] final val _toPair: Function2[Any, Any, Any Pair Any] = @@ -222,18 +257,17 @@ object Zip { * '''Cancels when''' any downstream cancels */ object Unzip { - import akka.japi.function.Function /** * Creates a new `Unzip` stage with the specified output types. */ - def create[A, B](): Graph[FanOutShape2[A Pair B, A, B], Unit] = + def create[A, B](): Graph[FanOutShape2[A Pair B, A, B], NotUsed] = UnzipWith.create(ConstantFun.javaIdentityFunction[Pair[A, B]]) /** * Creates a new `Unzip` stage with the specified output types. */ - def create[A, B](left: Class[A], right: Class[B]): Graph[FanOutShape2[A Pair B, A, B], Unit] = create[A, B]() + def create[A, B](left: Class[A], right: Class[B]): Graph[FanOutShape2[A Pair B, A, B], NotUsed] = create[A, B]() } @@ -254,17 +288,17 @@ object Concat { /** * Create a new anonymous `Concat` stage with the specified input types. */ - def create[T](): Graph[UniformFanInShape[T, T], Unit] = scaladsl.Concat[T]() + def create[T](): Graph[UniformFanInShape[T, T], NotUsed] = scaladsl.Concat[T]() /** * Create a new anonymous `Concat` stage with the specified input types. */ - def create[T](inputCount: Int): Graph[UniformFanInShape[T, T], Unit] = scaladsl.Concat[T](inputCount) + def create[T](inputCount: Int): Graph[UniformFanInShape[T, T], NotUsed] = scaladsl.Concat[T](inputCount) /** * Create a new anonymous `Concat` stage with the specified input types. */ - def create[T](clazz: Class[T]): Graph[UniformFanInShape[T, T], Unit] = create() + def create[T](clazz: Class[T]): Graph[UniformFanInShape[T, T], NotUsed] = create() } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Keep.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Keep.scala index dc47d50092..1c7fe2ffa2 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Keep.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Keep.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. 
*/ package akka.stream.javadsl +import akka.NotUsed import akka.japi.function import akka.japi.Pair @@ -10,10 +11,10 @@ object Keep { private val _left = new function.Function2[Any, Any, Any] with ((Any, Any) ⇒ Any) { def apply(l: Any, r: Any) = l } private val _right = new function.Function2[Any, Any, Any] with ((Any, Any) ⇒ Any) { def apply(l: Any, r: Any) = r } private val _both = new function.Function2[Any, Any, Any] with ((Any, Any) ⇒ Any) { def apply(l: Any, r: Any) = new akka.japi.Pair(l, r) } - private val _none = new function.Function2[Any, Any, Unit] with ((Any, Any) ⇒ Unit) { def apply(l: Any, r: Any) = () } + private val _none = new function.Function2[Any, Any, NotUsed] with ((Any, Any) ⇒ NotUsed) { def apply(l: Any, r: Any) = NotUsed } def left[L, R]: function.Function2[L, R, L] = _left.asInstanceOf[function.Function2[L, R, L]] def right[L, R]: function.Function2[L, R, R] = _right.asInstanceOf[function.Function2[L, R, R]] def both[L, R]: function.Function2[L, R, L Pair R] = _both.asInstanceOf[function.Function2[L, R, L Pair R]] - def none[L, R]: function.Function2[L, R, Unit] = _none.asInstanceOf[function.Function2[L, R, Unit]] + def none[L, R]: function.Function2[L, R, NotUsed] = _none.asInstanceOf[function.Function2[L, R, NotUsed]] } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala new file mode 100644 index 0000000000..f0e1c8ccf8 --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Queue.scala @@ -0,0 +1,48 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package akka.stream.javadsl + +import akka.Done +import java.util.concurrent.CompletionStage +import java.util.Optional +import akka.stream.QueueOfferResult + +/** + * This trait allows to have the queue as a data source for some stream. + */ +trait SourceQueue[T] { + + /** + * Method offers next element to a stream and returns future that: + * - completes with `Enqueued` if element is consumed by a stream + * - completes with `Dropped` when stream dropped offered element + * - completes with `QueueClosed` when stream is completed during future is active + * - completes with `Failure(f)` when failure to enqueue element from upstream + * - fails when stream is completed or you cannot call offer in this moment because of implementation rules + * (like for backpressure mode and full buffer you need to wait for last offer call Future completion) + * + * @param elem element to send to a stream + */ + def offer(elem: T): CompletionStage[QueueOfferResult] + + /** + * Method returns future that completes when stream is completed and fails when stream failed + */ + def watchCompletion(): CompletionStage[Done] +} + +/** + * Trait allows to have the queue as a sink for some stream. + * "SinkQueue" pulls data from stream with backpressure mechanism. + */ +trait SinkQueue[T] { + + /** + * Method pulls elements from stream and returns future that: + * - fails if stream is failed + * - completes with None in case if stream is completed + * - completes with `Some(element)` in case next element is available from stream. + */ + def pull(): CompletionStage[Optional[T]] +} diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala index a74c4376c3..1f615b1e57 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala @@ -1,52 +1,62 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.javadsl -import java.io.{ InputStream, OutputStream, File } - +import java.util.Optional +import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Props } import akka.dispatch.ExecutionContexts import akka.japi.function -import akka.stream.impl.Stages.DefaultAttributes import akka.stream.impl.StreamLayout import akka.stream.{ javadsl, scaladsl, _ } -import akka.util.ByteString import org.reactivestreams.{ Publisher, Subscriber } - -import scala.concurrent.duration.FiniteDuration -import scala.concurrent.{ ExecutionContext, Future } +import scala.compat.java8.OptionConverters._ +import scala.concurrent.ExecutionContext import scala.util.Try +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters.FutureOps +import akka.stream.impl.SinkQueueAdapter /** Java API */ object Sink { /** * A `Sink` that will invoke the given function for every received element, giving it its previous * output (or the given `zero` value) and the element as input. - * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure is signaled in the stream. */ - def fold[U, In](zero: U, f: function.Function2[U, In, U]): javadsl.Sink[In, Future[U]] = - new Sink(scaladsl.Sink.fold[U, In](zero)(f.apply)) + def fold[U, In](zero: U, f: function.Function2[U, In, U]): javadsl.Sink[In, CompletionStage[U]] = + new Sink(scaladsl.Sink.fold[U, In](zero)(f.apply).toCompletionStage()) + + /** + * A `Sink` that will invoke the given function for every received element, giving it its previous + * output (from the second element) and the element as input. + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final + * function evaluation when the input stream ends, or completed with `Failure` + * if there is a failure signaled in the stream. + */ + def reduce[In](f: function.Function2[In, In, In]): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.reduce[In](f.apply).toCompletionStage()) /** * Helper to create [[Sink]] from `Subscriber`. */ - def fromSubscriber[In](subs: Subscriber[In]): Sink[In, Unit] = + def fromSubscriber[In](subs: Subscriber[In]): Sink[In, NotUsed] = new Sink(scaladsl.Sink.fromSubscriber(subs)) /** * A `Sink` that immediately cancels its upstream after materialization. */ - def cancelled[T](): Sink[T, Unit] = + def cancelled[T](): Sink[T, NotUsed] = new Sink(scaladsl.Sink.cancelled) /** * A `Sink` that will consume the stream and discard the elements. */ - def ignore[T](): Sink[T, Future[Unit]] = - new Sink(scaladsl.Sink.ignore) + def ignore[T](): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.ignore.toCompletionStage()) /** * A `Sink` that materializes into a [[org.reactivestreams.Publisher]]. @@ -59,93 +69,95 @@ object Sink { * If `fanout` is `false` then the materialized `Publisher` will only support a single `Subscriber` and * reject any additional `Subscriber`s. */ - def asPublisher[T](fanout: Boolean): Sink[T, Publisher[T]] = - new Sink(scaladsl.Sink.asPublisher(fanout)) + def asPublisher[T](fanout: AsPublisher): Sink[T, Publisher[T]] = + new Sink(scaladsl.Sink.asPublisher(fanout == AsPublisher.WITH_FANOUT)) /** * A `Sink` that will invoke the given procedure for each received element. 
The sink is materialized - * into a [[scala.concurrent.Future]] will be completed with `Success` when reaching the + * into a [[java.util.concurrent.CompletionStage]] will be completed with `Success` when reaching the * normal end of the stream, or completed with `Failure` if there is a failure is signaled in * the stream.. */ - def foreach[T](f: function.Procedure[T]): Sink[T, Future[Unit]] = - new Sink(scaladsl.Sink.foreach(f.apply)) + def foreach[T](f: function.Procedure[T]): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.foreach(f.apply).toCompletionStage()) /** * A `Sink` that will invoke the given procedure for each received element in parallel. The sink is materialized - * into a [[scala.concurrent.Future]]. + * into a [[java.util.concurrent.CompletionStage]]. * * If `f` throws an exception and the supervision decision is - * [[akka.stream.Supervision.Stop]] the `Future` will be completed with failure. + * [[akka.stream.Supervision.Stop]] the `CompletionStage` will be completed with failure. * * If `f` throws an exception and the supervision decision is * [[akka.stream.Supervision.Resume]] or [[akka.stream.Supervision.Restart]] the * element is dropped and the stream continues. */ - def foreachParallel[T](parallel: Int)(f: function.Procedure[T])(ec: ExecutionContext): Sink[T, Future[Unit]] = - new Sink(scaladsl.Sink.foreachParallel(parallel)(f.apply)(ec)) + def foreachParallel[T](parallel: Int)(f: function.Procedure[T])(ec: ExecutionContext): Sink[T, CompletionStage[Done]] = + new Sink(scaladsl.Sink.foreachParallel(parallel)(f.apply)(ec).toCompletionStage()) /** * A `Sink` that when the flow is completed, either through a failure or normal * completion, apply the provided function with [[scala.util.Success]] * or [[scala.util.Failure]]. */ - def onComplete[In](callback: function.Procedure[Try[Unit]]): Sink[In, Unit] = + def onComplete[In](callback: function.Procedure[Try[Done]]): Sink[In, NotUsed] = new Sink(scaladsl.Sink.onComplete[In](x ⇒ callback.apply(x))) /** - * A `Sink` that materializes into a `Future` of the first value received. - * If the stream completes before signaling at least a single element, the Future will be failed with a [[NoSuchElementException]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the first value received. + * If the stream completes before signaling at least a single element, the CompletionStage will be failed with a [[NoSuchElementException]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[headOption]]. */ - def head[In](): Sink[In, Future[In]] = - new Sink(scaladsl.Sink.head[In]) + def head[In](): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.head[In].toCompletionStage()) /** - * A `Sink` that materializes into a `Future` of the optional first value received. - * If the stream completes before signaling at least a single element, the value of the Future will be an empty [[akka.japi.Option]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the optional first value received. 
+ * If the stream completes before signaling at least a single element, the value of the CompletionStage will be an empty [[java.util.Optional]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[head]]. */ - def headOption[In](): Sink[In, Future[akka.japi.Option[In]]] = + def headOption[In](): Sink[In, CompletionStage[Optional[In]]] = new Sink(scaladsl.Sink.headOption[In].mapMaterializedValue( - _.map(akka.japi.Option.fromScalaOption)(ExecutionContexts.sameThreadExecutionContext))) + _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) /** - * A `Sink` that materializes into a `Future` of the last value received. - * If the stream completes before signaling at least a single element, the Future will be failed with a [[NoSuchElementException]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the last value received. + * If the stream completes before signaling at least a single element, the CompletionStage will be failed with a [[NoSuchElementException]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[lastOption]]. */ - def last[In](): Sink[In, Future[In]] = - new Sink(scaladsl.Sink.last[In]) + def last[In](): Sink[In, CompletionStage[In]] = + new Sink(scaladsl.Sink.last[In].toCompletionStage()) /** - * A `Sink` that materializes into a `Future` of the optional last value received. - * If the stream completes before signaling at least a single element, the value of the Future will be an empty [[akka.japi.Option]]. - * If the stream signals an error errors before signaling at least a single element, the Future will be failed with the streams exception. + * A `Sink` that materializes into a `CompletionStage` of the optional last value received. + * If the stream completes before signaling at least a single element, the value of the CompletionStage will be an empty [[java.util.Optional]]. + * If the stream signals an error errors before signaling at least a single element, the CompletionStage will be failed with the streams exception. * * See also [[head]]. */ - def lastOption[In](): Sink[In, Future[akka.japi.Option[In]]] = + def lastOption[In](): Sink[In, CompletionStage[Optional[In]]] = new Sink(scaladsl.Sink.lastOption[In].mapMaterializedValue( - _.map(akka.japi.Option.fromScalaOption)(ExecutionContexts.sameThreadExecutionContext))) + _.map(_.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) /** * A `Sink` that keeps on collecting incoming elements until upstream terminates. * As upstream may be unbounded, `Flow[T].take` or the stricter `Flow[T].limit` (and their variants) * may be used to ensure boundedness. - * Materializes into a `Future` of `Seq[T]` containing all the collected elements. + * Materializes into a `CompletionStage` of `Seq[T]` containing all the collected elements. + * `List` is limited to `Integer.MAX_VALUE` elements, this Sink will cancel the stream + * after having received that many elements. 
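A sketch of a few of the reworked Sink factories as seen from Java: `headOption` now yields `java.util.Optional`, `seq` a `java.util.List`, and `asPublisher` takes the `AsPublisher` enum instead of a boolean. The element values and the system setup are illustrative only:

```java
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.AsPublisher;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
import org.reactivestreams.Publisher;

public class SinkSketch {
  public static void main(String[] args) {
    ActorSystem system = ActorSystem.create("sink-sketch");
    Materializer mat = ActorMaterializer.create(system);

    CompletionStage<Optional<Integer>> first =
        Source.range(1, 3).runWith(Sink.<Integer>headOption(), mat);

    CompletionStage<List<Integer>> all =
        Source.range(1, 3).runWith(Sink.<Integer>seq(), mat);

    Publisher<Integer> publisher =
        Source.range(1, 3).runWith(Sink.<Integer>asPublisher(AsPublisher.WITH_FANOUT), mat);

    all.thenAccept(System.out::println).thenRun(system::terminate);
  }
}
```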
* * See also [[Flow.limit]], [[Flow.limitWeighted]], [[Flow.take]], [[Flow.takeWithin]], [[Flow.takeWhile]] */ - def seq[In]: Sink[In, Future[java.util.List[In]]] = { + def seq[In]: Sink[In, CompletionStage[java.util.List[In]]] = { import scala.collection.JavaConverters._ - new Sink(scaladsl.Sink.seq[In].mapMaterializedValue(fut ⇒ fut.map(sq ⇒ sq.asJava)(ExecutionContexts.sameThreadExecutionContext))) + new Sink(scaladsl.Sink.seq[In].mapMaterializedValue(fut ⇒ fut.map(sq ⇒ sq.asJava)(ExecutionContexts.sameThreadExecutionContext).toJava)) } /** @@ -164,7 +176,7 @@ object Sink { * limiting stage in front of this `Sink`. * */ - def actorRef[In](ref: ActorRef, onCompleteMessage: Any): Sink[In, Unit] = + def actorRef[In](ref: ActorRef, onCompleteMessage: Any): Sink[In, NotUsed] = new Sink(scaladsl.Sink.actorRef[In](ref, onCompleteMessage)) /** @@ -181,7 +193,7 @@ object Sink { * message will be sent to the destination actor. */ def actorRefWithAck[In](ref: ActorRef, onInitMessage: Any, ackMessage: Any, onCompleteMessage: Any, - onFailureMessage: function.Function[Throwable, Any]): Sink[In, Unit] = + onFailureMessage: function.Function[Throwable, Any]): Sink[In, NotUsed] = new Sink(scaladsl.Sink.actorRefWithAck[In](ref, onInitMessage, ackMessage, onCompleteMessage, onFailureMessage.apply)) /** @@ -205,7 +217,7 @@ object Sink { /** * Combine several sinks with fan-out strategy like `Broadcast` or `Balance` and returns `Sink`. */ - def combine[T, U](output1: Sink[U, _], output2: Sink[U, _], rest: java.util.List[Sink[U, _]], strategy: function.Function[java.lang.Integer, Graph[UniformFanOutShape[T, U], Unit]]): Sink[T, Unit] = { + def combine[T, U](output1: Sink[U, _], output2: Sink[U, _], rest: java.util.List[Sink[U, _]], strategy: function.Function[java.lang.Integer, Graph[UniformFanOutShape[T, U], NotUsed]]): Sink[T, NotUsed] = { import scala.collection.JavaConverters._ val seq = if (rest != null) rest.asScala.map(_.asScala) else Seq() new Sink(scaladsl.Sink.combine(output1.asScala, output2.asScala, seq: _*)(num ⇒ strategy.apply(num))) @@ -213,10 +225,10 @@ object Sink { /** * Creates a `Sink` that is materialized as an [[akka.stream.SinkQueue]]. - * [[akka.stream.SinkQueue.pull]] method is pulling element from the stream and returns ``Future[Option[T]]``. - * `Future` completes when element is available. + * [[akka.stream.SinkQueue.pull]] method is pulling element from the stream and returns ``CompletionStage[Option[T]]``. + * `CompletionStage` completes when element is available. * - * Before calling pull method second time you need to wait until previous Future completes. + * Before calling pull method second time you need to wait until previous CompletionStage completes. * Pull returns Failed future with ''IllegalStateException'' if previous future has not yet completed. * * `Sink` will request at most number of elements equal to size of `inputBuffer` from @@ -229,7 +241,7 @@ object Sink { * @see [[akka.stream.SinkQueue]] */ def queue[T](): Sink[T, SinkQueue[T]] = - new Sink(scaladsl.Sink.queue()) + new Sink(scaladsl.Sink.queue[T]().mapMaterializedValue(new SinkQueueAdapter(_))) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala index e2867f67c4..bd8addaae2 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Source.scala @@ -1,17 +1,18 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.javadsl import java.io.{ OutputStream, InputStream, File } import java.util - +import java.util.Optional +import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Cancellable, Props } import akka.event.LoggingAdapter import akka.japi.{ Pair, Util, function } import akka.stream.Attributes._ import akka.stream._ -import akka.stream.impl.fusing.Delay +import akka.stream.impl.fusing.{ GraphStages, Delay } import akka.stream.impl.{ ConstantFun, StreamLayout } import akka.stream.stage.Stage import akka.util.ByteString @@ -23,30 +24,43 @@ import scala.collection.immutable.Range.Inclusive import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ Future, Promise } import scala.language.{ higherKinds, implicitConversions } +import scala.compat.java8.OptionConverters._ +import java.util.concurrent.CompletionStage +import java.util.concurrent.CompletableFuture +import scala.compat.java8.FutureConverters._ +import akka.stream.impl.SourceQueueAdapter /** Java API */ object Source { - private[this] val _empty = new Source[Any, Unit](scaladsl.Source.empty) + private[this] val _empty = new Source[Any, NotUsed](scaladsl.Source.empty) /** * Create a `Source` with no elements, i.e. an empty stream that is completed immediately * for every connected `Sink`. */ - def empty[O](): Source[O, Unit] = _empty.asInstanceOf[Source[O, Unit]] + def empty[O](): Source[O, NotUsed] = _empty.asInstanceOf[Source[O, NotUsed]] /** - * Create a `Source` which materializes a [[scala.concurrent.Promise]] which controls what element + * Create a `Source` which materializes a [[java.util.concurrent.CompletableFuture]] which controls what element * will be emitted by the Source. - * If the materialized promise is completed with a Some, that value will be produced downstream, + * If the materialized promise is completed with a filled Optional, that value will be produced downstream, * followed by completion. - * If the materialized promise is completed with a None, no value will be produced downstream and completion will + * If the materialized promise is completed with an empty Optional, no value will be produced downstream and completion will * be signalled immediately. * If the materialized promise is completed with a failure, then the returned source will terminate with that error. * If the downstream of this source cancels before the promise has been completed, then the promise will be completed - * with None. + * with an empty Optional. */ - def maybe[T]: Source[T, Promise[Option[T]]] = - new Source(scaladsl.Source.maybe[T]) + def maybe[T]: Source[T, CompletableFuture[Optional[T]]] = { + new Source(scaladsl.Source.maybe[T].mapMaterializedValue { scalaOptionPromise: Promise[Option[T]] ⇒ + val javaOptionPromise = new CompletableFuture[Optional[T]]() + scalaOptionPromise.completeWith( + javaOptionPromise.toScala + .map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext)) + + javaOptionPromise + }) + } /** * Helper to create [[Source]] from `Publisher`. @@ -56,7 +70,7 @@ object Source { * that mediate the flow of elements downstream and the propagation of * back-pressure upstream. */ - def fromPublisher[O](publisher: Publisher[O]): javadsl.Source[O, Unit] = + def fromPublisher[O](publisher: Publisher[O]): javadsl.Source[O, NotUsed] = new Source(scaladsl.Source.fromPublisher(publisher)) /** @@ -77,7 +91,7 @@ object Source { * in accordance with the demand coming from the downstream transformation * steps. 
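`Source.maybe` now hands the Java caller a `CompletableFuture` of `Optional` to complete from the outside; a rough sketch, where the printing sink and the "hello" value are just for illustration:

```java
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;

public class MaybeSketch {
  public static void main(String[] args) {
    ActorSystem system = ActorSystem.create("maybe-sketch");
    Materializer mat = ActorMaterializer.create(system);

    // The materialized CompletableFuture controls what (if anything) the source emits.
    CompletableFuture<Optional<String>> promise =
        Source.<String>maybe()
            .to(Sink.<String>foreach(System.out::println))
            .run(mat);

    promise.complete(Optional.of("hello")); // emit one element, then complete the stream
  }
}
```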
*/ - def fromIterator[O](f: function.Creator[java.util.Iterator[O]]): javadsl.Source[O, Unit] = + def fromIterator[O](f: function.Creator[java.util.Iterator[O]]): javadsl.Source[O, NotUsed] = new Source(scaladsl.Source.fromIterator(() ⇒ f.create().asScala)) /** @@ -100,7 +114,7 @@ object Source { * being used as a `Source`. Otherwise the stream may fail with * `ConcurrentModificationException` or other more subtle errors may occur. */ - def from[O](iterable: java.lang.Iterable[O]): javadsl.Source[O, Unit] = { + def from[O](iterable: java.lang.Iterable[O]): javadsl.Source[O, NotUsed] = { // this adapter is not immutable if the the underlying java.lang.Iterable is modified // but there is not anything we can do to prevent that from happening. // ConcurrentModificationException will be thrown in some cases. @@ -121,7 +135,7 @@ object Source { * * @see [[scala.collection.immutable.Range.inclusive(Int, Int)]] */ - def range(start: Int, end: Int): javadsl.Source[Integer, Unit] = range(start, end, 1) + def range(start: Int, end: Int): javadsl.Source[Integer, NotUsed] = range(start, end, 1) /** * Creates [[Source]] that represents integer values in range ''[start;end]'', with the given step. @@ -131,7 +145,7 @@ object Source { * * @see [[scala.collection.immutable.Range.inclusive(Int, Int, Int)]] */ - def range(start: Int, end: Int, step: Int): javadsl.Source[Integer, Unit] = + def range(start: Int, end: Int, step: Int): javadsl.Source[Integer, NotUsed] = fromIterator[Integer](new function.Creator[util.Iterator[Integer]]() { def create(): util.Iterator[Integer] = new Inclusive(start, end, step) { @@ -145,9 +159,18 @@ object Source { * may happen before or after materializing the `Flow`. * The stream terminates with a failure if the `Future` is completed with a failure. */ - def fromFuture[O](future: Future[O]): javadsl.Source[O, Unit] = + def fromFuture[O](future: Future[O]): javadsl.Source[O, NotUsed] = new Source(scaladsl.Source.fromFuture(future)) + /** + * Start a new `Source` from the given `CompletionStage`. The stream will consist of + * one element when the `CompletionStage` is completed with a successful value, which + * may happen before or after materializing the `Flow`. + * The stream terminates with a failure if the `CompletionStage` is completed with a failure. + */ + def fromCompletionStage[O](future: CompletionStage[O]): javadsl.Source[O, NotUsed] = + new Source(scaladsl.Source.fromCompletionStage(future)) + /** * Elements are emitted periodically with the specified interval. * The tick element will be delivered to downstream consumers that has requested any elements. @@ -162,39 +185,34 @@ object Source { * Create a `Source` with one element. * Every connected `Sink` of this stream will see an individual stream consisting of one element. */ - def single[T](element: T): Source[T, Unit] = + def single[T](element: T): Source[T, NotUsed] = new Source(scaladsl.Source.single(element)) /** * Create a `Source` that will continually emit the given element. */ - def repeat[T](element: T): Source[T, Unit] = + def repeat[T](element: T): Source[T, NotUsed] = new Source(scaladsl.Source.repeat(element)) /** * Create a `Source` that will unfold a value of type `S` into * a pair of the next state `S` and output elements of type `E`. 
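The newly added `fromCompletionStage` is the Java-friendly counterpart of `fromFuture`; a minimal sketch with an arbitrary, already-completed value:

```java
import akka.NotUsed;
import akka.stream.javadsl.Source;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;

public class FromCompletionStageSketch {
  static final CompletionStage<String> greeting =
      CompletableFuture.completedFuture("hello");

  // Emits exactly one element once the CompletionStage completes, then completes.
  static final Source<String, NotUsed> source =
      Source.fromCompletionStage(greeting);
}
```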
*/ - def unfold[S, E](s: S, f: function.Function[S, Option[(S, E)]]): Source[E, Unit] = - new Source(scaladsl.Source.unfold(s)((s: S) ⇒ f.apply(s))) + def unfold[S, E](s: S, f: function.Function[S, Optional[(S, E)]]): Source[E, NotUsed] = + new Source(scaladsl.Source.unfold(s)((s: S) ⇒ f.apply(s).asScala)) /** * Same as [[unfold]], but uses an async function to generate the next state-element tuple. */ - def unfoldAsync[S, E](s: S, f: function.Function[S, Future[Option[(S, E)]]]): Source[E, Unit] = - new Source(scaladsl.Source.unfoldAsync(s)((s: S) ⇒ f.apply(s))) - - /** - * Simpler [[unfold]], for infinite sequences. - */ - def unfoldInf[S, E](s: S, f: function.Function[S, (S, E)]): Source[E, Unit] = { - new Source(scaladsl.Source.unfoldInf(s)((s: S) ⇒ f.apply(s))) - } + def unfoldAsync[S, E](s: S, f: function.Function[S, CompletionStage[Optional[(S, E)]]]): Source[E, NotUsed] = + new Source( + scaladsl.Source.unfoldAsync(s)( + (s: S) ⇒ f.apply(s).toScala.map(_.asScala)(akka.dispatch.ExecutionContexts.sameThreadExecutionContext))) /** * Create a `Source` that immediately ends the stream with the `cause` failure to every connected `Sink`. */ - def failed[T](cause: Throwable): Source[T, Unit] = + def failed[T](cause: Throwable): Source[T, NotUsed] = new Source(scaladsl.Source.failed(cause)) /** @@ -255,8 +273,8 @@ object Source { /** * Combines several sources with fan-in strategy like `Merge` or `Concat` and returns `Source`. */ - def combine[T, U](first: Source[T, _], second: Source[T, _], rest: java.util.List[Source[T, _]], - strategy: function.Function[java.lang.Integer, _ <: Graph[UniformFanInShape[T, U], Unit]]): Source[U, Unit] = { + def combine[T, U](first: Source[T, _ <: Any], second: Source[T, _ <: Any], rest: java.util.List[Source[T, _ <: Any]], + strategy: function.Function[java.lang.Integer, _ <: Graph[UniformFanInShape[T, U], NotUsed]]): Source[U, NotUsed] = { import scala.collection.JavaConverters._ val seq = if (rest != null) rest.asScala.map(_.asScala) else Seq() new Source(scaladsl.Source.combine(first.asScala, second.asScala, seq: _*)(num ⇒ strategy.apply(num))) @@ -265,28 +283,34 @@ object Source { /** * Creates a `Source` that is materialized as an [[akka.stream.SourceQueue]]. * You can push elements to the queue and they will be emitted to the stream if there is demand from downstream, - * otherwise they will be buffered until request for demand is received. + * otherwise they will be buffered until request for demand is received. Elements in the buffer will be discarded + * if downstream is terminated. * * Depending on the defined [[akka.stream.OverflowStrategy]] it might drop elements if * there is no space available in the buffer. * * Acknowledgement mechanism is available. - * [[akka.stream.SourceQueue.offer]] returns ``Future[Boolean]`` which completes with true - * if element was added to buffer or sent downstream. It completes - * with false if element was dropped. + * [[akka.stream.SourceQueue.offer]] returns `CompletionStage>` which completes with `Success(true)` + * if element was added to buffer or sent downstream. It completes with `Success(false)` if element was dropped. Can also complete + * with [[akka.stream.StreamCallbackStatus.Failure]] - when stream failed or [[akka.stream.StreamCallbackStatus.StreamCompleted]] + * when downstream is completed. * - * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete `offer():Future` until buffer is full. 
+ * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete last `offer():CompletionStage` + * call when buffer is full. * - * The buffer can be disabled by using `bufferSize` of 0 and then received messages are dropped - * if there is no demand from downstream. When `bufferSize` is 0 the `overflowStrategy` does - * not matter. + * You can watch accessibility of stream with [[akka.stream.SourceQueue.watchCompletion]]. + * It returns future that completes with success when stream is completed or fail when stream is failed. * - * @param bufferSize The size of the buffer in element count + * The buffer can be disabled by using `bufferSize` of 0 and then received message will wait for downstream demand. + * When `bufferSize` is 0 the `overflowStrategy` does not matter. + * + * SourceQueue that current source is materialized to is for single thread usage only. + * + * @param bufferSize size of buffer in element count * @param overflowStrategy Strategy that is used when incoming elements cannot fit inside the buffer - * @param timeout Timeout for ``SourceQueue.offer(T):Future[Boolean]`` */ - def queue[T](bufferSize: Int, overflowStrategy: OverflowStrategy, timeout: FiniteDuration): Source[T, SourceQueue[T]] = - new Source(scaladsl.Source.queue(bufferSize, overflowStrategy, timeout)) + def queue[T](bufferSize: Int, overflowStrategy: OverflowStrategy): Source[T, SourceQueue[T]] = + new Source(scaladsl.Source.queue[T](bufferSize, overflowStrategy).mapMaterializedValue(new SourceQueueAdapter(_))) } @@ -443,13 +467,24 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * Shortcut for running this `Source` with a fold function. * The given function is invoked for every received element, giving it its previous * output (or the given `zero` value) and the element as input. - * The returned [[scala.concurrent.Future]] will be completed with value of the final + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final * function evaluation when the input stream ends, or completed with `Failure` * if there is a failure is signaled in the stream. */ - def runFold[U](zero: U, f: function.Function2[U, Out, U], materializer: Materializer): Future[U] = + def runFold[U](zero: U, f: function.Function2[U, Out, U], materializer: Materializer): CompletionStage[U] = runWith(Sink.fold(zero, f), materializer) + /** + * Shortcut for running this `Source` with a reduce function. + * The given function is invoked for every received element, giving it its previous + * output (from the second ones) an the element as input. + * The returned [[java.util.concurrent.CompletionStage]] will be completed with value of the final + * function evaluation when the input stream ends, or completed with `Failure` + * if there is a failure is signaled in the stream. 
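A usage sketch for the new two-argument `queue`, using the scaladsl variant that the javadsl method delegates to; the actor system and materializer exist only for the example:

```
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, OverflowStrategy }
import akka.stream.scaladsl.{ Sink, Source }

implicit val system = ActorSystem("queue-example")
implicit val materializer = ActorMaterializer()

// materializes the SourceQueue; oldest buffered elements are dropped on overflow
val queue = Source.queue[String](100, OverflowStrategy.dropHead)
  .to(Sink.foreach(println))
  .run()

queue.offer("hello")   // completion of the returned value acknowledges the element
```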
+ */ + def runReduce[U >: Out](f: function.Function2[U, U, U], materializer: Materializer): CompletionStage[U] = + runWith(Sink.reduce(f), materializer) + /** * Concatenate this [[Source]] with the given one, meaning that once current * is exhausted and all result elements have been generated, @@ -668,7 +703,7 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap */ def zipMat[T, M, M2](that: Graph[SourceShape[T], M], matF: function.Function2[Mat, M, M2]): javadsl.Source[Out @uncheckedVariance Pair T, M2] = - this.viaMat(Flow.create[Out].zipMat(that, Keep.right[Unit, M]), matF) + this.viaMat(Flow.create[Out].zipMat(that, Keep.right[NotUsed, M]), matF) /** * Put together the elements of current [[Source]] and the given one @@ -700,11 +735,11 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap /** * Shortcut for running this `Source` with a foreach procedure. The given procedure is invoked * for each received element. - * The returned [[scala.concurrent.Future]] will be completed with `Success` when reaching the - * normal end of the stream, or completed with `Failure` if there is a failure is signaled in + * The returned [[java.util.concurrent.CompletionStage]] will be completed normally when reaching the + * normal end of the stream, or completed exceptionally if there is a failure is signaled in * the stream. */ - def runForeach(f: function.Procedure[Out], materializer: Materializer): Future[Unit] = + def runForeach(f: function.Procedure[Out], materializer: Materializer): CompletionStage[Done] = runWith(Sink.foreach(f), materializer) // COMMON OPS // @@ -766,66 +801,66 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. 
* - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Source[T, Mat] = - new Source(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] = + new Source(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * - * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures + * The function `f` is always invoked on the elements in the order they arrive (even though the result of the CompletionStages * returned by `f` might be emitted in a different order). 
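A sketch of `mapAsync` with the new `CompletionStage`-based signature; `lookupName` is a made-up asynchronous lookup used only for illustration:

```
import java.util.concurrent.{ CompletableFuture, CompletionStage }
import akka.japi.function
import akka.stream.javadsl.Source

// hypothetical async lookup returning a CompletionStage
def lookupName(id: Integer): CompletionStage[String] =
  CompletableFuture.supplyAsync(new java.util.function.Supplier[String] {
    def get(): String = "user-" + id
  })

// up to 4 lookups in flight, results emitted in upstream order
val names = Source.range(1, 100).mapAsync(4,
  new function.Function[Integer, CompletionStage[String]] {
    def apply(id: Integer): CompletionStage[String] = lookupName(id)
  })
```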
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): javadsl.Source[T, Mat] = - new Source(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] = + new Source(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. @@ -999,6 +1034,22 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap def fold[T](zero: T)(f: function.Function2[T, Out, T]): javadsl.Source[T, Mat] = new Source(delegate.fold(zero)(f.apply)) + /** + * Similar to `fold` but uses first element as zero element. + * Applies the given function towards its current and next value, + * yielding the next current value. + * + * '''Emits when''' upstream completes + * + * '''Backpressures when''' downstream backpressures + * + * '''Completes when''' upstream completes + * + * '''Cancels when''' downstream cancels + */ + def reduce(f: function.Function2[Out, Out, Out @uncheckedVariance]): javadsl.Source[Out, Mat] = + new Source(delegate.reduce(f.apply)) + /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] * injects a separator between a List's elements. @@ -1226,12 +1277,70 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * * '''Cancels when''' downstream cancels * + * see also [[Source.batch]] [[Source.batchWeighted]] + * * @param seed Provides the first state for a conflated value using the first unconsumed element as a start * @param aggregate Takes the currently aggregated value and the current pending element to produce a new aggregate */ def conflate[S](seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Source[S, Mat] = new Source(delegate.conflate(seed.apply)(aggregate.apply)) + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might store received elements in + * an array up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. 
+ * + * '''Emits when''' downstream stops backpressuring and there is an aggregated element available + * + * '''Backpressures when''' there are `max` batched elements and 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[Source.conflate]], [[Source.batchWeighted]] + * + * @param max maximum number of elements to batch before backpressuring upstream (must be positive non-zero) + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new aggregate + */ + def batch[S](max: Long, seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Source[S, Mat] = + new Source(delegate.batch(max, seed.apply)(aggregate.apply)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might concatenate `ByteString` + * elements up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * Batching will apply for all elements, even if a single element cost is greater than the total allowed limit. + * In this case, previous batched elements will be emitted, then the "heavy" element will be emitted (after + * being applied with the `seed` function) without batching further elements with it, and then the rest of the + * incoming elements are batched. + * + * '''Emits when''' downstream stops backpressuring and there is a batched element available + * + * '''Backpressures when''' there are `max` weighted batched elements + 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[Source.conflate]], [[Source.batch]] + * + * @param max maximum weight of elements to batch before backpressuring upstream (must be positive non-zero) + * @param costFn a function to compute a single element weight + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new batch + */ + def batchWeighted[S](max: Long, costFn: function.Function[Out, Long], seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): javadsl.Source[S, Mat] = + new Source(delegate.batchWeighted(max, costFn.apply, seed.apply)(aggregate.apply)) + /** * Allows a faster downstream to progress independently of a slower publisher by extrapolating elements from an older * element until new element comes from the upstream. 
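A sketch of the scaladsl `batch` that the new javadsl method delegates to, collecting fast incoming elements into `Vector`s of at most 100 while the downstream is slow:

```
import akka.stream.scaladsl.Flow

// seed starts a new batch from one element, aggregate appends to it
val batched = Flow[String].batch(100, s => Vector(s))(_ :+ _)
```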
For example an expand step might repeat the last element for @@ -1246,7 +1355,7 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * * '''Emits when''' downstream stops backpressuring * - * '''Backpressures when''' downstream backpressures + * '''Backpressures when''' downstream backpressures or iterator runs emtpy * * '''Completes when''' upstream completes * @@ -1256,11 +1365,8 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * @param extrapolate Takes the current extrapolation state to produce an output element and the next extrapolation * state. */ - def expand[S, U](seed: function.Function[Out, S], extrapolate: function.Function[S, akka.japi.Pair[U, S]]): javadsl.Source[U, Mat] = - new Source(delegate.expand(seed(_))(s ⇒ { - val p = extrapolate(s) - (p.first, p.second) - })) + def expand[U](extrapolate: function.Function[Out, java.util.Iterator[U]]): javadsl.Source[U, Mat] = + new Source(delegate.expand(in ⇒ extrapolate(in).asScala)) /** * Adds a fixed size buffer in the flow that allows to store elements from a faster upstream until it becomes full. @@ -1313,7 +1419,7 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap * * '''Cancels when''' downstream cancels or substream cancels */ - def prefixAndTail(n: Int): javadsl.Source[akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, Unit]], Mat] = + def prefixAndTail(n: Int): javadsl.Source[akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, NotUsed]], Mat] = new Source(delegate.prefixAndTail(n).map { case (taken, tail) ⇒ akka.japi.Pair(taken.asJava, tail.asJava) }) /** @@ -1626,6 +1732,15 @@ final class Source[+Out, +Mat](delegate: scaladsl.Source[Out, Mat]) extends Grap */ def detach: javadsl.Source[Out, Mat] = new Source(delegate.detach) + /** + * Materializes to `Future[Done]` that completes on getting termination message. + * The Future completes with success when received complete message from upstream or cancel + * from downstream. It fails with the same error when received error message from + * downstream. + */ + def watchTermination[M]()(matF: function.Function2[Mat, CompletionStage[Done], M]): javadsl.Source[Out, M] = + new Source(delegate.watchTermination()((left, right) => matF(left, right.toJava))) + /** * Delays the initial element by the specified duration. * diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala b/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala index 64ae0ade08..18e7c0a76e 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala @@ -1,16 +1,15 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.javadsl import java.io.{ InputStream, OutputStream } - import akka.japi.function import akka.stream.{ scaladsl, javadsl, ActorAttributes } +import akka.stream.io.IOResult import akka.util.ByteString - -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration +import java.util.concurrent.CompletionStage /** * Converters for interacting with the blocking `java.io` streams APIs @@ -19,15 +18,32 @@ object StreamConverters { /** * Sink which writes incoming [[ByteString]]s to an [[OutputStream]] created by the given function. 
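A sketch of the new iterator-based `expand` in its scaladsl form (which the javadsl variant wraps), re-emitting the latest element for as long as the downstream is faster than the upstream:

```
import akka.stream.scaladsl.Flow

// keep repeating the most recent element until a new one arrives
val repeatLatest = Flow[Int].expand(i => Iterator.continually(i))
```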
* - * Materializes a [[Future]] that will be completed with the size of the file (in bytes) at the streams completion. + * Materializes a [[CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. + * + * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or + * set it for a given Source by using [[ActorAttributes]]. + * + * This method uses no auto flush for the [[java.io.OutputStream]] @see [[#fromOutputStream(function.Creator, Boolean)]] if you want to override it. + * + * @param f A Creator which creates an OutputStream to write to + */ + def fromOutputStream(f: function.Creator[OutputStream]): javadsl.Sink[ByteString, CompletionStage[IOResult]] = fromOutputStream(f, autoFlush = false) + + /** + * Sink which writes incoming [[ByteString]]s to an [[OutputStream]] created by the given function. + * + * Materializes a [[CompletionStage]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * * @param f A Creator which creates an OutputStream to write to + * @param autoFlush If true the OutputStream will be flushed whenever a byte array is written */ - def fromOutputStream(f: function.Creator[OutputStream]): javadsl.Sink[ByteString, Future[java.lang.Long]] = - new Sink(scaladsl.StreamConverters.fromOutputStream(() ⇒ f.create())).asInstanceOf[javadsl.Sink[ByteString, Future[java.lang.Long]]] + def fromOutputStream(f: function.Creator[OutputStream], autoFlush: Boolean): javadsl.Sink[ByteString, CompletionStage[IOResult]] = + new Sink(scaladsl.StreamConverters.fromOutputStream(() ⇒ f.create(), autoFlush).toCompletionStage()) /** * Creates a Sink which when materialized will return an [[java.io.InputStream]] which it is possible @@ -65,10 +81,10 @@ object StreamConverters { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. + * It materializes a [[CompletionStage]] containing the number of bytes read from the source file upon completion. */ - def fromInputStream(in: function.Creator[InputStream], chunkSize: Int): javadsl.Source[ByteString, Future[java.lang.Long]] = - new Source(scaladsl.StreamConverters.fromInputStream(() ⇒ in.create(), chunkSize)).asInstanceOf[Source[ByteString, Future[java.lang.Long]]] + def fromInputStream(in: function.Creator[InputStream], chunkSize: Int): javadsl.Source[ByteString, CompletionStage[IOResult]] = + new Source(scaladsl.StreamConverters.fromInputStream(() ⇒ in.create(), chunkSize).toCompletionStage()) /** * Creates a Source from an [[java.io.InputStream]] created by the given function. @@ -78,9 +94,10 @@ object StreamConverters { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. 
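A sketch of the converter after the change to `CompletionStage[IOResult]`; `data.bin` is a placeholder path:

```
import java.io.{ FileInputStream, InputStream }
import akka.japi.function
import akka.stream.javadsl.StreamConverters

// materializes a CompletionStage[IOResult] carrying the number of bytes read
val fileBytes = StreamConverters.fromInputStream(new function.Creator[InputStream] {
  def create(): InputStream = new FileInputStream("data.bin")
})
```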
+ * It materializes a [[CompletionStage]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * and a possible exception if IO operation was not completed successfully. */ - def fromInputStream(in: function.Creator[InputStream]): javadsl.Source[ByteString, Future[java.lang.Long]] = fromInputStream(in, 8192) + def fromInputStream(in: function.Creator[InputStream]): javadsl.Source[ByteString, CompletionStage[IOResult]] = fromInputStream(in, 8192) /** * Creates a Source which when materialized will return an [[java.io.OutputStream]] which it is possible diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala b/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala index 2d1ed1f282..a331af06bd 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.javadsl +import akka.NotUsed import akka.event.LoggingAdapter import akka.japi.function import akka.stream._ @@ -12,10 +13,11 @@ import akka.stream.stage.Stage import scala.collection.immutable import scala.collection.JavaConverters._ import scala.annotation.unchecked.uncheckedVariance -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage /** * A “stream of streams” sub-flow of data elements, e.g. produced by `groupBy`. @@ -165,66 +167,66 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. 
* - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubFlow[In, T, Mat] = - new SubFlow(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] = + new SubFlow(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * - * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures + * The function `f` is always invoked on the elements in the order they arrive (even though the result of the CompletionStages * returned by `f` might be emitted in a different order). 
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStages returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages have been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubFlow[In, T, Mat] = - new SubFlow(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] = + new SubFlow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. @@ -399,6 +401,22 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo def fold[T](zero: T)(f: function.Function2[T, Out, T]): SubFlow[In, T, Mat] = new SubFlow(delegate.fold(zero)(f.apply)) + /** + * Similar to `fold` but uses first element as zero element. + * Applies the given function towards its current and next value, + * yielding the next current value. + * + * '''Emits when''' upstream completes + * + * '''Backpressures when''' downstream backpressures + * + * '''Completes when''' upstream completes + * + * '''Cancels when''' downstream cancels + */ + def reduce(f: function.Function2[Out, Out, Out @uncheckedVariance]): SubFlow[In, Out, Mat] = + new SubFlow(delegate.reduce(f.apply)) + /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] * injects a separator between a List's elements. @@ -644,6 +662,8 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo * * '''Cancels when''' downstream cancels * + * see also [[SubFlow.batch]] [[SubFlow.batchWeighted]] + * * @param seed Provides the first state for a conflated value using the first unconsumed element as a start * @param aggregate Takes the currently aggregated value and the current pending element to produce a new aggregate * @@ -651,6 +671,62 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo def conflate[S](seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubFlow[In, S, Mat] = new SubFlow(delegate.conflate(seed.apply)(aggregate.apply)) + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might store received elements in + * an array up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. 
+ * + * '''Emits when''' downstream stops backpressuring and there is an aggregated element available + * + * '''Backpressures when''' there are `max` batched elements and 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[SubFlow.conflate]], [[SubFlow.batchWeighted]] + * + * @param max maximum number of elements to batch before backpressuring upstream (must be positive non-zero) + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new aggregate + */ + def batch[S](max: Long, seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubFlow[In, S, Mat] = + new SubFlow(delegate.batch(max, seed.apply)(aggregate.apply)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might concatenate `ByteString` + * elements up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * Batching will apply for all elements, even if a single element cost is greater than the total allowed limit. + * In this case, previous batched elements will be emitted, then the "heavy" element will be emitted (after + * being applied with the `seed` function) without batching further elements with it, and then the rest of the + * incoming elements are batched. + * + * '''Emits when''' downstream stops backpressuring and there is a batched element available + * + * '''Backpressures when''' there are `max` weighted batched elements + 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[SubFlow.conflate]], [[SubFlow.batch]] + * + * @param max maximum weight of elements to batch before backpressuring upstream (must be positive non-zero) + * @param costFn a function to compute a single element weight + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new batch + */ + def batchWeighted[S](max: Long, costFn: function.Function[Out, Long], seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubFlow[In, S, Mat] = + new SubFlow(delegate.batchWeighted(max, costFn.apply, seed.apply)(aggregate.apply)) + /** * Allows a faster downstream to progress independently of a slower publisher by extrapolating elements from an older * element until new element comes from the upstream. 
For example an expand step might repeat the last element for @@ -665,7 +741,7 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo * * '''Emits when''' downstream stops backpressuring * - * '''Backpressures when''' downstream backpressures + * '''Backpressures when''' downstream backpressures or iterator runs emtpy * * '''Completes when''' upstream completes * @@ -675,11 +751,8 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo * @param extrapolate Takes the current extrapolation state to produce an output element and the next extrapolation * state. */ - def expand[S, U](seed: function.Function[Out, S], extrapolate: function.Function[S, akka.japi.Pair[U, S]]): SubFlow[In, U, Mat] = - new SubFlow(delegate.expand(seed(_))(s ⇒ { - val p = extrapolate(s) - (p.first, p.second) - })) + def expand[U](extrapolate: function.Function[Out, java.util.Iterator[U]]): SubFlow[In, U, Mat] = + new SubFlow(delegate.expand(in ⇒ extrapolate(in).asScala)) /** * Adds a fixed size buffer in the flow that allows to store elements from a faster upstream until it becomes full. @@ -732,7 +805,7 @@ class SubFlow[-In, +Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Flo * * '''Cancels when''' downstream cancels or substream cancels */ - def prefixAndTail(n: Int): SubFlow[In, akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, Unit]], Mat] = + def prefixAndTail(n: Int): SubFlow[In, akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, NotUsed]], Mat] = new SubFlow(delegate.prefixAndTail(n).map { case (taken, tail) ⇒ akka.japi.Pair(taken.asJava, tail.asJava) }) /** diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala b/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala index 3ca68defdb..f970db98e9 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.javadsl +import akka.NotUsed import akka.event.LoggingAdapter import akka.japi.function import akka.stream._ @@ -12,10 +13,11 @@ import akka.stream.stage.Stage import scala.collection.immutable import scala.collection.JavaConverters._ import scala.annotation.unchecked.uncheckedVariance -import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import akka.japi.Util import java.util.Comparator +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * A “stream of streams” sub-flow of data elements, e.g. produced by `groupBy`. @@ -161,66 +163,66 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and may complete in any order, but the elements that * are emitted downstream are in the same order as received from upstream. 
* - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive. * - * '''Emits when''' the Future returned by the provided function finishes for the next element in sequence + * '''Emits when''' the CompletionStage returned by the provided function finishes for the next element in sequence * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream - * backpressures or the first future is not completed + * '''Backpressures when''' the number of CompletionStages reaches the configured parallelism and the downstream + * backpressures or the first CompletionStage is not completed * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStages has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsyncUnordered]] */ - def mapAsync[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubSource[T, Mat] = - new SubSource(delegate.mapAsync(parallelism)(f.apply)) + def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] = + new SubSource(delegate.mapAsync(parallelism)(x => f(x).toScala)) /** * Transform this stream by applying the given function to each of the elements - * as they pass through this processing step. The function returns a `Future` and the - * value of that future will be emitted downstreams. As many futures as requested elements by + * as they pass through this processing step. The function returns a `CompletionStage` and the + * value of that future will be emitted downstreams. As many CompletionStages as requested elements by * downstream may run in parallel and each processed element will be emitted downstream * as soon as it is ready, i.e. it is possible that the elements are not emitted downstream * in the same order as received from upstream. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#stop]] * the stream will be completed with failure. * - * If the function `f` throws an exception or if the `Future` is completed + * If the function `f` throws an exception or if the `CompletionStage` is completed * with failure and the supervision decision is [[akka.stream.Supervision#resume]] or * [[akka.stream.Supervision#restart]] the element is dropped and the stream continues. * * The function `f` is always invoked on the elements in the order they arrive (even though the result of the futures * returned by `f` might be emitted in a different order). 
* - * '''Emits when''' any of the Futures returned by the provided function complete + * '''Emits when''' any of the CompletionStage returned by the provided function complete * - * '''Backpressures when''' the number of futures reaches the configured parallelism and the downstream backpressures + * '''Backpressures when''' the number of CompletionStage reaches the configured parallelism and the downstream backpressures * - * '''Completes when''' upstream completes and all futures has been completed and all elements has been emitted + * '''Completes when''' upstream completes and all CompletionStage has been completed and all elements has been emitted * * '''Cancels when''' downstream cancels * * @see [[#mapAsync]] */ - def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, Future[T]]): SubSource[T, Mat] = - new SubSource(delegate.mapAsyncUnordered(parallelism)(f.apply)) + def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] = + new SubSource(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala)) /** * Only pass on those elements that satisfy the given predicate. @@ -395,6 +397,22 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source def fold[T](zero: T)(f: function.Function2[T, Out, T]): SubSource[T, Mat] = new SubSource(delegate.fold(zero)(f.apply)) + /** + * Similar to `fold` but uses first element as zero element. + * Applies the given function towards its current and next value, + * yielding the next current value. + * + * '''Emits when''' upstream completes + * + * '''Backpressures when''' downstream backpressures + * + * '''Completes when''' upstream completes + * + * '''Cancels when''' downstream cancels + */ + def reduce(f: function.Function2[Out, Out, Out @uncheckedVariance]): SubSource[Out, Mat] = + new SubSource(delegate.reduce(f.apply)) + /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] * injects a separator between a List's elements. @@ -640,6 +658,8 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source * * '''Cancels when''' downstream cancels * + * see also [[SubSource.batch]] [[SubSource.batchWeighted]] + * * @param seed Provides the first state for a conflated value using the first unconsumed element as a start * @param aggregate Takes the currently aggregated value and the current pending element to produce a new aggregate * @@ -647,6 +667,62 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source def conflate[S](seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubSource[S, Mat] = new SubSource(delegate.conflate(seed.apply)(aggregate.apply)) + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might store received elements in + * an array up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. 
+ * + * '''Emits when''' downstream stops backpressuring and there is an aggregated element available + * + * '''Backpressures when''' there are `max` batched elements and 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[SubSource.conflate]], [[SubSource.batchWeighted]] + * + * @param max maximum number of elements to batch before backpressuring upstream (must be positive non-zero) + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new aggregate + */ + def batch[S](max: Long, seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubSource[S, Mat] = + new SubSource(delegate.batch(max, seed.apply)(aggregate.apply)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might concatenate `ByteString` + * elements up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * Batching will apply for all elements, even if a single element cost is greater than the total allowed limit. + * In this case, previous batched elements will be emitted, then the "heavy" element will be emitted (after + * being applied with the `seed` function) without batching further elements with it, and then the rest of the + * incoming elements are batched. + * + * '''Emits when''' downstream stops backpressuring and there is a batched element available + * + * '''Backpressures when''' there are `max` weighted batched elements + 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[SubSource.conflate]], [[SubSource.batch]] + * + * @param max maximum weight of elements to batch before backpressuring upstream (must be positive non-zero) + * @param costFn a function to compute a single element weight + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new batch + */ + def batchWeighted[S](max: Long, costFn: function.Function[Out, Long], seed: function.Function[Out, S], aggregate: function.Function2[S, Out, S]): SubSource[S, Mat] = + new SubSource(delegate.batchWeighted(max, costFn.apply, seed.apply)(aggregate.apply)) + /** * Allows a faster downstream to progress independently of a slower publisher by extrapolating elements from an older * element until new element comes from the upstream. 
For example an expand step might repeat the last element for @@ -661,7 +737,7 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source * * '''Emits when''' downstream stops backpressuring * - * '''Backpressures when''' downstream backpressures + * '''Backpressures when''' downstream backpressures or iterator runs emtpy * * '''Completes when''' upstream completes * @@ -671,11 +747,8 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source * @param extrapolate Takes the current extrapolation state to produce an output element and the next extrapolation * state. */ - def expand[S, U](seed: function.Function[Out, S], extrapolate: function.Function[S, akka.japi.Pair[U, S]]): SubSource[U, Mat] = - new SubSource(delegate.expand(seed(_))(s ⇒ { - val p = extrapolate(s) - (p.first, p.second) - })) + def expand[U](extrapolate: function.Function[Out, java.util.Iterator[U]]): SubSource[U, Mat] = + new SubSource(delegate.expand(in ⇒ extrapolate(in).asScala)) /** * Adds a fixed size buffer in the flow that allows to store elements from a faster upstream until it becomes full. @@ -728,7 +801,7 @@ class SubSource[+Out, +Mat](delegate: scaladsl.SubFlow[Out, Mat, scaladsl.Source * * '''Cancels when''' downstream cancels or substream cancels */ - def prefixAndTail(n: Int): SubSource[akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, Unit]], Mat] = + def prefixAndTail(n: Int): SubSource[akka.japi.Pair[java.util.List[Out @uncheckedVariance], javadsl.Source[Out @uncheckedVariance, NotUsed]], Mat] = new SubSource(delegate.prefixAndTail(n).map { case (taken, tail) ⇒ akka.japi.Pair(taken.asJava, tail.asJava) }) /** diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala b/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala index d7aa15935e..021573ede0 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/Tcp.scala @@ -1,13 +1,14 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.javadsl import java.lang.{ Iterable ⇒ JIterable } +import java.util.Optional +import akka.NotUsed import scala.collection.immutable import scala.concurrent.duration._ import java.net.InetSocketAddress -import scala.concurrent.Future import scala.util.control.NoStackTrace import akka.actor.ActorSystem import akka.actor.ExtendedActorSystem @@ -18,6 +19,9 @@ import akka.stream.scaladsl import akka.util.ByteString import akka.japi.Util.immutableSeq import akka.io.Inet.SocketOption +import scala.compat.java8.OptionConverters._ +import scala.compat.java8.FutureConverters._ +import java.util.concurrent.CompletionStage object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { @@ -34,9 +38,9 @@ object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { * Asynchronously triggers the unbinding of the port that was bound by the materialization of the `connections` * [[Source]]. * - * The produced [[scala.concurrent.Future]] is fulfilled when the unbinding has been completed. + * The produced [[java.util.concurrent.CompletionStage]] is fulfilled when the unbinding has been completed. */ - def unbind(): Future[Unit] = delegate.unbind() + def unbind(): CompletionStage[Unit] = delegate.unbind().toJava } /** @@ -66,7 +70,7 @@ object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { * A flow representing the client on the other side of the connection. * This flow can be materialized only once. 
*/ - def flow: Flow[ByteString, ByteString, Unit] = new Flow(delegate.flow) + def flow: Flow[ByteString, ByteString, NotUsed] = new Flow(delegate.flow) } /** @@ -123,10 +127,10 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { backlog: Int, options: JIterable[SocketOption], halfClose: Boolean, - idleTimeout: Duration): Source[IncomingConnection, Future[ServerBinding]] = + idleTimeout: Duration): Source[IncomingConnection, CompletionStage[ServerBinding]] = Source.fromGraph(delegate.bind(interface, port, backlog, immutableSeq(options), halfClose, idleTimeout) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates a [[Tcp.ServerBinding]] without specifying options. @@ -136,10 +140,10 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { * [[akka.stream.scaladsl.RunnableGraph]] the server is not immediately available. Only after the materialized future * completes is the server ready to accept client connections. */ - def bind(interface: String, port: Int): Source[IncomingConnection, Future[ServerBinding]] = + def bind(interface: String, port: Int): Source[IncomingConnection, CompletionStage[ServerBinding]] = Source.fromGraph(delegate.bind(interface, port) .map(new IncomingConnection(_)) - .mapMaterializedValue(_.map(new ServerBinding(_))(ec))) + .mapMaterializedValue(_.map(new ServerBinding(_))(ec).toJava)) /** * Creates an [[Tcp.OutgoingConnection]] instance representing a prospective TCP client connection to the given endpoint. @@ -158,20 +162,20 @@ class Tcp(system: ExtendedActorSystem) extends akka.actor.Extension { * independently whether the server is still attempting to write. */ def outgoingConnection(remoteAddress: InetSocketAddress, - localAddress: Option[InetSocketAddress], + localAddress: Optional[InetSocketAddress], options: JIterable[SocketOption], halfClose: Boolean, connectTimeout: Duration, - idleTimeout: Duration): Flow[ByteString, ByteString, Future[OutgoingConnection]] = - Flow.fromGraph(delegate.outgoingConnection(remoteAddress, localAddress, immutableSeq(options), halfClose, connectTimeout, idleTimeout) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec))) + idleTimeout: Duration): Flow[ByteString, ByteString, CompletionStage[OutgoingConnection]] = + Flow.fromGraph(delegate.outgoingConnection(remoteAddress, localAddress.asScala, immutableSeq(options), halfClose, connectTimeout, idleTimeout) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava)) /** * Creates an [[Tcp.OutgoingConnection]] without specifying options. * It represents a prospective TCP client connection to the given endpoint. */ - def outgoingConnection(host: String, port: Int): Flow[ByteString, ByteString, Future[OutgoingConnection]] = + def outgoingConnection(host: String, port: Int): Flow[ByteString, ByteString, CompletionStage[OutgoingConnection]] = Flow.fromGraph(delegate.outgoingConnection(new InetSocketAddress(host, port)) - .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec))) + .mapMaterializedValue(_.map(new OutgoingConnection(_))(ec).toJava)) } diff --git a/akka-stream/src/main/scala/akka/stream/javadsl/package.scala b/akka-stream/src/main/scala/akka/stream/javadsl/package.scala index 6a0e1b1ad1..cef096a023 100644 --- a/akka-stream/src/main/scala/akka/stream/javadsl/package.scala +++ b/akka-stream/src/main/scala/akka/stream/javadsl/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. 
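A sketch of binding a server through the javadsl `Tcp` extension after the change to `CompletionStage` materialized values (written in Scala for brevity; the actor system exists only for the example):

```
import akka.actor.ActorSystem
import akka.stream.javadsl.Tcp

val system = ActorSystem("tcp-example")

// materializes a CompletionStage[Tcp.ServerBinding] once run
val connections = Tcp.get(system).bind("127.0.0.1", 8888)
```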
+ * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/BidiFlow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/BidiFlow.scala index 9b652997e8..46a5e92f53 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/BidiFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/BidiFlow.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed import akka.stream._ import akka.stream.impl.StreamLayout.Module import akka.stream.impl.Timers @@ -155,6 +156,11 @@ final class BidiFlow[-I1, +O1, -I2, +O2, +Mat](private[stream] override val modu } object BidiFlow { + private[this] val _identity: BidiFlow[Any, Any, Any, Any, NotUsed] = + BidiFlow.fromFlows(Flow[Any], Flow[Any]) + + def identity[A, B]: BidiFlow[A, A, B, B, NotUsed] = _identity.asInstanceOf[BidiFlow[A, A, B, B, NotUsed]] + /** * A graph with the shape of a flow logically is a flow, this method makes * it so also in type. @@ -211,14 +217,14 @@ object BidiFlow { * */ def fromFlows[I1, O1, I2, O2, M1, M2](flow1: Graph[FlowShape[I1, O1], M1], - flow2: Graph[FlowShape[I2, O2], M2]): BidiFlow[I1, O1, I2, O2, Unit] = + flow2: Graph[FlowShape[I2, O2], M2]): BidiFlow[I1, O1, I2, O2, NotUsed] = fromFlowsMat(flow1, flow2)(Keep.none) /** * Create a BidiFlow where the top and bottom flows are just one simple mapping * stage each, expressed by the two functions. */ - def fromFunctions[I1, O1, I2, O2](outbound: I1 ⇒ O1, inbound: I2 ⇒ O2): BidiFlow[I1, O1, I2, O2, Unit] = + def fromFunctions[I1, O1, I2, O2](outbound: I1 ⇒ O1, inbound: I2 ⇒ O2): BidiFlow[I1, O1, I2, O2, NotUsed] = fromFlows(Flow[I1].map(outbound), Flow[I2].map(inbound)) /** @@ -230,6 +236,6 @@ object BidiFlow { * every second in one direction, but no elements are flowing in the other direction. I.e. this stage considers * the *joint* frequencies of the elements in both directions. */ - def bidirectionalIdleTimeout[I, O](timeout: FiniteDuration): BidiFlow[I, I, O, O, Unit] = + def bidirectionalIdleTimeout[I, O](timeout: FiniteDuration): BidiFlow[I, I, O, O, NotUsed] = fromGraph(new Timers.IdleTimeoutBidi(timeout)) } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/FileIO.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/FileIO.scala index 82006c58d8..4af4417f7e 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/FileIO.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/FileIO.scala @@ -1,11 +1,12 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.io.{ OutputStream, InputStream, File } import akka.stream.ActorAttributes +import akka.stream.io.IOResult import akka.stream.impl.Stages.DefaultAttributes import akka.stream.impl.io._ import akka.util.ByteString @@ -29,24 +30,26 @@ object FileIO { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. + * It materializes a [[Future]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * and a possible exception if IO operation was not completed successfully. 
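A sketch of `fromFunctions`, which now materializes `NotUsed`, building a trivial `Int`/`String` codec:

```
import akka.NotUsed
import akka.stream.scaladsl.BidiFlow

// outbound Ints become Strings, inbound Strings become Ints
val codec: BidiFlow[Int, String, String, Int, NotUsed] =
  BidiFlow.fromFunctions((i: Int) => i.toString, (s: String) => s.toInt)
```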
* * @param f the File to read from * @param chunkSize the size of each read operation, defaults to 8192 */ - def fromFile(f: File, chunkSize: Int = 8192): Source[ByteString, Future[Long]] = + def fromFile(f: File, chunkSize: Int = 8192): Source[ByteString, Future[IOResult]] = new Source(new FileSource(f, chunkSize, DefaultAttributes.fileSource, sourceShape("FileSource"))) /** * Creates a Sink which writes incoming [[ByteString]] elements to the given file and either overwrites * or appends to it. * - * Materializes a [[Future]] that will be completed with the size of the file (in bytes) at the streams completion. + * Materializes a [[Future]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. * * This source is backed by an Actor which will use the dedicated `akka.stream.blocking-io-dispatcher`, * unless configured otherwise by using [[ActorAttributes]]. */ - def toFile(f: File, append: Boolean = false): Sink[ByteString, Future[Long]] = + def toFile(f: File, append: Boolean = false): Sink[ByteString, Future[IOResult]] = new Sink(new FileSink(f, append, DefaultAttributes.fileSink, sinkShape("FileSink"))) -} \ No newline at end of file +} diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala index ef51d07c6d..88e8e0d9dd 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala @@ -1,14 +1,16 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl import akka.event.LoggingAdapter import akka.stream.Attributes._ import akka.stream._ +import akka.Done import akka.stream.impl.Stages.{ DirectProcessor, StageModule } import akka.stream.impl.StreamLayout.{ EmptyModule, Module } import akka.stream.impl._ +import akka.stream.impl.fusing.GraphStages.TerminationWatcher import akka.stream.impl.fusing._ import akka.stream.stage.AbstractStage.{ PushPullGraphStage, PushPullGraphStageWithMaterializedValue } import akka.stream.stage._ @@ -20,6 +22,8 @@ import scala.concurrent.duration.{ Duration, FiniteDuration } import scala.language.higherKinds import akka.stream.impl.fusing.FlattenMerge +import akka.NotUsed + /** * A `Flow` is a set of stream processing steps that has one open input and one open output. 
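Illustrative usage of the changed `FileIO` signatures above, which now materialize `Future[IOResult]` instead of `Future[Long]`. This is only a sketch against the API as it appears in this diff; the object name, file path, and the `count` accessor on `IOResult` are assumptions based on the scaladoc above.

```scala
import java.io.File

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.io.IOResult // imported from akka.stream.io as in this diff
import akka.stream.scaladsl.{ FileIO, Source }
import akka.util.ByteString

import scala.concurrent.Future
import scala.util.{ Failure, Success }

object FileIOExample extends App {
  implicit val system = ActorSystem("fileio-example")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // FileIO.toFile now materializes Future[IOResult] rather than Future[Long]
  val result: Future[IOResult] =
    Source(1 to 100)
      .map(i => ByteString(s"line $i\n"))
      .runWith(FileIO.toFile(new File("target/example.txt")))

  result.onComplete {
    case Success(io) => println(s"wrote ${io.count} bytes") // byte count as described in the scaladoc above
    case Failure(ex) => println(s"write failed: $ex")
  }
}
```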
*/ @@ -41,7 +45,7 @@ final class Flow[-In, +Out, +Mat](private[stream] override val module: Module) override def viaMat[T, Mat2, Mat3](flow: Graph[FlowShape[Out, T], Mat2])(combine: (Mat, Mat2) ⇒ Mat3): Flow[In, T, Mat3] = if (this.isIdentity) { Flow.fromGraph(flow.asInstanceOf[Graph[FlowShape[In, T], Mat2]]) - .mapMaterializedValue(combine(().asInstanceOf[Mat], _)) + .mapMaterializedValue(combine(NotUsed.asInstanceOf[Mat], _)) } else { val flowCopy = flow.module.carbonCopy new Flow( @@ -259,13 +263,13 @@ final class Flow[-In, +Out, +Mat](private[stream] override val module: Module) } object Flow { - private[this] val identity: Flow[Any, Any, Unit] = new Flow[Any, Any, Unit](GraphStages.Identity.module) + private[this] val identity: Flow[Any, Any, NotUsed] = new Flow[Any, Any, NotUsed](GraphStages.Identity.module) /** * Creates a Flow from a Reactive Streams [[org.reactivestreams.Processor]] */ - def fromProcessor[I, O](processorFactory: () ⇒ Processor[I, O]): Flow[I, O, Unit] = { - fromProcessorMat(() ⇒ (processorFactory(), ())) + def fromProcessor[I, O](processorFactory: () ⇒ Processor[I, O]): Flow[I, O, NotUsed] = { + fromProcessorMat(() ⇒ (processorFactory(), NotUsed)) } /** @@ -279,7 +283,7 @@ object Flow { * Helper to create `Flow` without a [[Source]] or a [[Sink]]. * Example usage: `Flow[Int]` */ - def apply[T]: Flow[T, T, Unit] = identity.asInstanceOf[Flow[T, T, Unit]] + def apply[T]: Flow[T, T, NotUsed] = identity.asInstanceOf[Flow[T, T, NotUsed]] /** * A graph with the shape of a flow logically is a flow, this method makes @@ -295,7 +299,7 @@ object Flow { /** * Helper to create `Flow` from a `Sink`and a `Source`. */ - def fromSinkAndSource[I, O](sink: Graph[SinkShape[I], _], source: Graph[SourceShape[O], _]): Flow[I, O, Unit] = + def fromSinkAndSource[I, O](sink: Graph[SinkShape[I], _], source: Graph[SourceShape[O], _]): Flow[I, O, NotUsed] = fromSinkAndSourceMat(sink, source)(Keep.none) /** @@ -534,7 +538,7 @@ trait FlowOps[+Out, +Mat] { * '''Cancels when''' downstream cancels */ def filterNot(p: Out ⇒ Boolean): Repr[Out] = - via(Flow[Out].filter(!p(_)).withAttributes(name("filterNot"))) + via(Flow[Out].filter(!p(_)).withAttributes(DefaultAttributes.filterNot)) /** * Terminate processing (and cancel the upstream publisher) after predicate @@ -704,9 +708,28 @@ trait FlowOps[+Out, +Mat] { * '''Completes when''' upstream completes * * '''Cancels when''' downstream cancels + * + * See also [[FlowOps.scan]] */ def fold[T](zero: T)(f: (T, Out) ⇒ T): Repr[T] = andThen(Fold(zero, f)) + /** + * Similar to `fold` but uses first element as zero element. + * Applies the given function towards its current and next value, + * yielding the next current value. + * + * '''Emits when''' upstream completes + * + * '''Backpressures when''' downstream backpressures + * + * '''Completes when''' upstream completes + * + * '''Cancels when''' downstream cancels + * + * See also [[FlowOps.fold]] + */ + def reduce[T >: Out](f: (T, T) ⇒ T): Repr[T] = via(new Reduce[T](f)) + /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] * injects a separator between a List's elements. 
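A minimal sketch of the new `reduce` stage documented above, which folds the stream using its first element as the zero element (the object name and the running system/materializer are illustrative):

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }

object ReduceExample extends App {
  implicit val system = ActorSystem("reduce-example")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // reduce needs no explicit zero: the first stream element seeds the fold
  Source(1 to 10)
    .reduce(_ + _)
    .runWith(Sink.head)
    .foreach(sum => println(s"sum = $sum")) // 55
}
```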
@@ -738,12 +761,8 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream cancels */ - def intersperse[T >: Out](start: T, inject: T, end: T): Repr[T] = { - ReactiveStreamsCompliance.requireNonNullElement(start) - ReactiveStreamsCompliance.requireNonNullElement(inject) - ReactiveStreamsCompliance.requireNonNullElement(end) + def intersperse[T >: Out](start: T, inject: T, end: T): Repr[T] = via(Intersperse(Some(start), inject, Some(end))) - } /** * Intersperses stream with provided element, similar to how [[scala.collection.immutable.List.mkString]] @@ -767,10 +786,8 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream cancels */ - def intersperse[T >: Out](inject: T): Repr[T] = { - ReactiveStreamsCompliance.requireNonNullElement(inject) + def intersperse[T >: Out](inject: T): Repr[T] = via(Intersperse(None, inject, None)) - } /** * Chunk up this stream into groups of elements received within a time window, @@ -790,11 +807,8 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream completes */ - def groupedWithin(n: Int, d: FiniteDuration): Repr[immutable.Seq[Out]] = { - require(n > 0, "n must be greater than 0") - require(d > Duration.Zero) - via(new GroupedWithin[Out](n, d).withAttributes(name("groupedWithin"))) - } + def groupedWithin(n: Int, d: FiniteDuration): Repr[immutable.Seq[Out]] = + via(new GroupedWithin[Out](n, d)) /** * Shifts elements emission in time by a specified amount. It allows to store elements @@ -822,7 +836,7 @@ trait FlowOps[+Out, +Mat] { * @param strategy Strategy that is used when incoming elements cannot fit inside the buffer */ def delay(of: FiniteDuration, strategy: DelayOverflowStrategy = DelayOverflowStrategy.dropTail): Repr[Out] = - via(new Delay[Out](of, strategy).withAttributes(name("delay"))) + via(new Delay[Out](of, strategy)) /** * Discard the given number of elements at the beginning of the stream. @@ -850,7 +864,7 @@ trait FlowOps[+Out, +Mat] { * '''Cancels when''' downstream cancels */ def dropWithin(d: FiniteDuration): Repr[Out] = - via(new DropWithin[Out](d).withAttributes(name("dropWithin"))) + via(new DropWithin[Out](d)) /** * Terminate processing (and cancel the upstream publisher) after the given @@ -890,7 +904,7 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream cancels or timer fires */ - def takeWithin(d: FiniteDuration): Repr[Out] = via(new TakeWithin[Out](d).withAttributes(name("takeWithin"))) + def takeWithin(d: FiniteDuration): Repr[Out] = via(new TakeWithin[Out](d)) /** * Allows a faster upstream to progress independently of a slower subscriber by conflating elements into a summary @@ -911,9 +925,67 @@ trait FlowOps[+Out, +Mat] { * @param seed Provides the first state for a conflated value using the first unconsumed element as a start * @param aggregate Takes the currently aggregated value and the current pending element to produce a new aggregate * - * See also [[FlowOps.limit]], [[FlowOps.limitWeighted]] + * See also [[FlowOps.limit]], [[FlowOps.limitWeighted]] [[FlowOps.batch]] [[FlowOps.batchWeighted]] */ def conflate[S](seed: Out ⇒ S)(aggregate: (S, Out) ⇒ S): Repr[S] = andThen(Conflate(seed, aggregate)) + //FIXME: conflate can be expressed as a batch + //via(Batch(1L, ConstantFun.zeroLong, seed, aggregate).withAttributes(DefaultAttributes.conflate)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. 
For example a batch step might store received elements in + * an array up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * '''Emits when''' downstream stops backpressuring and there is an aggregated element available + * + * '''Backpressures when''' there are `max` batched elements and 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[FlowOps.conflate]], [[FlowOps.batchWeighted]] + * + * @param max maximum number of elements to batch before backpressuring upstream (must be positive non-zero) + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new aggregate + */ + def batch[S](max: Long, seed: Out ⇒ S)(aggregate: (S, Out) ⇒ S): Repr[S] = + via(Batch(max, ConstantFun.oneLong, seed, aggregate).withAttributes(DefaultAttributes.batch)) + + /** + * Allows a faster upstream to progress independently of a slower subscriber by aggregating elements into batches + * until the subscriber is ready to accept them. For example a batch step might concatenate `ByteString` + * elements up to the allowed max limit if the upstream publisher is faster. + * + * This element only rolls up elements if the upstream is faster, but if the downstream is faster it will not + * duplicate elements. + * + * Batching will apply for all elements, even if a single element cost is greater than the total allowed limit. + * In this case, previous batched elements will be emitted, then the "heavy" element will be emitted (after + * being applied with the `seed` function) without batching further elements with it, and then the rest of the + * incoming elements are batched. 
+ * + * '''Emits when''' downstream stops backpressuring and there is a batched element available + * + * '''Backpressures when''' there are `max` weighted batched elements + 1 pending element and downstream backpressures + * + * '''Completes when''' upstream completes and there is no batched/pending element waiting + * + * '''Cancels when''' downstream cancels + * + * See also [[FlowOps.conflate]], [[FlowOps.batch]] + * + * @param max maximum weight of elements to batch before backpressuring upstream (must be positive non-zero) + * @param costFn a function to compute a single element weight + * @param seed Provides the first state for a batched value using the first unconsumed element as a start + * @param aggregate Takes the currently batched value and the current pending element to produce a new batch + */ + def batchWeighted[S](max: Long, costFn: Out ⇒ Long, seed: Out ⇒ S)(aggregate: (S, Out) ⇒ S): Repr[S] = + via(Batch(max, costFn, seed, aggregate).withAttributes(DefaultAttributes.batchWeighted)) /** * Allows a faster downstream to progress independently of a slower publisher by extrapolating elements from an older @@ -929,7 +1001,7 @@ trait FlowOps[+Out, +Mat] { * * '''Emits when''' downstream stops backpressuring * - * '''Backpressures when''' downstream backpressures + * '''Backpressures when''' downstream backpressures or the iterator runs empty * * '''Completes when''' upstream completes * @@ -939,7 +1011,7 @@ trait FlowOps[+Out, +Mat] { * @param extrapolate Takes the current extrapolation state to produce an output element and the next extrapolation * state. */ - def expand[S, U](seed: Out ⇒ S)(extrapolate: S ⇒ (U, S)): Repr[U] = andThen(Expand(seed, extrapolate)) + def expand[U](extrapolate: Out ⇒ Iterator[U]): Repr[U] = via(new Expand(extrapolate)) /** * Adds a fixed size buffer in the flow that allows to store elements from a faster upstream until it becomes full. 
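A short sketch of the new `batch` operator and the reworked `expand`, whose extrapolation is now expressed as an `Out ⇒ Iterator[U]` function. Element grouping depends on runtime timing; names and sizes are illustrative.

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }

object BatchAndExpandExample extends App {
  implicit val system = ActorSystem("batch-expand-example")
  implicit val materializer = ActorMaterializer()

  // batch: while downstream is slow, roll incoming Ints into Vectors of at most 100 elements
  Source(1 to 1000)
    .batch(max = 100, seed = (i: Int) => Vector(i))(_ :+ _)
    .runWith(Sink.foreach(group => println(s"batch of ${group.size}")))

  // expand (new signature): keep repeating the last element while downstream is faster than upstream
  Source(1 to 10)
    .expand(i => Iterator.continually(i))
    .take(30)
    .runWith(Sink.foreach(println))
}
```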
@@ -991,7 +1063,7 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream cancels or substream cancels */ - def prefixAndTail[U >: Out](n: Int): Repr[(immutable.Seq[Out], Source[U, Unit])] = + def prefixAndTail[U >: Out](n: Int): Repr[(immutable.Seq[Out], Source[U, NotUsed])] = via(new PrefixAndTail[Out](n)) /** @@ -1036,13 +1108,13 @@ trait FlowOps[+Out, +Mat] { def groupBy[K](maxSubstreams: Int, f: Out ⇒ K): SubFlow[Out, Mat, Repr, Closed] = { implicit def mat = GraphInterpreter.currentInterpreter.materializer val merge = new SubFlowImpl.MergeBack[Out, Repr] { - override def apply[T](flow: Flow[Out, T, Unit], breadth: Int): Repr[T] = - deprecatedAndThen[Source[Out, Unit]](GroupBy(maxSubstreams, f.asInstanceOf[Any ⇒ Any])) + override def apply[T](flow: Flow[Out, T, NotUsed], breadth: Int): Repr[T] = + deprecatedAndThen[Source[Out, NotUsed]](GroupBy(maxSubstreams, f.asInstanceOf[Any ⇒ Any])) .map(_.via(flow)) .via(new FlattenMerge(breadth)) } - val finish: (Sink[Out, Unit]) ⇒ Closed = s ⇒ - deprecatedAndThen[Source[Out, Unit]](GroupBy(maxSubstreams, f.asInstanceOf[Any ⇒ Any])) + val finish: (Sink[Out, NotUsed]) ⇒ Closed = s ⇒ + deprecatedAndThen[Source[Out, NotUsed]](GroupBy(maxSubstreams, f.asInstanceOf[Any ⇒ Any])) .to(Sink.foreach(_.runWith(s))) new SubFlowImpl(Flow[Out], merge, finish) } @@ -1102,12 +1174,12 @@ trait FlowOps[+Out, +Mat] { */ def splitWhen(p: Out ⇒ Boolean): SubFlow[Out, Mat, Repr, Closed] = { val merge = new SubFlowImpl.MergeBack[Out, Repr] { - override def apply[T](flow: Flow[Out, T, Unit], breadth: Int): Repr[T] = + override def apply[T](flow: Flow[Out, T, NotUsed], breadth: Int): Repr[T] = via(Split.when(p)) .map(_.via(flow)) .via(new FlattenMerge(breadth)) } - val finish: (Sink[Out, Unit]) ⇒ Closed = s ⇒ + val finish: (Sink[Out, NotUsed]) ⇒ Closed = s ⇒ via(Split.when(p)) .to(Sink.foreach(_.runWith(s)(GraphInterpreter.currentInterpreter.materializer))) new SubFlowImpl(Flow[Out], merge, finish) @@ -1159,12 +1231,12 @@ trait FlowOps[+Out, +Mat] { */ def splitAfter(p: Out ⇒ Boolean): SubFlow[Out, Mat, Repr, Closed] = { val merge = new SubFlowImpl.MergeBack[Out, Repr] { - override def apply[T](flow: Flow[Out, T, Unit], breadth: Int): Repr[T] = + override def apply[T](flow: Flow[Out, T, NotUsed], breadth: Int): Repr[T] = via(Split.after(p)) .map(_.via(flow)) .via(new FlattenMerge(breadth)) } - val finish: (Sink[Out, Unit]) ⇒ Closed = s ⇒ + val finish: (Sink[Out, NotUsed]) ⇒ Closed = s ⇒ via(Split.after(p)) .to(Sink.foreach(_.runWith(s)(GraphInterpreter.currentInterpreter.materializer))) new SubFlowImpl(Flow[Out], merge, finish) @@ -1285,13 +1357,8 @@ trait FlowOps[+Out, +Mat] { * * '''Cancels when''' downstream cancels */ - def throttle(elements: Int, per: FiniteDuration, maximumBurst: Int, - mode: ThrottleMode): Repr[Out] = { - require(elements > 0, "elements must be > 0") - require(per.toMillis > 0, "per time must be > 0") - require(!(mode == ThrottleMode.Enforcing && maximumBurst < 0), "maximumBurst must be > 0 in Enforcing mode") - via(new Throttle(elements, per, maximumBurst, _ ⇒ 1, mode)) - } + def throttle(elements: Int, per: FiniteDuration, maximumBurst: Int, mode: ThrottleMode): Repr[Out] = + throttle(elements, per, maximumBurst, _ ⇒ 1, mode) /** * Sends elements downstream with speed limited to `cost/per`. 
Cost is @@ -1320,11 +1387,8 @@ trait FlowOps[+Out, +Mat] { * '''Cancels when''' downstream cancels */ def throttle(cost: Int, per: FiniteDuration, maximumBurst: Int, - costCalculation: (Out) ⇒ Int, mode: ThrottleMode): Repr[Out] = { - require(per.toMillis > 0, "per time must be > 0") - require(!(mode == ThrottleMode.Enforcing && maximumBurst < 0), "maximumBurst must be > 0 in Enforcing mode") + costCalculation: (Out) ⇒ Int, mode: ThrottleMode): Repr[Out] = via(new Throttle(cost, per, maximumBurst, costCalculation, mode)) - } /** * Detaches upstream demand from downstream demand without detaching the @@ -1789,10 +1853,19 @@ trait FlowOpsMat[+Out, +Mat] extends FlowOps[Out, Mat] { def alsoToMat[Mat2, Mat3](that: Graph[SinkShape[Out], Mat2])(matF: (Mat, Mat2) ⇒ Mat3): ReprMat[Out, Mat3] = viaMat(alsoToGraph(that))(matF) + /** + * Materializes to `Future[Done]` that completes on getting termination message. + * The Future completes with success when received complete message from upstream or cancel + * from downstream. It fails with the same error when received error message from + * downstream. + */ + def watchTermination[Mat2]()(matF: (Mat, Future[Done]) ⇒ Mat2): ReprMat[Out, Mat2] = + viaMat(GraphStages.terminationWatcher)(matF) + /** * INTERNAL API. */ private[akka] def transformMaterializing[T, M](mkStageAndMaterialized: () ⇒ (Stage[Out, T], M)): ReprMat[T, M] = - viaMat(new PushPullGraphStageWithMaterializedValue[Out, T, Unit, M]((attr) ⇒ mkStageAndMaterialized(), Attributes.none))(Keep.right) + viaMat(new PushPullGraphStageWithMaterializedValue[Out, T, NotUsed, M]((attr) ⇒ mkStageAndMaterialized(), Attributes.none))(Keep.right) } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala index 71767d94fb..7cb117b495 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala @@ -1,17 +1,19 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.stream.scaladsl -import akka.stream.impl.Stages.{ StageModule, SymbolicStage } -import akka.stream.impl._ -import akka.stream.impl.StreamLayout._ +import akka.NotUsed import akka.stream._ +import akka.stream.impl._ +import akka.stream.impl.fusing.GraphStages +import akka.stream.impl.fusing.GraphStages.MaterializedValueSource +import akka.stream.impl.Stages.{ DefaultAttributes, StageModule, SymbolicStage } +import akka.stream.impl.StreamLayout._ import akka.stream.stage.{ OutHandler, InHandler, GraphStageLogic, GraphStage } import scala.annotation.unchecked.uncheckedVariance import scala.annotation.tailrec import scala.collection.immutable -import akka.stream.impl.fusing.GraphStages.MaterializedValueSource object Merge { /** @@ -37,11 +39,12 @@ object Merge { * '''Cancels when''' downstream cancels */ final class Merge[T] private (val inputPorts: Int, val eagerComplete: Boolean) extends GraphStage[UniformFanInShape[T, T]] { - require(inputPorts > 1, "A Merge must have more than 1 input port") + // one input might seem counter intuitive but saves us from special handling in other places + require(inputPorts >= 1, "A Merge must have one or more input ports") val in: immutable.IndexedSeq[Inlet[T]] = Vector.tabulate(inputPorts)(i ⇒ Inlet[T]("Merge.in" + i)) val out: Outlet[T] = Outlet[T]("Merge.out") - override def initialAttributes = Attributes.name("Merge") + override def initialAttributes = DefaultAttributes.merge override val shape: UniformFanInShape[T, T] = UniformFanInShape(out, in: _*) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { @@ -144,7 +147,7 @@ object MergePreferred { final class MergePreferred[T] private (val secondaryPorts: Int, val eagerComplete: Boolean) extends GraphStage[MergePreferred.MergePreferredShape[T]] { require(secondaryPorts >= 1, "A MergePreferred must have more than 0 secondary input ports") - override def initialAttributes = Attributes.name("MergePreferred") + override def initialAttributes = DefaultAttributes.mergePreferred override val shape: MergePreferred.MergePreferredShape[T] = new MergePreferred.MergePreferredShape(secondaryPorts, "MergePreferred") @@ -159,16 +162,12 @@ final class MergePreferred[T] private (val secondaryPorts: Int, val eagerComplet if (eagerComplete || openInputs == 0) completeStage() } - setHandler(out, new OutHandler { - private var first = true - override def onPull(): Unit = { - if (first) { - first = false - tryPull(preferred) - shape.inSeq.foreach(tryPull) - } - } - }) + override def preStart(): Unit = { + tryPull(preferred) + shape.inSeq.foreach(tryPull) + } + + setHandler(out, eagerTerminateOutput) val pullMe = Array.tabulate(secondaryPorts)(i ⇒ { val port = in(i) @@ -240,8 +239,8 @@ object Interleave { * @param segmentSize number of elements to send downstream before switching to next input port * @param eagerClose if true, interleave completes upstream if any of its upstream completes. 
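`Interleave.apply` now returns a `Graph[UniformFanInShape[T, T], NotUsed]` (wrapped with detached inputs) rather than the raw stage, but it is still added to a graph through the DSL as before; a minimal sketch (names are illustrative):

```scala
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, ClosedShape }
import akka.stream.scaladsl.{ GraphDSL, Interleave, RunnableGraph, Sink, Source }

object InterleaveExample extends App {
  implicit val system = ActorSystem("interleave-example")
  implicit val materializer = ActorMaterializer()

  RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
    import GraphDSL.Implicits._

    // take two elements from the first input, then two from the second, and so on
    val interleave = b.add(Interleave[Int](inputPorts = 2, segmentSize = 2))

    Source(1 to 6)   ~> interleave.in(0)
    Source(10 to 15) ~> interleave.in(1)
    interleave.out   ~> Sink.foreach[Int](println)

    ClosedShape
  }).run()
}
```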
*/ - def apply[T](inputPorts: Int, segmentSize: Int, eagerClose: Boolean = false): Interleave[T] = - new Interleave(inputPorts, segmentSize, eagerClose) + def apply[T](inputPorts: Int, segmentSize: Int, eagerClose: Boolean = false): Graph[UniformFanInShape[T, T], NotUsed] = + GraphStages.withDetachedInputs(new Interleave[T](inputPorts, segmentSize, eagerClose)) } /** @@ -397,10 +396,11 @@ object Broadcast { * */ final class Broadcast[T](private val outputPorts: Int, eagerCancel: Boolean) extends GraphStage[UniformFanOutShape[T, T]] { - require(outputPorts > 1, "A Broadcast must have more than 1 output ports") + // one output might seem counter intuitive but saves us from special handling in other places + require(outputPorts >= 1, "A Broadcast must have one or more output ports") val in: Inlet[T] = Inlet[T]("Broadast.in") val out: immutable.IndexedSeq[Outlet[T]] = Vector.tabulate(outputPorts)(i ⇒ Outlet[T]("Broadcast.out" + i)) - override def initialAttributes = Attributes.name("Broadcast") + override def initialAttributes = DefaultAttributes.broadcast override val shape: UniformFanOutShape[T, T] = UniformFanOutShape(in, out: _*) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { @@ -467,6 +467,109 @@ final class Broadcast[T](private val outputPorts: Int, eagerCancel: Boolean) ext } +object Partition { + + /** + * Create a new `Partition` stage with the specified input type. + * + * @param outputPorts number of output ports + * @param partitioner function deciding which output each element will be targeted + */ + def apply[T](outputPorts: Int, partitioner: T ⇒ Int): Partition[T] = new Partition(outputPorts, partitioner) +} + +/** + * Fan-out the stream to several streams. emitting an incoming upstream element to one downstream consumer according + * to the partitioner function applied to the element + * + * '''Emits when''' emits when an element is available from the input and the chosen output has demand + * + * '''Backpressures when''' the currently chosen output back-pressures + * + * '''Completes when''' upstream completes and no output is pending + * + * '''Cancels when''' + * when all downstreams cancel + */ + +final class Partition[T](outputPorts: Int, partitioner: T ⇒ Int) extends GraphStage[UniformFanOutShape[T, T]] { + + val in: Inlet[T] = Inlet[T]("Partition.in") + val out: Seq[Outlet[T]] = Seq.tabulate(outputPorts)(i ⇒ Outlet[T]("Partition.out" + i)) + override val shape: UniformFanOutShape[T, T] = UniformFanOutShape[T, T](in, out: _*) + + override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { + private var outPendingElem: Any = null + private var outPendingIdx: Int = _ + private var downstreamRunning = outputPorts + + setHandler(in, new InHandler { + override def onPush() = { + val elem = grab(in) + val idx = partitioner(elem) + if (idx < 0 || idx >= outputPorts) + failStage(new IndexOutOfBoundsException(s"partitioner must return an index in the range [0,${outputPorts - 1}]. 
returned: [$idx] for input [$elem].")) + else if (!isClosed(out(idx))) { + if (isAvailable(out(idx))) { + push(out(idx), elem) + if (out.exists(isAvailable(_))) + pull(in) + } else { + outPendingElem = elem + outPendingIdx = idx + } + + } else if (out.exists(isAvailable(_))) + pull(in) + } + + override def onUpstreamFinish(): Unit = { + if (outPendingElem == null) + completeStage() + } + }) + + out.zipWithIndex.foreach { + case (o, idx) ⇒ + setHandler(o, new OutHandler { + override def onPull() = { + + if (outPendingElem != null) { + val elem = outPendingElem.asInstanceOf[T] + if (idx == outPendingIdx) { + push(o, elem) + outPendingElem = null + if (!isClosed(in)) { + if (!hasBeenPulled(in)) { + pull(in) + } + } else + completeStage() + } + } else if (!hasBeenPulled(in)) + pull(in) + } + + override def onDownstreamFinish(): Unit = { + downstreamRunning -= 1 + if (downstreamRunning == 0) + completeStage() + else if (outPendingElem != null) { + if (idx == outPendingIdx) { + outPendingElem = null + if (!hasBeenPulled(in)) + pull(in) + } + } + } + }) + } + } + + override def toString = s"Partition($outputPorts)" + +} + object Balance { /** * Create a new `Balance` with the specified number of output ports. @@ -496,10 +599,11 @@ object Balance { * '''Cancels when''' all downstreams cancel */ final class Balance[T](val outputPorts: Int, waitForAllDownstreams: Boolean) extends GraphStage[UniformFanOutShape[T, T]] { - require(outputPorts > 1, "A Balance must have more than 1 output ports") + // one output might seem counter intuitive but saves us from special handling in other places + require(outputPorts >= 1, "A Balance must have one or more output ports") val in: Inlet[T] = Inlet[T]("Balance.in") val out: immutable.IndexedSeq[Outlet[T]] = Vector.tabulate(outputPorts)(i ⇒ Outlet[T]("Balance.out" + i)) - override def initialAttributes = Attributes.name("Balance") + override def initialAttributes = DefaultAttributes.balance override val shape: UniformFanOutShape[T, T] = UniformFanOutShape[T, T](in, out: _*) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { @@ -644,7 +748,8 @@ object Concat { /** * Create a new `Concat`. 
*/ - def apply[T](inputPorts: Int = 2): Concat[T] = new Concat(inputPorts) + def apply[T](inputPorts: Int = 2): Graph[UniformFanInShape[T, T], NotUsed] = + GraphStages.withDetachedInputs(new Concat[T](inputPorts)) } /** @@ -666,7 +771,7 @@ final class Concat[T](inputPorts: Int) extends GraphStage[UniformFanInShape[T, T require(inputPorts > 1, "A Concat must have more than 1 input ports") val in: immutable.IndexedSeq[Inlet[T]] = Vector.tabulate(inputPorts)(i ⇒ Inlet[T]("Concat.in" + i)) val out: Outlet[T] = Outlet[T]("Concat.out") - override def initialAttributes = Attributes.name("Concat") + override def initialAttributes = DefaultAttributes.concat override val shape: UniformFanInShape[T, T] = UniformFanInShape(out, in: _*) override def createLogic(inheritedAttributes: Attributes) = new GraphStageLogic(shape) { @@ -893,7 +998,7 @@ object GraphDSL extends GraphApply { // Although Mat is always Unit, it cannot be removed as a type parameter, otherwise the "override type" // won't work below - trait PortOps[+Out] extends FlowOps[Out, Unit] with CombinerBase[Out] { + trait PortOps[+Out] extends FlowOps[Out, NotUsed] with CombinerBase[Out] { override type Repr[+O] = PortOps[O] override type Closed = Unit def outlet: Outlet[Out @uncheckedVariance] @@ -921,8 +1026,9 @@ object GraphDSL extends GraphApply { new PortOpsImpl(op.shape.out.asInstanceOf[Outlet[U]], b) } - def to[Mat2](sink: Graph[SinkShape[Out], Mat2]): Closed = + def to[Mat2](sink: Graph[SinkShape[Out], Mat2]): Closed = { super.~>(sink)(b) + } } private class DisabledPortOps[Out](msg: String) extends PortOpsImpl[Out](null, null) { diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitMaterializer.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitMaterializer.scala deleted file mode 100644 index 5b53cc02bf..0000000000 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/ImplicitMaterializer.scala +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (C) 2014 Typesafe Inc. - */ -package akka.stream.scaladsl - -import akka.actor.Actor -import akka.stream.ActorMaterializerSettings -import akka.stream.ActorMaterializer - -/** - * Mix this trait into your [[akka.actor.Actor]] if you need an implicit - * [[akka.stream.Materializer]] in scope. - * - * Subclass may override [[#materializerSettings]] to define custom - * [[akka.stream.ActorMaterializerSettings]] for the `Materializer`. - */ -trait ImplicitMaterializer { this: Actor ⇒ - - /** - * Subclass may override this to define custom - * [[akka.stream.ActorMaterializerSettings]] for the `Materializer`. - */ - def materializerSettings: ActorMaterializerSettings = ActorMaterializerSettings(context.system) - - final implicit val materializer: ActorMaterializer = ActorMaterializer(Some(materializerSettings)) -} diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Materialization.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Materialization.scala index 873d59e687..eb6fa0eaca 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Materialization.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Materialization.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed + /** * Convenience functions for often-encountered purposes like keeping only the * left (first) or only the right (second) of two input values. 
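A sketch of the new `Partition` fan-out stage introduced in Graph.scala above, routing each element to the output chosen by the partitioner function (names are illustrative):

```scala
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, ClosedShape }
import akka.stream.scaladsl.{ GraphDSL, Partition, RunnableGraph, Sink, Source }

object PartitionExample extends App {
  implicit val system = ActorSystem("partition-example")
  implicit val materializer = ActorMaterializer()

  RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
    import GraphDSL.Implicits._

    // even numbers go to output 0, odd numbers to output 1
    val partition = b.add(Partition[Int](outputPorts = 2, partitioner = i => i % 2))

    Source(1 to 10)  ~> partition.in
    partition.out(0) ~> Sink.foreach[Int](i => println(s"even: $i"))
    partition.out(1) ~> Sink.foreach[Int](i => println(s"odd:  $i"))

    ClosedShape
  }).run()
}
```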
@@ -11,10 +13,10 @@ object Keep { private val _left = (l: Any, r: Any) ⇒ l private val _right = (l: Any, r: Any) ⇒ r private val _both = (l: Any, r: Any) ⇒ (l, r) - private val _none = (l: Any, r: Any) ⇒ () + private val _none = (l: Any, r: Any) ⇒ NotUsed def left[L, R]: (L, R) ⇒ L = _left.asInstanceOf[(L, R) ⇒ L] def right[L, R]: (L, R) ⇒ R = _right.asInstanceOf[(L, R) ⇒ R] def both[L, R]: (L, R) ⇒ (L, R) = _both.asInstanceOf[(L, R) ⇒ (L, R)] - def none[L, R]: (L, R) ⇒ Unit = _none.asInstanceOf[(L, R) ⇒ Unit] + def none[L, R]: (L, R) ⇒ NotUsed = _none.asInstanceOf[(L, R) ⇒ NotUsed] } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/One2OneBidiFlow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/One2OneBidiFlow.scala index a198d39389..d78e1dba00 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/One2OneBidiFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/One2OneBidiFlow.scala @@ -1,8 +1,10 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl +import akka.NotUsed + import scala.util.control.NoStackTrace import akka.stream._ import akka.stream.stage.{ OutHandler, InHandler, GraphStageLogic, GraphStage } @@ -23,7 +25,7 @@ object One2OneBidiFlow { * 3. Backpressures the input side if the maximum number of pending output elements has been reached, * which is given via the ``maxPending`` parameter. You can use -1 to disable this feature. */ - def apply[I, O](maxPending: Int): BidiFlow[I, I, O, O, Unit] = + def apply[I, O](maxPending: Int): BidiFlow[I, I, O, O, NotUsed] = BidiFlow.fromGraph(new One2OneBidi[I, O](maxPending)) class One2OneBidi[I, O](maxPending: Int) extends GraphStage[BidiShape[I, I, O, O]] { diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala new file mode 100644 index 0000000000..207b6f1147 --- /dev/null +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Queue.scala @@ -0,0 +1,47 @@ +/** + * Copyright (C) 2015-2016 Typesafe Inc. + */ +package akka.stream.scaladsl + +import scala.concurrent.Future +import akka.Done +import akka.stream.QueueOfferResult + +/** + * This trait allows to have the queue as a data source for some stream. + */ +trait SourceQueue[T] { + + /** + * Method offers next element to a stream and returns future that: + * - completes with `Enqueued` if element is consumed by a stream + * - completes with `Dropped` when stream dropped offered element + * - completes with `QueueClosed` when stream is completed during future is active + * - completes with `Failure(f)` when failure to enqueue element from upstream + * - fails when stream is completed or you cannot call offer in this moment because of implementation rules + * (like for backpressure mode and full buffer you need to wait for last offer call Future completion) + * + * @param elem element to send to a stream + */ + def offer(elem: T): Future[QueueOfferResult] + + /** + * Method returns future that completes when stream is completed and fails when stream failed + */ + def watchCompletion(): Future[Done] +} + +/** + * Trait allows to have the queue as a sink for some stream. + * "SinkQueue" pulls data from stream with backpressure mechanism. + */ +trait SinkQueue[T] { + + /** + * Method pulls elements from stream and returns future that: + * - fails if stream is failed + * - completes with None in case if stream is completed + * - completes with `Some(element)` in case next element is available from stream. 
+ */ + def pull(): Future[Option[T]] +} diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala index 862f32b882..ef4d2d9658 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala @@ -1,9 +1,10 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.io.{ InputStream, OutputStream, File } +import akka.{ Done, NotUsed } import akka.dispatch.ExecutionContexts import akka.actor.{ Status, ActorRef, Props } import akka.stream.actor.ActorSubscriber @@ -15,7 +16,6 @@ import akka.stream.stage.{ Context, PushStage, SyncDirective, TerminationDirecti import akka.stream.{ javadsl, _ } import akka.util.ByteString import org.reactivestreams.{ Publisher, Subscriber } - import scala.annotation.tailrec import scala.concurrent.duration.{ FiniteDuration, _ } import scala.concurrent.{ ExecutionContext, Future } @@ -87,14 +87,14 @@ object Sink { /** * Helper to create [[Sink]] from `Subscriber`. */ - def fromSubscriber[T](subscriber: Subscriber[T]): Sink[T, Unit] = + def fromSubscriber[T](subscriber: Subscriber[T]): Sink[T, NotUsed] = new Sink(new SubscriberSink(subscriber, DefaultAttributes.subscriberSink, shape("SubscriberSink"))) /** * A `Sink` that immediately cancels its upstream after materialization. */ - def cancelled[T]: Sink[T, Unit] = - new Sink[Any, Unit](new CancelSink(DefaultAttributes.cancelledSink, shape("CancelledSink"))) + def cancelled[T]: Sink[T, NotUsed] = + new Sink[Any, NotUsed](new CancelSink(DefaultAttributes.cancelledSink, shape("CancelledSink"))) /** * A `Sink` that materializes into a `Future` of the first value received. @@ -141,6 +141,8 @@ object Sink { * As upstream may be unbounded, `Flow[T].take` or the stricter `Flow[T].limit` (and their variants) * may be used to ensure boundedness. * Materializes into a `Future` of `Seq[T]` containing all the collected elements. + * `Seq` is limited to `Int.MaxValue` elements, this Sink will cancel the stream + * after having received that many elements. * * See also [[Flow.limit]], [[Flow.limitWeighted]], [[Flow.take]], [[Flow.takeWithin]], [[Flow.takeWhile]] */ @@ -169,7 +171,7 @@ object Sink { /** * A `Sink` that will consume the stream and discard the elements. */ - def ignore: Sink[Any, Future[Unit]] = + def ignore: Sink[Any, Future[Done]] = new Sink(new SinkholeSink(DefaultAttributes.ignoreSink, shape("SinkholeSink"))) /** @@ -178,13 +180,13 @@ object Sink { * normal end of the stream, or completed with `Failure` if there is a failure signaled in * the stream.. */ - def foreach[T](f: T ⇒ Unit): Sink[T, Future[Unit]] = + def foreach[T](f: T ⇒ Unit): Sink[T, Future[Done]] = Flow[T].map(f).toMat(Sink.ignore)(Keep.right).named("foreachSink") /** * Combine several sinks with fun-out strategy like `Broadcast` or `Balance` and returns `Sink`. 
*/ - def combine[T, U](first: Sink[U, _], second: Sink[U, _], rest: Sink[U, _]*)(strategy: Int ⇒ Graph[UniformFanOutShape[T, U], Unit]): Sink[T, Unit] = + def combine[T, U](first: Sink[U, _], second: Sink[U, _], rest: Sink[U, _]*)(strategy: Int ⇒ Graph[UniformFanOutShape[T, U], NotUsed]): Sink[T, NotUsed] = Sink.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ @@ -214,7 +216,7 @@ object Sink { * * @see [[#mapAsyncUnordered]] */ - def foreachParallel[T](parallelism: Int)(f: T ⇒ Unit)(implicit ec: ExecutionContext): Sink[T, Future[Unit]] = + def foreachParallel[T](parallelism: Int)(f: T ⇒ Unit)(implicit ec: ExecutionContext): Sink[T, Future[Done]] = Flow[T].mapAsyncUnordered(parallelism)(t ⇒ Future(f(t))).toMat(Sink.ignore)(Keep.right) /** @@ -227,24 +229,34 @@ object Sink { def fold[U, T](zero: U)(f: (U, T) ⇒ U): Sink[T, Future[U]] = Flow[T].fold(zero)(f).toMat(Sink.head)(Keep.right).named("foldSink") + /** + * A `Sink` that will invoke the given function for every received element, giving it its previous + * output (from the second element) and the element as input. + * The returned [[scala.concurrent.Future]] will be completed with value of the final + * function evaluation when the input stream ends, or completed with `Failure` + * if there is a failure signaled in the stream. + */ + def reduce[T](f: (T, T) ⇒ T): Sink[T, Future[T]] = + Flow[T].reduce(f).toMat(Sink.head)(Keep.right).named("reduceSink") + /** * A `Sink` that when the flow is completed, either through a failure or normal * completion, apply the provided function with [[scala.util.Success]] * or [[scala.util.Failure]]. */ - def onComplete[T](callback: Try[Unit] ⇒ Unit): Sink[T, Unit] = { + def onComplete[T](callback: Try[Done] ⇒ Unit): Sink[T, NotUsed] = { - def newOnCompleteStage(): PushStage[T, Unit] = { - new PushStage[T, Unit] { - override def onPush(elem: T, ctx: Context[Unit]): SyncDirective = ctx.pull() + def newOnCompleteStage(): PushStage[T, NotUsed] = { + new PushStage[T, NotUsed] { + override def onPush(elem: T, ctx: Context[NotUsed]): SyncDirective = ctx.pull() - override def onUpstreamFailure(cause: Throwable, ctx: Context[Unit]): TerminationDirective = { + override def onUpstreamFailure(cause: Throwable, ctx: Context[NotUsed]): TerminationDirective = { callback(Failure(cause)) ctx.fail(cause) } - override def onUpstreamFinish(ctx: Context[Unit]): TerminationDirective = { - callback(Success[Unit](())) + override def onUpstreamFinish(ctx: Context[NotUsed]): TerminationDirective = { + callback(Success(Done)) ctx.finish() } } @@ -268,7 +280,7 @@ object Sink { * to use a bounded mailbox with zero `mailbox-push-timeout-time` or use a rate * limiting stage in front of this `Sink`. */ - def actorRef[T](ref: ActorRef, onCompleteMessage: Any): Sink[T, Unit] = + def actorRef[T](ref: ActorRef, onCompleteMessage: Any): Sink[T, NotUsed] = new Sink(new ActorRefSink(ref, onCompleteMessage, DefaultAttributes.actorRefSink, shape("ActorRefSink"))) /** @@ -285,7 +297,7 @@ object Sink { * function will be sent to the destination actor. 
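Illustrative usage of `Sink.combine` with the updated `NotUsed`-typed strategy parameter, broadcasting every element to two sinks (a sketch; names are illustrative):

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Broadcast, Sink, Source }

object SinkCombineExample extends App {
  implicit val system = ActorSystem("sink-combine-example")
  implicit val materializer = ActorMaterializer()

  // Broadcast is the fan-out strategy; the combined sink materializes NotUsed
  val both = Sink.combine(
    Sink.foreach[Int](i => println(s"first sink:  $i")),
    Sink.foreach[Int](i => println(s"second sink: $i")))(Broadcast[Int](_))

  Source(1 to 3).runWith(both)
}
```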
*/ def actorRefWithAck[T](ref: ActorRef, onInitMessage: Any, ackMessage: Any, onCompleteMessage: Any, - onFailureMessage: (Throwable) ⇒ Any = Status.Failure): Sink[T, Unit] = + onFailureMessage: (Throwable) ⇒ Any = Status.Failure): Sink[T, NotUsed] = Sink.fromGraph(new ActorRefBackpressureSinkStage(ref, onInitMessage, ackMessage, onCompleteMessage, onFailureMessage)) /** diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala index 27ee565474..ceb1fc01c3 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala @@ -1,9 +1,10 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.io.{ OutputStream, InputStream, File } +import akka.{ Done, NotUsed } import akka.actor.{ ActorRef, Cancellable, Props } import akka.stream.actor.ActorPublisher import akka.stream.impl.Stages.{ DefaultAttributes, StageModule } @@ -22,6 +23,8 @@ import scala.collection.immutable import scala.concurrent.duration.{ FiniteDuration, _ } import scala.concurrent.{ Future, Promise } import akka.stream.impl.fusing.Buffer +import java.util.concurrent.CompletionStage +import scala.compat.java8.FutureConverters._ /** * A `Source` is a set of stream processing steps that has one open output. It can comprise @@ -100,6 +103,17 @@ final class Source[+Out, +Mat](private[stream] override val module: Module) def runFold[U](zero: U)(f: (U, Out) ⇒ U)(implicit materializer: Materializer): Future[U] = runWith(Sink.fold(zero)(f)) + /** + * Shortcut for running this `Source` with a reduce function. + * The given function is invoked for every received element, giving it its previous + * output (from the second element) and the element as input. + * The returned [[scala.concurrent.Future]] will be completed with value of the final + * function evaluation when the input stream ends, or completed with `Failure` + * if there is a failure signaled in the stream. + */ + def runReduce[U >: Out](f: (U, U) ⇒ U)(implicit materializer: Materializer): Future[U] = + runWith(Sink.reduce(f)) + /** * Shortcut for running this `Source` with a foreach procedure. The given procedure is invoked * for each received element. @@ -107,7 +121,8 @@ final class Source[+Out, +Mat](private[stream] override val module: Module) * normal end of the stream, or completed with `Failure` if there is a failure signaled in * the stream. */ - def runForeach(f: Out ⇒ Unit)(implicit materializer: Materializer): Future[Unit] = runWith(Sink.foreach(f)) + // FIXME: Out => Unit should stay, right?? + def runForeach(f: Out ⇒ Unit)(implicit materializer: Materializer): Future[Done] = runWith(Sink.foreach(f)) /** * Change the attributes of this [[Source]] to the given ones and seal the list @@ -138,7 +153,7 @@ final class Source[+Out, +Mat](private[stream] override val module: Module) /** * Combines several sources with fun-in strategy like `Merge` or `Concat` and returns `Source`. 
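The new `Source.fromCompletionStage` above bridges a Java `CompletionStage` into a single-element source (the scala-java8-compat conversion happens internally, as shown in the diff); a minimal sketch:

```scala
import java.util.concurrent.CompletableFuture

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }

object FromCompletionStageExample extends App {
  implicit val system = ActorSystem("completion-stage-example")
  implicit val materializer = ActorMaterializer()

  // a CompletionStage typically produced by some Java API
  val javaFuture = CompletableFuture.completedFuture("hello from Java")

  // emits exactly one element once the CompletionStage completes, then completes the stream
  Source.fromCompletionStage(javaFuture)
    .runWith(Sink.foreach(println))
}
```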
*/ - def combine[T, U](first: Source[T, _], second: Source[T, _], rest: Source[T, _]*)(strategy: Int ⇒ Graph[UniformFanInShape[T, U], Unit]): Source[U, Unit] = + def combine[T, U](first: Source[T, _], second: Source[T, _], rest: Source[T, _]*)(strategy: Int ⇒ Graph[UniformFanInShape[T, U], NotUsed]): Source[U, NotUsed] = Source.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val c = b.add(strategy(rest.size + 2)) @@ -167,7 +182,7 @@ object Source { * that mediate the flow of elements downstream and the propagation of * back-pressure upstream. */ - def fromPublisher[T](publisher: Publisher[T]): Source[T, Unit] = + def fromPublisher[T](publisher: Publisher[T]): Source[T, NotUsed] = new Source(new PublisherSource(publisher, DefaultAttributes.publisherSource, shape("PublisherSource"))) /** @@ -180,7 +195,7 @@ object Source { * Elements are pulled out of the iterator in accordance with the demand coming * from the downstream transformation steps. */ - def fromIterator[T](f: () ⇒ Iterator[T]): Source[T, Unit] = + def fromIterator[T](f: () ⇒ Iterator[T]): Source[T, NotUsed] = apply(new immutable.Iterable[T] { override def iterator: Iterator[T] = f() override def toString: String = "() => Iterator" @@ -205,7 +220,7 @@ object Source { * stream will see an individual flow of elements (always starting from the * beginning) regardless of when they subscribed. */ - def apply[T](iterable: immutable.Iterable[T]): Source[T, Unit] = + def apply[T](iterable: immutable.Iterable[T]): Source[T, NotUsed] = single(iterable).mapConcat(ConstantFun.scalaIdentityFunction).withAttributes(DefaultAttributes.iterableSource) /** @@ -214,9 +229,18 @@ object Source { * may happen before or after materializing the `Flow`. * The stream terminates with a failure if the `Future` is completed with a failure. */ - def fromFuture[T](future: Future[T]): Source[T, Unit] = + def fromFuture[T](future: Future[T]): Source[T, NotUsed] = fromGraph(new FutureSource(future)) + /** + * Start a new `Source` from the given `Future`. The stream will consist of + * one element when the `Future` is completed with a successful value, which + * may happen before or after materializing the `Flow`. + * The stream terminates with a failure if the `Future` is completed with a failure. + */ + def fromCompletionStage[T](future: CompletionStage[T]): Source[T, NotUsed] = + fromGraph(new FutureSource(future.toScala)) + /** * Elements are emitted periodically with the specified interval. * The tick element will be delivered to downstream consumers that has requested any elements. @@ -225,26 +249,22 @@ object Source { * receive new tick elements as soon as it has requested more elements. */ def tick[T](initialDelay: FiniteDuration, interval: FiniteDuration, tick: T): Source[T, Cancellable] = - fromGraph(new TickSource[T](initialDelay, interval, tick).withAttributes(DefaultAttributes.tickSource)) + fromGraph(new TickSource[T](initialDelay, interval, tick)) /** * Create a `Source` with one element. * Every connected `Sink` of this stream will see an individual stream consisting of one element. */ - def single[T](element: T): Source[T, Unit] = - fromGraph(new GraphStages.SingleSource(element).withAttributes(DefaultAttributes.singleSource)) + def single[T](element: T): Source[T, NotUsed] = + fromGraph(new GraphStages.SingleSource(element)) /** * Create a `Source` that will continually emit the given element. 
*/ - def repeat[T](element: T): Source[T, Unit] = - single(new immutable.Iterable[T] { - override val iterator: Iterator[T] = Iterator.continually(element) - - override def toString: String = "repeat(" + element + ")" - }) - .mapConcat(ConstantFun.scalaIdentityFunction) - .withAttributes(DefaultAttributes.repeat) + def repeat[T](element: T): Source[T, NotUsed] = { + val next = Some((element, element)) + unfold(element)(_ ⇒ next).withAttributes(DefaultAttributes.repeat) + } /** * Create a `Source` that will unfold a value of type `S` into @@ -259,8 +279,8 @@ object Source { * } * }}} */ - def unfold[S, E](s: S)(f: S ⇒ Option[(S, E)]): Source[E, Unit] = - Source.fromGraph(new Unfold(s, f)).withAttributes(DefaultAttributes.unfold) + def unfold[S, E](s: S)(f: S ⇒ Option[(S, E)]): Source[E, NotUsed] = + Source.fromGraph(new Unfold(s, f)) /** * Same as [[unfold]], but uses an async function to generate the next state-element tuple. @@ -277,38 +297,14 @@ object Source { * } * }}} */ - def unfoldAsync[S, E](s: S)(f: S ⇒ Future[Option[(S, E)]]): Source[E, Unit] = - Source.fromGraph(new UnfoldAsync(s, f)).withAttributes(DefaultAttributes.unfoldAsync) - - /** - * Simpler [[unfold]], for infinite sequences. - * - * {{{ - * Source.unfoldInf(0 → 1) { - * case (a, b) ⇒ (b → (a + b)) → a - * } - * }}} - */ - def unfoldInf[S, E](s: S)(f: S ⇒ (S, E)): Source[E, Unit] = { - Source.fromGraph(GraphDSL.create() { implicit b ⇒ - import GraphDSL.Implicits._ - - val uzip = b.add(UnzipWith(f)) - val cnct = b.add(Concat[S]()) - val init = Source.single(s) - - init ~> cnct ~> uzip.in - cnct <~ Flow[S].buffer(2, OverflowStrategy.backpressure) <~ uzip.out0 - - SourceShape(uzip.out1) - }).withAttributes(DefaultAttributes.unfoldInf) - } + def unfoldAsync[S, E](s: S)(f: S ⇒ Future[Option[(S, E)]]): Source[E, NotUsed] = + Source.fromGraph(new UnfoldAsync(s, f)) /** * A `Source` with no elements, i.e. an empty stream that is completed immediately for every connected `Sink`. */ - def empty[T]: Source[T, Unit] = _empty - private[this] val _empty: Source[Nothing, Unit] = + def empty[T]: Source[T, NotUsed] = _empty + private[this] val _empty: Source[Nothing, NotUsed] = new Source( new PublisherSource[Nothing]( EmptyPublisher, @@ -332,7 +328,7 @@ object Source { /** * Create a `Source` that immediately ends the stream with the `cause` error to every connected `Sink`. */ - def failed[T](cause: Throwable): Source[T, Unit] = + def failed[T](cause: Throwable): Source[T, NotUsed] = new Source( new PublisherSource( ErrorPublisher(cause, "FailedSource")[T], @@ -387,14 +383,14 @@ object Source { */ def actorRef[T](bufferSize: Int, overflowStrategy: OverflowStrategy): Source[T, ActorRef] = { require(bufferSize >= 0, "bufferSize must be greater than or equal to 0") - require(overflowStrategy != OverflowStrategy.Backpressure, "Backpressure overflowStrategy not supported") + require(overflowStrategy != OverflowStrategies.Backpressure, "Backpressure overflowStrategy not supported") new Source(new ActorRefSource(bufferSize, overflowStrategy, DefaultAttributes.actorRefSource, shape("ActorRefSource"))) } /** * Combines several sources with fun-in strategy like `Merge` or `Concat` and returns `Source`. 
*/ - def combine[T, U](first: Source[T, _], second: Source[T, _], rest: Source[T, _]*)(strategy: Int ⇒ Graph[UniformFanInShape[T, U], Unit]): Source[U, Unit] = + def combine[T, U](first: Source[T, _], second: Source[T, _], rest: Source[T, _]*)(strategy: Int ⇒ Graph[UniformFanInShape[T, U], NotUsed]): Source[U, NotUsed] = Source.fromGraph(GraphDSL.create() { implicit b ⇒ import GraphDSL.Implicits._ val c = b.add(strategy(rest.size + 2)) @@ -413,29 +409,33 @@ object Source { /** * Creates a `Source` that is materialized as an [[akka.stream.SourceQueue]]. * You can push elements to the queue and they will be emitted to the stream if there is demand from downstream, - * otherwise they will be buffered until request for demand is received. + * otherwise they will be buffered until request for demand is received. Elements in the buffer will be discarded + * if downstream is terminated. * * Depending on the defined [[akka.stream.OverflowStrategy]] it might drop elements if * there is no space available in the buffer. * * Acknowledgement mechanism is available. - * [[akka.stream.SourceQueue.offer]] returns ``Future[Boolean]`` which completes with true - * if element was added to buffer or sent downstream. It completes - * with false if element was dropped. + * [[akka.stream.SourceQueue.offer]] returns ``Future[StreamCallbackStatus[Boolean]]`` which completes with `Success(true)` + * if element was added to buffer or sent downstream. It completes with `Success(false)` if element was dropped. Can also complete + * with [[akka.stream.StreamCallbackStatus.Failure]] - when stream failed or [[akka.stream.StreamCallbackStatus.StreamCompleted]] + * when downstream is completed. * - * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete `offer():Future` until buffer is full. + * The strategy [[akka.stream.OverflowStrategy.backpressure]] will not complete last `offer():Future` + * call when buffer is full. * - * The buffer can be disabled by using `bufferSize` of 0 and then received messages are dropped - * if there is no demand from downstream. When `bufferSize` is 0 the `overflowStrategy` does - * not matter. + * You can watch accessibility of stream with [[akka.stream.SourceQueue.watchCompletion]]. + * It returns future that completes with success when stream is completed or fail when stream is failed. * - * @param bufferSize The size of the buffer in element count + * The buffer can be disabled by using `bufferSize` of 0 and then received message will wait for downstream demand. + * When `bufferSize` is 0 the `overflowStrategy` does not matter. + * + * SourceQueue that current source is materialized to is for single thread usage only. 
+ * + * @param bufferSize size of buffer in element count * @param overflowStrategy Strategy that is used when incoming elements cannot fit inside the buffer - * @param timeout Timeout for ``SourceQueue.offer(T):Future[Boolean]`` */ - def queue[T](bufferSize: Int, overflowStrategy: OverflowStrategy, timeout: FiniteDuration = 5.seconds): Source[T, SourceQueue[T]] = { - require(bufferSize >= 0, "bufferSize must be greater than or equal to 0") - new Source(new AcknowledgeSource(bufferSize, overflowStrategy, DefaultAttributes.acknowledgeSource, shape("AcknowledgeSource"))) - } + def queue[T](bufferSize: Int, overflowStrategy: OverflowStrategy): Source[T, SourceQueue[T]] = + Source.fromGraph(new QueueSource(bufferSize, overflowStrategy).withAttributes(DefaultAttributes.queueSource)) } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/StreamConverters.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/StreamConverters.scala index 32f5c34476..130235913b 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/StreamConverters.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/StreamConverters.scala @@ -1,11 +1,12 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.io.{ OutputStream, InputStream } import akka.stream.ActorAttributes +import akka.stream.io.IOResult import akka.stream.impl.Stages.DefaultAttributes import akka.stream.impl.io.{ InputStreamSinkStage, OutputStreamSink, OutputStreamSourceStage, InputStreamSource } import akka.util.ByteString @@ -29,12 +30,13 @@ object StreamConverters { * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. * - * It materializes a [[Future]] containing the number of bytes read from the source file upon completion. + * It materializes a [[Future]] of [[IOResult]] containing the number of bytes read from the source file upon completion, + * and a possible exception if IO operation was not completed successfully. * * @param in a function which creates the InputStream to read from * @param chunkSize the size of each read operation, defaults to 8192 */ - def fromInputStream(in: () ⇒ InputStream, chunkSize: Int = 8192): Source[ByteString, Future[Long]] = + def fromInputStream(in: () ⇒ InputStream, chunkSize: Int = 8192): Source[ByteString, Future[IOResult]] = new Source(new InputStreamSource(in, chunkSize, DefaultAttributes.inputStreamSource, sourceShape("InputStreamSource"))) /** @@ -54,13 +56,15 @@ object StreamConverters { /** * Creates a Sink which writes incoming [[ByteString]]s to an [[OutputStream]] created by the given function. * - * Materializes a [[Future]] that will be completed with the size of the file (in bytes) at the streams completion. + * Materializes a [[Future]] of [[IOResult]] that will be completed with the size of the file (in bytes) at the streams completion, + * and a possible exception if IO operation was not completed successfully. * * You can configure the default dispatcher for this Source by changing the `akka.stream.blocking-io-dispatcher` or * set it for a given Source by using [[ActorAttributes]]. + * If `autoFlush` is true the OutputStream will be flushed whenever a byte array is written, defaults to false. 
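A sketch of the reworked `Source.queue` above: the `timeout` parameter is gone and the materialized `SourceQueue.offer` now returns a `Future[QueueOfferResult]` as declared in Queue.scala earlier in this diff. The handling below only prints the result value, since the concrete result constants are described only in that scaladoc; names are illustrative.

```scala
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, OverflowStrategy }
import akka.stream.scaladsl.{ Sink, Source }

object SourceQueueExample extends App {
  implicit val system = ActorSystem("source-queue-example")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // elements offered to the queue are buffered (up to 10) until downstream demands them
  val queue = Source.queue[String](bufferSize = 10, overflowStrategy = OverflowStrategy.dropHead)
    .to(Sink.foreach(println))
    .run()

  // offer returns Future[QueueOfferResult] (Enqueued, Dropped, ... per the Queue.scala scaladoc)
  queue.offer("hello").foreach(result => println(s"offer result: $result"))

  // completes when the stream terminates
  queue.watchCompletion().foreach(_ => println("stream completed"))
}
```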
*/ - def fromOutputStream(out: () ⇒ OutputStream): Sink[ByteString, Future[Long]] = - new Sink(new OutputStreamSink(out, DefaultAttributes.outputStreamSink, sinkShape("OutputStreamSink"))) + def fromOutputStream(out: () ⇒ OutputStream, autoFlush: Boolean = false): Sink[ByteString, Future[IOResult]] = + new Sink(new OutputStreamSink(out, DefaultAttributes.outputStreamSink, sinkShape("OutputStreamSink"), autoFlush)) /** * Creates a Sink which when materialized will return an [[InputStream]] which it is possible diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/SubFlow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/SubFlow.scala index 7ae0a29ced..f83fbd3f76 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/SubFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/SubFlow.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.scaladsl diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Tcp.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Tcp.scala index 79726c97f1..517fc0f4ad 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Tcp.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Tcp.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2014 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.stream.scaladsl import java.net.InetSocketAddress +import akka.NotUsed import akka.actor._ import akka.io.Inet.SocketOption import akka.io.{ IO, Tcp ⇒ IoTcp } @@ -32,7 +33,7 @@ object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider { case class IncomingConnection( localAddress: InetSocketAddress, remoteAddress: InetSocketAddress, - flow: Flow[ByteString, ByteString, Unit]) { + flow: Flow[ByteString, ByteString, NotUsed]) { /** * Handles the connection using the given flow, which is materialized exactly once and the respective diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala index f76ea0b218..7a25923eda 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/package.scala @@ -1,8 +1,13 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream +import java.util.concurrent.CompletionStage +import scala.concurrent.Future +import scala.compat.java8.FutureConverters +import akka.japi.function + /** * Scala API: The flow DSL allows the formulation of stream transformations based on some * input. The starting point is called [[Source]] and can be a collection, an iterator, @@ -47,9 +52,14 @@ package akka.stream * by those methods that materialize the Flow into a series of * [[org.reactivestreams.Processor]] instances. The returned reactive stream * is fully started and active. - * - * Use [[ImplicitMaterializer]] to define an implicit [[akka.stream.Materializer]] - * inside an [[akka.actor.Actor]]. 
*/ package object scaladsl { + implicit class SourceToCompletionStage[Out, T](val src: Source[Out, Future[T]]) extends AnyVal { + def toCompletionStage(): Source[Out, CompletionStage[T]] = + src.mapMaterializedValue(FutureConverters.toJava) + } + implicit class SinkToCompletionStage[In, T](val sink: Sink[In, Future[T]]) extends AnyVal { + def toCompletionStage(): Sink[In, CompletionStage[T]] = + sink.mapMaterializedValue(FutureConverters.toJava) + } } diff --git a/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala b/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala index 89ad8b7d83..1a7063d5fb 100644 --- a/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala +++ b/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala @@ -1,24 +1,23 @@ /** - * Copyright (C) 2015 Typesafe Inc. + * Copyright (C) 2015-2016 Typesafe Inc. */ package akka.stream.stage -import java.util import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicReference } +import akka.NotUsed +import java.util.concurrent.locks.ReentrantLock import akka.actor._ import akka.dispatch.sysmsg.{ DeathWatchNotification, SystemMessage, Unwatch, Watch } import akka.event.LoggingAdapter import akka.japi.function.{ Effect, Procedure } import akka.stream._ import akka.stream.impl.StreamLayout.Module -import akka.stream.impl.fusing.GraphInterpreter.GraphAssembly -import akka.stream.impl.fusing.{ GraphInterpreter, GraphModule, GraphStageModule, SubSource, SubSink } +import akka.stream.impl.fusing.{ GraphInterpreter, GraphStageModule, SubSource, SubSink } import akka.stream.impl.{ ReactiveStreamsCompliance, SeqActorName } import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import scala.collection.{ immutable, mutable } import scala.concurrent.duration.FiniteDuration -import akka.stream.impl.SubscriptionTimeoutException import akka.stream.actor.ActorSubscriberMessage import akka.stream.actor.ActorPublisherMessage @@ -43,9 +42,9 @@ abstract class GraphStageWithMaterializedValue[+S <: Shape, +M] extends Graph[S, * its input and output ports and a factory function that creates a [[GraphStageLogic]] which implements the processing * logic that ties the ports together. 
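The two implicit classes added to the `scaladsl` package object expose a `Future`-materializing Source or Sink with a Java 8 `CompletionStage` materialized value by mapping through `FutureConverters.toJava`, as the hunk shows. A sketch, assuming akka-stream and scala-java8-compat on the classpath (names are illustrative):

```scala
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import akka.stream.scaladsl._

object CompletionStageSketch {
  // A Sink materializing a Scala Future...
  val sum: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)

  // ...exposed to Java 8 callers via the new implicit conversion, which
  // simply maps the materialized value with FutureConverters.toJava.
  val forJava: Sink[Int, CompletionStage[Int]] = sum.toCompletionStage()
}
```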
*/ -abstract class GraphStage[S <: Shape] extends GraphStageWithMaterializedValue[S, Unit] { - final override def createLogicAndMaterializedValue(inheritedAttributes: Attributes) = - (createLogic(inheritedAttributes), Unit) +abstract class GraphStage[S <: Shape] extends GraphStageWithMaterializedValue[S, NotUsed] { + final override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, NotUsed) = + (createLogic(inheritedAttributes), NotUsed) def createLogic(inheritedAttributes: Attributes): GraphStageLogic } @@ -128,14 +127,34 @@ object GraphStageLogic { /** * Minimal actor to work with other actors and watch them in a synchronous ways */ - final class StageActorRef(val provider: ActorRefProvider, val log: LoggingAdapter, - getAsyncCallback: StageActorRef.Receive ⇒ AsyncCallback[(ActorRef, Any)], - initialReceive: StageActorRef.Receive, - override val path: ActorPath) extends akka.actor.MinimalActorRef { - import StageActorRef._ + final class StageActor(materializer: ActorMaterializer, + getAsyncCallback: StageActorRef.Receive ⇒ AsyncCallback[(ActorRef, Any)], + initialReceive: StageActorRef.Receive) { private val callback = getAsyncCallback(internalReceive) + private val functionRef: FunctionRef = { + val cell = materializer.supervisor match { + case ref: LocalActorRef ⇒ ref.underlying + case ref: RepointableActorRef if ref.isStarted ⇒ ref.underlying.asInstanceOf[ActorCell] + case unknown ⇒ + throw new IllegalStateException(s"Stream supervisor must be a local actor, was [${unknown.getClass.getName}]") + } + cell.addFunctionRef { + case (_, m @ (PoisonPill | Kill)) ⇒ + materializer.logger.warning("{} message sent to StageActor({}) will be ignored, since it is not a real Actor." + + "Use a custom message type to communicate with it instead.", m, functionRef.path) + case pair ⇒ callback.invoke(pair) + } + } + + /** + * The ActorRef by which this StageActor can be contacted from the outside. + * This is a full-fledged ActorRef that supports watching and being watched + * as well as location transparent (remote) communication. + */ + def ref: ActorRef = functionRef + @volatile private[this] var behaviour = initialReceive @@ -143,32 +162,14 @@ object GraphStageLogic { private[akka] def internalReceive(pack: (ActorRef, Any)): Unit = { pack._2 match { case Terminated(ref) ⇒ - if (watching contains ref) { - watching -= ref + if (functionRef.isWatching(ref)) { + functionRef.unwatch(ref) behaviour(pack) } case _ ⇒ behaviour(pack) } } - override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = { - message match { - case m @ (PoisonPill | Kill) ⇒ - log.warning("{} message sent to StageActorRef({}) will be ignored, since it is not a real Actor." + - "Use a custom message type to communicate with it instead.", m, path) - case _ ⇒ - callback.invoke((sender, message)) - } - } - - override def sendSystemMessage(message: SystemMessage): Unit = message match { - case w: Watch ⇒ addWatcher(w.watchee, w.watcher) - case u: Unwatch ⇒ remWatcher(u.watchee, u.watcher) - case DeathWatchNotification(actorRef, _, _) ⇒ - this.!(Terminated(actorRef)(existenceConfirmed = true, addressTerminated = false)) - case _ ⇒ //ignore all other messages - } - /** * Special `become` allowing to swap the behaviour of this StageActorRef. * Unbecome is not available. 
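Since `GraphStage` now fixes the materialized value to `NotUsed`, a stage that needs no materialized value looks like the following sketch (class and port names are illustrative):

```scala
import akka.NotUsed
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.scaladsl.Flow
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }

// A do-nothing pass-through stage: extending GraphStage (rather than
// GraphStageWithMaterializedValue) now means the materialized value is NotUsed.
class PassThrough[A] extends GraphStage[FlowShape[A, A]] {
  val in = Inlet[A]("PassThrough.in")
  val out = Outlet[A]("PassThrough.out")
  override val shape = FlowShape(in, out)

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) {
      setHandler(in, new InHandler {
        override def onPush(): Unit = push(out, grab(in))
      })
      setHandler(out, new OutHandler {
        override def onPull(): Unit = pull(in)
      })
    }
}

object PassThrough {
  // The NotUsed materialized value shows up directly in the Flow type.
  val asFlow: Flow[Int, Int, NotUsed] = Flow.fromGraph(new PassThrough[Int])
}
```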
@@ -177,92 +178,14 @@ object GraphStageLogic { behaviour = receive } - private[this] var watching = ActorCell.emptyActorRefSet - private[this] val _watchedBy = new AtomicReference[Set[ActorRef]](ActorCell.emptyActorRefSet) + def stop(): Unit = functionRef.stop() - override def isTerminated = _watchedBy.get() == StageTerminatedTombstone + def watch(actorRef: ActorRef): Unit = functionRef.watch(actorRef) - //noinspection EmptyCheck - protected def sendTerminated(): Unit = { - val watchedBy = _watchedBy.getAndSet(StageTerminatedTombstone) - if (watchedBy != StageTerminatedTombstone) { - if (watchedBy.nonEmpty) { - watchedBy foreach sendTerminated(ifLocal = false) - watchedBy foreach sendTerminated(ifLocal = true) - } - if (watching.nonEmpty) { - watching foreach unwatchWatched - watching = Set.empty - } - } - } - - private def sendTerminated(ifLocal: Boolean)(watcher: ActorRef): Unit = - if (watcher.asInstanceOf[ActorRefScope].isLocal == ifLocal) - watcher.asInstanceOf[InternalActorRef].sendSystemMessage(DeathWatchNotification(this, existenceConfirmed = true, addressTerminated = false)) - - private def unwatchWatched(watched: ActorRef): Unit = - watched.asInstanceOf[InternalActorRef].sendSystemMessage(Unwatch(watched, this)) - - override def stop(): Unit = sendTerminated() - - @tailrec final def addWatcher(watchee: ActorRef, watcher: ActorRef): Unit = - _watchedBy.get() match { - case StageTerminatedTombstone ⇒ - sendTerminated(ifLocal = true)(watcher) - sendTerminated(ifLocal = false)(watcher) - - case watchedBy ⇒ - val watcheeSelf = watchee == this - val watcherSelf = watcher == this - - if (watcheeSelf && !watcherSelf) { - if (!watchedBy.contains(watcher)) - if (!_watchedBy.compareAndSet(watchedBy, watchedBy + watcher)) - addWatcher(watchee, watcher) // try again - } else if (!watcheeSelf && watcherSelf) { - log.warning("externally triggered watch from {} to {} is illegal on StageActorRef", watcher, watchee) - } else { - log.error("BUG: illegal Watch(%s,%s) for %s".format(watchee, watcher, this)) - } - } - - @tailrec final def remWatcher(watchee: ActorRef, watcher: ActorRef): Unit = { - _watchedBy.get() match { - case StageTerminatedTombstone ⇒ // do nothing... 
- case watchedBy ⇒ - val watcheeSelf = watchee == this - val watcherSelf = watcher == this - - if (watcheeSelf && !watcherSelf) { - if (watchedBy.contains(watcher)) - if (!_watchedBy.compareAndSet(watchedBy, watchedBy - watcher)) - remWatcher(watchee, watcher) // try again - } else if (!watcheeSelf && watcherSelf) { - log.warning("externally triggered unwatch from {} to {} is illegal on StageActorRef", watcher, watchee) - } else { - log.error("BUG: illegal Unwatch(%s,%s) for %s".format(watchee, watcher, this)) - } - } - } - - def watch(actorRef: ActorRef): Unit = { - watching += actorRef - actorRef.asInstanceOf[InternalActorRef].sendSystemMessage(Watch(actorRef.asInstanceOf[InternalActorRef], this)) - } - - def unwatch(actorRef: ActorRef): Unit = { - watching -= actorRef - actorRef.asInstanceOf[InternalActorRef].sendSystemMessage(Unwatch(actorRef.asInstanceOf[InternalActorRef], this)) - } + def unwatch(actorRef: ActorRef): Unit = functionRef.unwatch(actorRef) } object StageActorRef { type Receive = ((ActorRef, Any)) ⇒ Unit - - val StageTerminatedTombstone = null - - // globally sequential, one should not depend on these names in any case - val name = SeqActorName("StageActorRef") } } @@ -375,6 +298,14 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: if (_interpreter != null) _interpreter.setHandler(conn(in), handler) } + /** + * Assign callbacks for linear stage for both [[Inlet]] and [[Outlet]] + */ + final protected def setHandlers(in: Inlet[_], out: Outlet[_], handler: InHandler with OutHandler): Unit = { + setHandler(in, handler) + setHandler(out, handler) + } + /** * Retrieves the current callback for the events on the given [[Inlet]] */ @@ -766,7 +697,7 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: } else { setOrAddEmitting(out, new EmittingIterator(out, elems, getNonEmittingHandler(out), andThen)) } - } + } else andThen() /** * Emit a sequence of elements through the given outlet, suspending execution if necessary. 
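The new `setHandlers(in, out, handler)` helper registers a single `InHandler with OutHandler` for both ports of a linear stage, so the pass-through sketch above collapses to the following (names illustrative):

```scala
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }

class PassThroughCompact[A] extends GraphStage[FlowShape[A, A]] {
  val in = Inlet[A]("PassThroughCompact.in")
  val out = Outlet[A]("PassThroughCompact.out")
  override val shape = FlowShape(in, out)

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with InHandler with OutHandler {
      override def onPush(): Unit = push(out, grab(in))
      override def onPull(): Unit = pull(in)
      // One call wires the combined handler to both the inlet and the outlet.
      setHandlers(in, out, this)
    }
}
```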
@@ -950,8 +881,8 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: final protected def createAsyncCallback[T](handler: Procedure[T]): AsyncCallback[T] = getAsyncCallback(handler.apply) - private var _stageActorRef: StageActorRef = _ - final def stageActorRef: ActorRef = _stageActorRef match { + private var _stageActor: StageActor = _ + final def stageActor: StageActor = _stageActor match { case null ⇒ throw StageActorRefNotInitializedException() case ref ⇒ ref } @@ -974,14 +905,12 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: * @return minimal actor with watch method */ // FIXME: I don't like the Pair allocation :( - final protected def getStageActorRef(receive: ((ActorRef, Any)) ⇒ Unit): StageActorRef = { - _stageActorRef match { + final protected def getStageActor(receive: ((ActorRef, Any)) ⇒ Unit): StageActor = { + _stageActor match { case null ⇒ val actorMaterializer = ActorMaterializer.downcast(interpreter.materializer) - val provider = actorMaterializer.supervisor.asInstanceOf[InternalActorRef].provider - val path = actorMaterializer.supervisor.path / StageActorRef.name.next() - _stageActorRef = new StageActorRef(provider, actorMaterializer.logger, getAsyncCallback, receive, path) - _stageActorRef + _stageActor = new StageActor(actorMaterializer, getAsyncCallback, receive) + _stageActor case existing ⇒ existing.become(receive) existing @@ -995,9 +924,9 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: // Internal hooks to avoid reliance on user calling super in postStop /** INTERNAL API */ protected[stream] def afterPostStop(): Unit = { - if (_stageActorRef ne null) { - _stageActorRef.stop() - _stageActorRef = null + if (_stageActor ne null) { + _stageActor.stop() + _stageActor = null } } @@ -1042,7 +971,7 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: } }.invoke _) - def sink: Graph[SinkShape[T], Unit] = _sink + def sink: Graph[SinkShape[T], NotUsed] = _sink def setHandler(handler: InHandler): Unit = this.handler = handler @@ -1091,13 +1020,13 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: private var available = false private var closed = false - private val callback = getAsyncCallback[ActorPublisherMessage] { + private val callback = getAsyncCallback[SubSink.Command] { case SubSink.RequestOne ⇒ if (!closed) { available = true handler.onPull() } - case ActorPublisherMessage.Cancel ⇒ + case SubSink.Cancel ⇒ if (!closed) { available = false closed = true @@ -1116,7 +1045,7 @@ abstract class GraphStageLogic private[stream] (val inCount: Int, val outCount: /** * Get the Source for this dynamic output port. */ - def source: Graph[SourceShape[T], Unit] = _source + def source: Graph[SourceShape[T], NotUsed] = _source /** * Set OutHandler for this dynamic output port; this needs to be done before @@ -1342,3 +1271,55 @@ abstract class AbstractOutHandler extends OutHandler * (completing when upstream completes, failing when upstream fails, completing when downstream cancels). */ abstract class AbstractInOutHandler extends InHandler with OutHandler + +/** + * INTERNAL API + * This trait wraps callback for `GraphStage` stage instances and handle gracefully cases when stage is + * not yet initialized or already finished. + * + * While `GraphStage` has not initialized it adds all requests to list. 
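`getStageActor` now hands back the new `StageActor` wrapper, whose `ref` is a watchable `ActorRef` backed by a `FunctionRef`. A hedged sketch of a stage that watches an external actor and fails the stream when it terminates; only the members visible in this diff (`getStageActor`, `watch` and the receive callback) are assumed, and all names are illustrative:

```scala
import akka.actor.{ ActorRef, Terminated }
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }

// Passes elements through but fails the stream if `watched` terminates.
class FailOnTerminate[A](watched: ActorRef) extends GraphStage[FlowShape[A, A]] {
  val in = Inlet[A]("FailOnTerminate.in")
  val out = Outlet[A]("FailOnTerminate.out")
  override val shape = FlowShape(in, out)

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with InHandler with OutHandler {
      override def preStart(): Unit = {
        // The receive function is invoked on the stage's own execution context
        // via the underlying async callback.
        val stageActor = getStageActor {
          case (_, Terminated(`watched`)) ⇒
            failStage(new RuntimeException(s"$watched terminated"))
          case _ ⇒ // ignore anything else sent to stageActor.ref
        }
        stageActor.watch(watched)
      }

      override def onPush(): Unit = push(out, grab(in))
      override def onPull(): Unit = pull(in)
      setHandlers(in, out, this)
    }
}
```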
+ * As soon as `GraphStage` is started it stops collecting requests (pointing to real callback + * function) and run all the callbacks from the list + * + * Supposed to be used by GraphStages that share call back to outer world + */ +private[akka] trait CallbackWrapper[T] extends AsyncCallback[T] { + private trait CallbackState + private case class NotInitialized(list: List[T]) extends CallbackState + private case class Initialized(f: T ⇒ Unit) extends CallbackState + private case class Stopped(f: T ⇒ Unit) extends CallbackState + + /* + * To preserve message order when switching between not initialized / initialized states + * lock is used. Case is similar to RepointableActorRef + */ + private[this] final val lock = new ReentrantLock + + private[this] val callbackState = new AtomicReference[CallbackState](NotInitialized(Nil)) + + def stopCallback(f: T ⇒ Unit): Unit = locked { + callbackState.set(Stopped(f)) + } + + def initCallback(f: T ⇒ Unit): Unit = locked { + val list = (callbackState.getAndSet(Initialized(f)): @unchecked) match { + case NotInitialized(l) ⇒ l + } + list.reverse.foreach(f) + } + + override def invoke(arg: T): Unit = locked { + callbackState.get() match { + case Initialized(cb) ⇒ cb(arg) + case list @ NotInitialized(l) ⇒ callbackState.compareAndSet(list, NotInitialized(arg :: l)) + case Stopped(cb) ⇒ + lock.unlock() + cb(arg) + } + } + + private[this] def locked(body: ⇒ Unit): Unit = { + lock.lock() + try body finally if (lock.isLocked) lock.unlock() + } +} \ No newline at end of file diff --git a/akka-stream/src/main/scala/akka/stream/stage/Stage.scala b/akka-stream/src/main/scala/akka/stream/stage/Stage.scala index bc589c86de..4c8cb2c913 100644 --- a/akka-stream/src/main/scala/akka/stream/stage/Stage.scala +++ b/akka-stream/src/main/scala/akka/stream/stage/Stage.scala @@ -1,8 +1,9 @@ /** - * Copyright (C) 2014 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.stream.stage +import akka.NotUsed import akka.stream._ import scala.annotation.unchecked.uncheckedVariance @@ -28,6 +29,7 @@ import scala.util.control.NonFatal * @see [[akka.stream.scaladsl.Flow#transform]] * @see [[akka.stream.javadsl.Flow#transform]] */ +@deprecated("Please use GraphStage instead.", "2.4.2") sealed trait Stage[-In, +Out] /** @@ -181,9 +183,10 @@ private[stream] object AbstractStage { } class PushPullGraphStage[-In, +Out, Ext](_factory: (Attributes) ⇒ Stage[In, Out], _stageAttributes: Attributes) - extends PushPullGraphStageWithMaterializedValue[In, Out, Ext, Unit]((att: Attributes) ⇒ (_factory(att), ()), _stageAttributes) + extends PushPullGraphStageWithMaterializedValue[In, Out, Ext, NotUsed]((att: Attributes) ⇒ (_factory(att), NotUsed), _stageAttributes) } +@deprecated("Please use GraphStage instead.", "2.4.2") abstract class AbstractStage[-In, Out, PushD <: Directive, PullD <: Directive, Ctx <: Context[Out], LifeCtx <: LifecycleContext] extends Stage[In, Out] { /** @@ -330,11 +333,13 @@ abstract class AbstractStage[-In, Out, PushD <: Directive, PullD <: Directive, C * @see [[StatefulStage]] * @see [[PushStage]] */ +@deprecated("Please use GraphStage instead.", "2.4.2") abstract class PushPullStage[In, Out] extends AbstractStage[In, Out, SyncDirective, SyncDirective, Context[Out], LifecycleContext] /** * `PushStage` is a [[PushPullStage]] that always perform transitive pull by calling `ctx.pull` from `onPull`. */ +@deprecated("Please use GraphStage instead.", "2.4.2") abstract class PushStage[In, Out] extends PushPullStage[In, Out] { /** * Always pulls from upstream. 
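`CallbackWrapper` itself is internal API, but the buffering idea its scaladoc describes (queue `invoke` arguments until the real callback is installed, then replay them in order) can be illustrated with a simplified standalone sketch. This is not the internal trait and omits its `ReentrantLock` ordering guarantee and `Stopped` state; all names are illustrative:

```scala
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec

final class BufferingCallback[T] {
  private sealed trait State
  private case class Buffering(reversed: List[T]) extends State
  private case class Ready(f: T ⇒ Unit) extends State

  private val state = new AtomicReference[State](Buffering(Nil))

  // Before init: enqueue the argument. After init: call the real callback.
  @tailrec def invoke(arg: T): Unit = state.get() match {
    case Ready(f) ⇒ f(arg)
    case s @ Buffering(queued) ⇒
      if (!state.compareAndSet(s, Buffering(arg :: queued))) invoke(arg)
  }

  // Install the real callback and replay everything buffered so far, in arrival order.
  def init(f: T ⇒ Unit): Unit =
    state.getAndSet(Ready(f)) match {
      case Buffering(reversed) ⇒ reversed.reverse.foreach(f)
      case _                   ⇒ ()
    }
}
```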
@@ -364,6 +369,7 @@ abstract class PushStage[In, Out] extends PushPullStage[In, Out] { * * @see [[PushPullStage]] */ +@deprecated("Please use GraphStage instead.", "2.4.2") abstract class DetachedStage[In, Out] extends AbstractStage[In, Out, UpstreamDirective, DownstreamDirective, DetachedContext[Out], LifecycleContext] { private[stream] override def isDetached = true diff --git a/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java b/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java index 3a67cc855b..ecdbb29a5f 100644 --- a/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java +++ b/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit; @@ -558,7 +558,7 @@ public class JavaTestKit { this(clazz, max, Duration.Inf(), messages); } - @SuppressWarnings("unchecked") + @SuppressWarnings("all") public ReceiveWhile(Class clazz, Duration max, Duration idle, int messages) { results = p.receiveWhile(max, idle, messages, new CachingPartialFunction() { public T match(Object msg) throws Exception { diff --git a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala index 66a2f33e39..f97b929f1c 100644 --- a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala +++ b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/SocketUtil.scala b/akka-testkit/src/main/scala/akka/testkit/SocketUtil.scala index 115565f0a7..5b12b1f571 100644 --- a/akka-testkit/src/main/scala/akka/testkit/SocketUtil.scala +++ b/akka-testkit/src/main/scala/akka/testkit/SocketUtil.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala index 82c9696499..b4dbd39a92 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActors.scala b/akka-testkit/src/main/scala/akka/testkit/TestActors.scala index e824f9c2b7..e063e5fbe5 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestActors.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestActors.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala index a07c84b459..246f531b24 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
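With the `Stage` hierarchy (`PushPullStage`, `PushStage`, `DetachedStage`) deprecated in favour of `GraphStage`, a simple one-for-one transformation migrates roughly as in the following sketch (deprecated form first, then its `GraphStage` counterpart; class names are illustrative):

```scala
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.stage._

// Deprecated style: a PushStage mapping elements one-for-one.
class MapOld[A, B](f: A ⇒ B) extends PushStage[A, B] {
  override def onPush(elem: A, ctx: Context[B]): SyncDirective = ctx.push(f(elem))
}

// GraphStage replacement carrying the same logic.
class MapNew[A, B](f: A ⇒ B) extends GraphStage[FlowShape[A, B]] {
  val in = Inlet[A]("MapNew.in")
  val out = Outlet[B]("MapNew.out")
  override val shape = FlowShape(in, out)

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with InHandler with OutHandler {
      override def onPush(): Unit = push(out, f(grab(in)))
      override def onPull(): Unit = pull(in)
      setHandlers(in, out, this)
    }
}
```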
*/ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala index 537cdbc996..d2cfe1841b 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala index c3659d93e5..21eb10749d 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index 41cf7c8e9f..58b81f0cb3 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit @@ -712,7 +712,7 @@ trait TestKitBase { * } * * } finally { - * system.shutdown() + * system.terminate() * } * } * }}} diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala index 5843c5e7da..64fb7971a6 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala index 6ff13e672b..8d1ed1c115 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/main/scala/akka/testkit/package.scala b/akka-testkit/src/main/scala/akka/testkit/package.scala index a7f0183a73..6c734c8168 100644 --- a/akka-testkit/src/main/scala/akka/testkit/package.scala +++ b/akka-testkit/src/main/scala/akka/testkit/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/akka-testkit/src/test/java/akka/testkit/AkkaJUnitActorSystemResource.java b/akka-testkit/src/test/java/akka/testkit/AkkaJUnitActorSystemResource.java index 663f194ad3..5f6462a696 100644 --- a/akka-testkit/src/test/java/akka/testkit/AkkaJUnitActorSystemResource.java +++ b/akka-testkit/src/test/java/akka/testkit/AkkaJUnitActorSystemResource.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
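The testkit scaladoc example now uses `system.terminate()`, which returns a `Future[Terminated]` instead of the fire-and-forget `shutdown()`. A minimal sketch of the updated shutdown pattern (names illustrative):

```scala
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.actor.ActorSystem

object TerminateSketch extends App {
  val system = ActorSystem("terminate-sketch")
  try {
    // run tests / use the system here
  } finally {
    // terminate() returns a Future[Terminated]; block briefly until it completes.
    Await.result(system.terminate(), 10.seconds)
  }
}
```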
*/ package akka.testkit; diff --git a/akka-testkit/src/test/java/akka/testkit/TestActorRefJavaCompile.java b/akka-testkit/src/test/java/akka/testkit/TestActorRefJavaCompile.java index 1b639e4e38..80e97cb9bb 100644 --- a/akka-testkit/src/test/java/akka/testkit/TestActorRefJavaCompile.java +++ b/akka-testkit/src/test/java/akka/testkit/TestActorRefJavaCompile.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit; diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala index 9898fe7bca..238b7947a2 100644 --- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala index 74f420dfb3..ac230d5d74 100644 --- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/Coroner.scala b/akka-testkit/src/test/scala/akka/testkit/Coroner.scala index 5fb80a79bd..af473daa7a 100644 --- a/akka-testkit/src/test/scala/akka/testkit/Coroner.scala +++ b/akka-testkit/src/test/scala/akka/testkit/Coroner.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/CoronerSpec.scala b/akka-testkit/src/test/scala/akka/testkit/CoronerSpec.scala index a6e5ba5a9b..077941a8c3 100644 --- a/akka-testkit/src/test/scala/akka/testkit/CoronerSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/CoronerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/DefaultTimeoutSpec.scala b/akka-testkit/src/test/scala/akka/testkit/DefaultTimeoutSpec.scala index 32d5279556..f1a7cfe4c5 100644 --- a/akka-testkit/src/test/scala/akka/testkit/DefaultTimeoutSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/DefaultTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2013-2015 Typesafe Inc. + * Copyright (C) 2013-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/ImplicitSenderSpec.scala b/akka-testkit/src/test/scala/akka/testkit/ImplicitSenderSpec.scala index e24ba685e1..132b78eb70 100644 --- a/akka-testkit/src/test/scala/akka/testkit/ImplicitSenderSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/ImplicitSenderSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2013-2015 Typesafe Inc. + * Copyright (C) 2013-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala index ee0125287d..92ae6304e1 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorsSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorsSpec.scala index 3d8d63198a..a453f37e2d 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestActorsSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestActorsSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala index 6d8273e02a..3a5ca459b7 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/TestTags.scala b/akka-testkit/src/test/scala/akka/testkit/TestTags.scala index 42e7306721..7df8487f69 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestTags.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestTags.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/AveragingGauge.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/AveragingGauge.scala index 72c1025d52..83d1b78dee 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/AveragingGauge.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/AveragingGauge.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/FileDescriptorMetricSet.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/FileDescriptorMetricSet.scala index ebae4b68a1..83e234bb03 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/FileDescriptorMetricSet.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/FileDescriptorMetricSet.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/HdrHistogram.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/HdrHistogram.scala index f89864c2da..0f4a4d258c 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/HdrHistogram.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/HdrHistogram.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/KnownOpsInTimespanTimer.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/KnownOpsInTimespanTimer.scala index 816a0baad7..131a6bfa07 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/KnownOpsInTimespanTimer.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/KnownOpsInTimespanTimer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/MemoryUsageSnapshotting.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/MemoryUsageSnapshotting.scala index 13fc6ce97e..8053ec09e0 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/MemoryUsageSnapshotting.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/MemoryUsageSnapshotting.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricKeyDSL.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricKeyDSL.scala index b26acb3895..cb78e37606 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricKeyDSL.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricKeyDSL.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKit.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKit.scala index 25d18a385e..c9ba5891b7 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKit.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKit.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitOps.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitOps.scala index bf084521e9..f65e4c7fa3 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitOps.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitOps.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitSpec.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitSpec.scala index 29576f9b79..08a3a70135 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/MetricsKitSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaConsoleReporter.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaConsoleReporter.scala index 03a3b0124d..8d0b99ec0e 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaConsoleReporter.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaConsoleReporter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka.testkit.metrics.reporter diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaGraphiteReporter.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaGraphiteReporter.scala index 736b5dc716..33d3dacb32 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaGraphiteReporter.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/AkkaGraphiteReporter.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka.testkit.metrics.reporter diff --git a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/GraphiteClient.scala b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/GraphiteClient.scala index b91ce0f8dd..f4114d2f5d 100644 --- a/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/GraphiteClient.scala +++ b/akka-testkit/src/test/scala/akka/testkit/metrics/reporter/GraphiteClient.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.testkit.metrics.reporter diff --git a/akka-typed/build.sbt b/akka-typed/build.sbt index ed0858db51..abcc7414ed 100644 --- a/akka-typed/build.sbt +++ b/akka-typed/build.sbt @@ -2,7 +2,5 @@ import akka.{ AkkaBuild, Formatting, OSGi, Dependencies } import com.typesafe.tools.mima.plugin.MimaKeys AkkaBuild.defaultSettings - AkkaBuild.experimentalSettings - Formatting.formatSettings diff --git a/akka-typed/src/main/scala/akka/typed/ActorContext.scala b/akka-typed/src/main/scala/akka/typed/ActorContext.scala index f0a60e6711..141872e4c6 100644 --- a/akka-typed/src/main/scala/akka/typed/ActorContext.scala +++ b/akka-typed/src/main/scala/akka/typed/ActorContext.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/ActorRef.scala b/akka-typed/src/main/scala/akka/typed/ActorRef.scala index f65eadf7af..4fb81debe1 100644 --- a/akka-typed/src/main/scala/akka/typed/ActorRef.scala +++ b/akka-typed/src/main/scala/akka/typed/ActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/ActorSystem.scala b/akka-typed/src/main/scala/akka/typed/ActorSystem.scala index 7d6d8fb51b..dcdac3fa4a 100644 --- a/akka-typed/src/main/scala/akka/typed/ActorSystem.scala +++ b/akka-typed/src/main/scala/akka/typed/ActorSystem.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Ask.scala b/akka-typed/src/main/scala/akka/typed/Ask.scala index 715c266c9b..636c051f12 100644 --- a/akka-typed/src/main/scala/akka/typed/Ask.scala +++ b/akka-typed/src/main/scala/akka/typed/Ask.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Behavior.scala b/akka-typed/src/main/scala/akka/typed/Behavior.scala index 3bf766a2e4..ab5c089e9f 100644 --- a/akka-typed/src/main/scala/akka/typed/Behavior.scala +++ b/akka-typed/src/main/scala/akka/typed/Behavior.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Effects.scala b/akka-typed/src/main/scala/akka/typed/Effects.scala index b331ea059a..a7991e65ce 100644 --- a/akka-typed/src/main/scala/akka/typed/Effects.scala +++ b/akka-typed/src/main/scala/akka/typed/Effects.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Impl.scala b/akka-typed/src/main/scala/akka/typed/Impl.scala index f35dadfa38..f5e10e23e0 100644 --- a/akka-typed/src/main/scala/akka/typed/Impl.scala +++ b/akka-typed/src/main/scala/akka/typed/Impl.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed @@ -82,12 +82,20 @@ private[typed] class ActorContextAdapter[T](ctx: akka.actor.ActorContext) extend def spawn[U](props: Props[U], name: String) = ctx.spawn(props, name) def actorOf(props: a.Props) = ctx.actorOf(props) def actorOf(props: a.Props, name: String) = ctx.actorOf(props, name) - def stop(child: ActorRef[Nothing]) = ctx.child(child.path.name) match { - case Some(ref) if ref == child.untypedRef ⇒ - ctx.stop(child.untypedRef) - true - case _ ⇒ false // none of our business - } + def stop(child: ActorRef[Nothing]) = + child.untypedRef match { + case f: akka.actor.FunctionRef ⇒ + val cell = ctx.asInstanceOf[akka.actor.ActorCell] + cell.removeFunctionRef(f) + case _ ⇒ + ctx.child(child.path.name) match { + case Some(ref) if ref == child.untypedRef ⇒ + ctx.stop(child.untypedRef) + true + case _ ⇒ + false // none of our business + } + } def watch[U](other: ActorRef[U]) = { ctx.watch(other.untypedRef); other } def watch(other: a.ActorRef) = { ctx.watch(other); other } def unwatch[U](other: ActorRef[U]) = { ctx.unwatch(other.untypedRef); other } @@ -98,7 +106,11 @@ private[typed] class ActorContextAdapter[T](ctx: akka.actor.ActorContext) extend import ctx.dispatcher ctx.system.scheduler.scheduleOnce(delay, target.untypedRef, msg) } - def spawnAdapter[U](f: U ⇒ T) = ActorRef[U](ctx.actorOf(akka.actor.Props(classOf[MessageWrapper], f))) + def spawnAdapter[U](f: U ⇒ T) = { + val cell = ctx.asInstanceOf[akka.actor.ActorCell] + val ref = cell.addFunctionRef((_, msg) ⇒ ctx.self ! f(msg.asInstanceOf[U])) + ActorRef[U](ref) + } } /** diff --git a/akka-typed/src/main/scala/akka/typed/Inbox.scala b/akka-typed/src/main/scala/akka/typed/Inbox.scala index 2ab938031c..0f0e7bfbbb 100644 --- a/akka-typed/src/main/scala/akka/typed/Inbox.scala +++ b/akka-typed/src/main/scala/akka/typed/Inbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Ops.scala b/akka-typed/src/main/scala/akka/typed/Ops.scala index 9b923747b9..b864ff9243 100644 --- a/akka-typed/src/main/scala/akka/typed/Ops.scala +++ b/akka-typed/src/main/scala/akka/typed/Ops.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/Props.scala b/akka-typed/src/main/scala/akka/typed/Props.scala index 43bace0fbe..8dc183db54 100644 --- a/akka-typed/src/main/scala/akka/typed/Props.scala +++ b/akka-typed/src/main/scala/akka/typed/Props.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/ScalaDSL.scala b/akka-typed/src/main/scala/akka/typed/ScalaDSL.scala index eb066cf3c2..db835e13e5 100644 --- a/akka-typed/src/main/scala/akka/typed/ScalaDSL.scala +++ b/akka-typed/src/main/scala/akka/typed/ScalaDSL.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
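`spawnAdapter` now registers a lightweight `FunctionRef` via `addFunctionRef` instead of spawning a `MessageWrapper` child actor. A heavily hedged sketch of its intended use in the experimental typed ScalaDSL, modelled on the `GetAdapter`/`Adapter` exchange added to `ActorContextSpec` below; the combinator names (`FullTotal`, `Msg`, `Sig`, `Same`) are taken from that spec and the typed DSL of this era and may differ, and all protocol names are hypothetical:

```scala
import akka.typed._
import akka.typed.ScalaDSL._

object AdapterSketch {
  // Hypothetical protocols: External is some other actor's reply type,
  // Wrapped folds it back into our own Command protocol.
  sealed trait Command
  final case class External(n: Int)
  final case class Wrapped(external: External) extends Command
  final case class GetExternalRef(replyTo: ActorRef[ActorRef[External]]) extends Command

  val behavior: Behavior[Command] =
    FullTotal {
      case Sig(_, _) ⇒ Same
      case Msg(ctx, GetExternalRef(replyTo)) ⇒
        // The adapter is now a FunctionRef: every External it receives is
        // rewrapped via the given function and delivered to ctx.self.
        replyTo ! ctx.spawnAdapter((e: External) ⇒ Wrapped(e))
        Same
      case Msg(_, Wrapped(External(n))) ⇒
        println(s"adapted message carrying $n")
        Same
    }
}
```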
*/ package akka.typed diff --git a/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala b/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala index c26bf0b984..d1f62faf90 100644 --- a/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala +++ b/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed.patterns diff --git a/akka-typed/src/main/scala/akka/typed/patterns/Receptionist.scala b/akka-typed/src/main/scala/akka/typed/patterns/Receptionist.scala index b64987e9d4..968b99d4e1 100644 --- a/akka-typed/src/main/scala/akka/typed/patterns/Receptionist.scala +++ b/akka-typed/src/main/scala/akka/typed/patterns/Receptionist.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed.patterns diff --git a/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala b/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala index 756a6dc2b1..708cfdc175 100644 --- a/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala +++ b/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala @@ -66,6 +66,9 @@ object ActorContextSpec { final case class BecomeCareless(replyTo: ActorRef[BecameCareless.type]) extends Command case object BecameCareless extends Event + final case class GetAdapter(replyTo: ActorRef[Adapter]) extends Command + final case class Adapter(a: ActorRef[Command]) extends Event + def subject(monitor: ActorRef[GotSignal]): Behavior[Command] = FullTotal { case Sig(ctx, signal) ⇒ @@ -142,6 +145,9 @@ object ActorContextSpec { monitor ! GotSignal(sig) Same } + case GetAdapter(replyTo) ⇒ + replyTo ! Adapter(ctx.spawnAdapter(identity)) + Same } } } @@ -503,6 +509,26 @@ class ActorContextSpec extends TypedSpec(ConfigFactory.parseString( msgs should ===(Scheduled :: Pong2 :: Nil) } }) + + def `40 must create a working adapter`(): Unit = sync(setup("ctx40") { (ctx, startWith) ⇒ + startWith.keep { subj ⇒ + subj ! GetAdapter(ctx.self) + }.expectMessage(500.millis) { (msg, subj) ⇒ + val Adapter(adapter) = msg + ctx.watch(adapter) + adapter ! Ping(ctx.self) + (subj, adapter) + }.expectMessage(500.millis) { + case (msg, (subj, adapter)) ⇒ + msg should ===(Pong1) + ctx.stop(subj) + adapter + }.expectMessageKeep(500.millis) { (msg, _) ⇒ + msg should ===(GotSignal(PostStop)) + }.expectTermination(500.millis) { (t, adapter) ⇒ + t.ref should ===(adapter) + } + }) } object `An ActorContext` extends Tests { diff --git a/akka-typed/src/test/scala/akka/typed/BehaviorSpec.scala b/akka-typed/src/test/scala/akka/typed/BehaviorSpec.scala index cfee3ba0b5..fc155c2640 100644 --- a/akka-typed/src/test/scala/akka/typed/BehaviorSpec.scala +++ b/akka-typed/src/test/scala/akka/typed/BehaviorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/test/scala/akka/typed/PerformanceSpec.scala b/akka-typed/src/test/scala/akka/typed/PerformanceSpec.scala index 84c1025adb..9f4146bae6 100644 --- a/akka-typed/src/test/scala/akka/typed/PerformanceSpec.scala +++ b/akka-typed/src/test/scala/akka/typed/PerformanceSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. 
*/ package akka.typed diff --git a/akka-typed/src/test/scala/akka/typed/StepWise.scala b/akka-typed/src/test/scala/akka/typed/StepWise.scala index 615689d5b2..724f7f84e5 100644 --- a/akka-typed/src/test/scala/akka/typed/StepWise.scala +++ b/akka-typed/src/test/scala/akka/typed/StepWise.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/test/scala/akka/typed/TypedSpec.scala b/akka-typed/src/test/scala/akka/typed/TypedSpec.scala index 33785f1509..122c86b633 100644 --- a/akka-typed/src/test/scala/akka/typed/TypedSpec.scala +++ b/akka-typed/src/test/scala/akka/typed/TypedSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed diff --git a/akka-typed/src/test/scala/akka/typed/patterns/ReceptionistSpec.scala b/akka-typed/src/test/scala/akka/typed/patterns/ReceptionistSpec.scala index 6a8a3fc5fa..71475ab24d 100644 --- a/akka-typed/src/test/scala/akka/typed/patterns/ReceptionistSpec.scala +++ b/akka-typed/src/test/scala/akka/typed/patterns/ReceptionistSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2014-2015 Typesafe Inc. + * Copyright (C) 2014-2016 Typesafe Inc. */ package akka.typed.patterns diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index f8f0a60293..eefcfc5e8e 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka @@ -8,9 +8,7 @@ import java.io.FileInputStream import java.io.InputStreamReader import java.util.Properties -import akka.TestExtras.GraphiteBuildEvents import akka.TestExtras.JUnitFileReporting -import akka.TestExtras.StatsDMetrics import com.typesafe.sbt.S3Plugin.S3 import com.typesafe.sbt.S3Plugin.s3Settings import com.typesafe.sbt.pgp.PgpKeys.publishSigned @@ -18,6 +16,7 @@ import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.MultiJvm import sbt.Keys._ import sbt._ import sbtunidoc.Plugin.ScalaUnidoc +import sbtunidoc.Plugin.JavaUnidoc import sbtunidoc.Plugin.UnidocKeys._ object AkkaBuild extends Build { @@ -42,18 +41,10 @@ object AkkaBuild extends Build { base = file("."), settings = parentSettings ++ Release.settings ++ SphinxDoc.akkaSettings ++ Dist.settings ++ s3Settings ++ - GraphiteBuildEvents.settings ++ Protobuf.settings ++ Seq( + UnidocRoot.akkaSettings ++ + Protobuf.settings ++ Seq( parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", parallelExecutionByDefault.toString).toBoolean, Dist.distExclude := Seq(actorTests.id, docs.id, samples.id, osgi.id), - - // FIXME problem with scalaunidoc:doc, there must be a better way - unidocProjectFilter in (ScalaUnidoc, unidoc) := inAnyProject -- inProjects(protobuf, samples, - sampleCamelJava, sampleCamelScala, sampleClusterJava, sampleClusterScala, sampleFsmScala, sampleFsmJavaLambda, - sampleMainJava, sampleMainScala, sampleMainJavaLambda, sampleMultiNodeScala, - samplePersistenceJava, samplePersistenceScala, samplePersistenceJavaLambda, - sampleRemoteJava, sampleRemoteScala, sampleSupervisionJavaLambda, - sampleDistributedDataScala, sampleDistributedDataJava), - S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com", S3.progress in S3.upload := true, mappings in S3.upload <<= (Release.releaseDirectory, version) map { (d, v) => @@ -64,7 +55,10 @@ object AkkaBuild extends Build { ), aggregate = Seq(actor, testkit, actorTests, remote, remoteTests, camel, 
cluster, clusterMetrics, clusterTools, clusterSharding, distributedData, - slf4j, agent, persistence, persistenceQuery, persistenceTck, kernel, osgi, docs, contrib, samples, multiNodeTestkit, benchJmh, typed, protobuf) + slf4j, agent, persistence, persistenceQuery, persistenceTck, kernel, osgi, docs, contrib, samples, multiNodeTestkit, benchJmh, typed, protobuf, + stream, streamTestkit, streamTests, streamTestsTck, parsing, + httpCore, http, httpSprayJson, httpXml, httpJackson, httpTests, httpTestkit + ) ) lazy val akkaScalaNightly = Project( @@ -74,7 +68,10 @@ object AkkaBuild extends Build { // samples don't work with dbuild right now aggregate = Seq(actor, testkit, actorTests, remote, remoteTests, camel, cluster, clusterMetrics, clusterTools, clusterSharding, distributedData, - slf4j, persistence, persistenceQuery, persistenceTck, kernel, osgi, contrib, multiNodeTestkit, benchJmh, typed, protobuf) + slf4j, persistence, persistenceQuery, persistenceTck, kernel, osgi, contrib, multiNodeTestkit, benchJmh, typed, protobuf, + stream, streamTestkit, streamTests, streamTestsTck, parsing, + httpCore, http, httpSprayJson, httpXml, httpJackson, httpTests, httpTestkit + ) ).disablePlugins(ValidatePullRequest) lazy val actor = Project( @@ -103,7 +100,12 @@ object AkkaBuild extends Build { lazy val benchJmh = Project( id = "akka-bench-jmh", base = file("akka-bench-jmh"), - dependencies = Seq(actor, persistence, distributedData, testkit).map(_ % "compile;compile->test;provided->provided") + dependencies = Seq( + actor, + http, stream, streamTests, + persistence, distributedData, + testkit + ).map(_ % "compile;compile->test;provided->provided") ).disablePlugins(ValidatePullRequest) lazy val protobuf = Project( @@ -185,7 +187,11 @@ object AkkaBuild extends Build { lazy val persistenceQuery = Project( id = "akka-persistence-query-experimental", base = file("akka-persistence-query"), - dependencies = Seq(persistence % "compile;provided->provided;test->test", testkit % "compile;test->test") + dependencies = Seq( + stream, + persistence % "compile;provided->provided;test->test", + testkit % "compile;test->test", + streamTestkit % "compile;test->test") ) lazy val persistenceTck = Project( @@ -194,6 +200,94 @@ object AkkaBuild extends Build { dependencies = Seq(persistence % "compile;provided->provided;test->test", testkit % "compile;test->test") ) + lazy val httpCore = Project( + id = "akka-http-core", + base = file("akka-http-core"), + dependencies = Seq(stream, parsing, streamTestkit % "test->test") + ) + + lazy val http = Project( + id = "akka-http-experimental", + base = file("akka-http"), + dependencies = Seq(httpCore) + ) + + lazy val httpTestkit = Project( + id = "akka-http-testkit-experimental", + base = file("akka-http-testkit"), + dependencies = Seq(http, streamTestkit) + ) + + lazy val httpTests = Project( + id = "akka-http-tests-experimental", + base = file("akka-http-tests"), + dependencies = Seq(httpTestkit % "test", httpSprayJson, httpXml, httpJackson) + ) + + lazy val httpMarshallersScala = Project( + id = "akka-http-marshallers-scala-experimental", + base = file("akka-http-marshallers-scala"), + settings = parentSettings + ).aggregate(httpSprayJson, httpXml) + + lazy val httpXml = + httpMarshallersScalaSubproject("xml") + + lazy val httpSprayJson = + httpMarshallersScalaSubproject("spray-json") + + lazy val httpMarshallersJava = Project( + id = "akka-http-marshallers-java-experimental", + base = file("akka-http-marshallers-java"), + settings = parentSettings + ).aggregate(httpJackson) + + 
lazy val httpJackson = + httpMarshallersJavaSubproject("jackson") + + def httpMarshallersScalaSubproject(name: String) = + Project( + id = s"akka-http-$name-experimental", + base = file(s"akka-http-marshallers-scala/akka-http-$name"), + dependencies = Seq(http) + ) + + def httpMarshallersJavaSubproject(name: String) = + Project( + id = s"akka-http-$name-experimental", + base = file(s"akka-http-marshallers-java/akka-http-$name"), + dependencies = Seq(http) + ) + + lazy val parsing = Project( + id = "akka-parsing", + base = file("akka-parsing") + ) + + lazy val stream = Project( + id = "akka-stream", + base = file("akka-stream"), + dependencies = Seq(actor) + ) + + lazy val streamTestkit = Project( + id = "akka-stream-testkit", + base = file("akka-stream-testkit"), // TODO that persistence dependency + dependencies = Seq(stream, persistence % "compile;provided->provided;test->test", testkit % "compile;test->test") + ) + + lazy val streamTests = Project( + id = "akka-stream-tests-experimental", + base = file("akka-stream-tests"), + dependencies = Seq(streamTestkit % "test->test", stream) + ) + + lazy val streamTestsTck = Project( + id = "akka-stream-tests-tck-experimental", + base = file("akka-stream-tests-tck"), + dependencies = Seq(streamTestkit % "test->test", stream) + ) + lazy val kernel = Project( id = "akka-kernel", base = file("akka-kernel"), @@ -215,10 +309,16 @@ object AkkaBuild extends Build { lazy val docs = Project( id = "akka-docs", base = file("akka-docs"), - dependencies = Seq(actor, testkit % "test->test", + dependencies = Seq( + actor, + testkit % "compile;test->test", remote % "compile;test->test", cluster, clusterMetrics, slf4j, agent, camel, osgi, persistence % "compile;provided->provided;test->test", persistenceTck, persistenceQuery, - typed % "compile;test->test", distributedData) + typed % "compile;test->test", distributedData, + stream, streamTestkit % "compile;test->test", + http, httpSprayJson, httpJackson, httpXml, + httpTests % "compile;test->test", httpTestkit % "compile;test->test" + ) ) lazy val contrib = Project( @@ -288,7 +388,12 @@ object AkkaBuild extends Build { val dontPublishSettings = Seq( publishSigned := (), - publish := () + publish := (), + publishArtifact in Compile := false + ) + + val dontPublishDocsSettings = Seq( + sources in doc in Compile := List() ) override lazy val settings = @@ -347,7 +452,8 @@ object AkkaBuild extends Build { private def allWarnings: Boolean = System.getProperty("akka.allwarnings", "false").toBoolean - lazy val defaultSettings = resolverSettings ++ TestExtras.Filter.settings ++ + lazy val defaultSettings = resolverSettings ++ + TestExtras.Filter.settings ++ Protobuf.settings ++ Seq( // compile options scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"), @@ -357,7 +463,7 @@ object AkkaBuild extends Build { // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"), javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil), - javacOptions in doc ++= Seq("-encoding", "UTF-8", "-source", "1.8"), + javacOptions in doc ++= Seq(), incOptions := incOptions.value.withNameHashing(true), crossVersion := CrossVersion.binary, @@ -402,16 +508,24 @@ object AkkaBuild extends Build { testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a") ) ++ mavenLocalResolverSettings ++ - 
JUnitFileReporting.settings ++ StatsDMetrics.settings + JUnitFileReporting.settings ++ + docLintingSettings + + lazy val docLintingSettings = Seq( + javacOptions in compile ++= Seq("-Xdoclint:none"), + javacOptions in test ++= Seq("-Xdoclint:none"), + javacOptions in doc ++= Seq("-Xdoclint:none") + ) def akkaPreviousArtifacts(id: String): Def.Initialize[Set[sbt.ModuleID]] = Def.setting { if (enableMiMa) { val versions = { val akka23Versions = Seq("2.3.11", "2.3.12", "2.3.13", "2.3.14") - val akka24Versions = Seq("2.4.0") + val akka24Versions = Seq("2.4.0", "2.4.1") val akka24NewArtifacts = Seq( "akka-cluster-sharding", "akka-cluster-tools", + "akka-cluster-metrics", "akka-persistence", "akka-distributed-data-experimental", "akka-persistence-query-experimental" @@ -428,6 +542,11 @@ object AkkaBuild extends Build { else Set.empty } + def akkaStreamAndHttpPreviousArtifacts(id: String): Def.Initialize[Set[sbt.ModuleID]] = Def.setting { + // TODO fix MiMa for 2.4 Akka streams + Set.empty + } + def loadSystemProperties(fileName: String): Unit = { import scala.collection.JavaConverters._ val file = new File(fileName) diff --git a/project/CliOptions.scala b/project/CliOptions.scala index a71f3f982d..f7e7d12898 100644 --- a/project/CliOptions.scala +++ b/project/CliOptions.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/project/Dependencies.scala b/project/Dependencies.scala index a525579716..66d86faad6 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -4,10 +4,12 @@ import sbt._ import Keys._ object Dependencies { + import DependencyHelpers._ lazy val scalaTestVersion = settingKey[String]("The version of ScalaTest to use.") lazy val scalaStmVersion = settingKey[String]("The version of ScalaSTM to use.") lazy val scalaCheckVersion = settingKey[String]("The version of ScalaCheck to use.") + val junitVersion = "4.12" val Versions = Seq( crossScalaVersions := Seq("2.11.7"), //"2.12.0-M2" @@ -20,9 +22,6 @@ object Dependencies { object Compile { // Compile - // FIXME: change to project dependency once akka-stream merged to master - val akkaStream = "com.typesafe.akka" %% "akka-stream-experimental" % "2.0.1" - val camelCore = "org.apache.camel" % "camel-core" % "2.13.4" exclude("org.slf4j", "slf4j-api") // ApacheV2 // when updating config version, update links ActorSystem ScalaDoc to link to the updated version @@ -30,8 +29,12 @@ object Dependencies { val netty = "io.netty" % "netty" % "3.10.3.Final" // ApacheV2 val scalaStm = Def.setting { "org.scala-stm" %% "scala-stm" % scalaStmVersion.value } // Modified BSD (Scala) + val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.1" // Scala License + val scalaReflect = ScalaVersionDependentModuleID.versioned("org.scala-lang" % "scala-reflect" % _) // Scala License + val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.12" // MIT - // mirrored in OSGi sample + + // mirrored in OSGi sample val uncommonsMath = "org.uncommons.maths" % "uncommons-maths" % "1.2.2a" exclude("jfree", "jcommon") exclude("jfree", "jfreechart") // ApacheV2 val osgiCore = "org.osgi" % "org.osgi.core" % "4.3.1" // ApacheV2 val osgiCompendium= "org.osgi" % "org.osgi.compendium" % "4.3.1" // ApacheV2 @@ -39,6 +42,24 @@ object Dependencies { // TODO remove with metrics from akka-cluster val sigar = "org.fusesource" % "sigar" % "1.6.4" // ApacheV2 + // reactive streams + val reactiveStreams = "org.reactivestreams" % "reactive-streams" % "1.0.0" // CC0 + + // ssl-config + 
val sslConfigAkka = "com.typesafe" %% "ssl-config-akka" % "0.1.1" // ApacheV2 + + // For akka-http spray-json support + val sprayJson = "io.spray" %% "spray-json" % "1.3.2" // ApacheV2 + + // For akka-http-jackson support + val jackson = "com.fasterxml.jackson.core" % "jackson-databind" % "2.4.3" // ApacheV2 + + // For akka-http-testkit-java + val junit = "junit" % "junit" % junitVersion // Common Public License 1.0 + + // For Java 8 Conversions + val java8Compat = "org.scala-lang.modules" %% "scala-java8-compat" % "0.7.0" // Scala License + object Docs { val sprayJson = "io.spray" %% "spray-json" % "1.3.2" % "test" val gson = "com.google.code.gson" % "gson" % "2.3.1" % "test" @@ -48,7 +69,7 @@ object Dependencies { val commonsMath = "org.apache.commons" % "commons-math" % "2.2" % "test" // ApacheV2 val commonsIo = "commons-io" % "commons-io" % "2.4" % "test" // ApacheV2 val commonsCodec = "commons-codec" % "commons-codec" % "1.10" % "test" // ApacheV2 - val junit = "junit" % "junit" % "4.12" % "test" // Common Public License 1.0 + val junit = "junit" % "junit" % junitVersion % "test" // Common Public License 1.0 val logback = "ch.qos.logback" % "logback-classic" % "1.1.3" % "test" // EPL 1.0 / LGPL 2.1 val mockito = "org.mockito" % "mockito-all" % "1.10.19" % "test" // MIT // changing the scalatest dependency must be reflected in akka-docs/rst/dev/multi-jvm-testing.rst @@ -59,8 +80,6 @@ object Dependencies { val log4j = "log4j" % "log4j" % "1.2.14" % "test" // ApacheV2 val junitIntf = "com.novocode" % "junit-interface" % "0.11" % "test" // MIT val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.0.4" % "test" - // FIXME: change to project dependency once akka-stream merged to master - val akkaStreamTestkit = "com.typesafe.akka" %% "akka-stream-testkit-experimental" % "2.0.1" % "test" // metrics, measurements, perf testing val metrics = "com.codahale.metrics" % "metrics-core" % "3.0.2" % "test" // ApacheV2 @@ -72,22 +91,28 @@ object Dependencies { // sigar logging val slf4jJul = "org.slf4j" % "jul-to-slf4j" % "1.7.12" % "test" // MIT val slf4jLog4j = "org.slf4j" % "log4j-over-slf4j" % "1.7.12" % "test" // MIT + + lazy val sprayJson = Compile.sprayJson % "test" + + // reactive streams tck + val reactiveStreamsTck = "org.reactivestreams" % "reactive-streams-tck" % "1.0.0" % "test" // CC0 } object Provided { // TODO remove from "test" config val sigarLoader = "io.kamon" % "sigar-loader" % "1.6.6-rev002" % "optional;provided;test" // ApacheV2 - + val levelDB = "org.iq80.leveldb" % "leveldb" % "0.7" % "optional;provided" // ApacheV2 val levelDBNative = "org.fusesource.leveldbjni" % "leveldbjni-all" % "1.8" % "optional;provided" // New BSD } - + } import Compile._ + // TODO check if `l ++=` everywhere expensive? 
val l = libraryDependencies - val actor = l ++= Seq(config) + val actor = l ++= Seq(config, java8Compat) val testkit = l ++= Seq(Test.junit, Test.scalatest.value) ++ Test.metricsAll @@ -98,13 +123,13 @@ object Dependencies { val remoteTests = l ++= Seq(Test.junit, Test.scalatest.value, Test.scalaXml) val cluster = l ++= Seq(Test.junit, Test.scalatest.value) - + val clusterTools = l ++= Seq(Test.junit, Test.scalatest.value) - + val clusterSharding = l ++= Seq(Provided.levelDB, Provided.levelDBNative, Test.junit, Test.scalatest.value, Test.commonsIo) val clusterMetrics = l ++= Seq(Provided.sigarLoader, Test.slf4jJul, Test.slf4jLog4j, Test.logback, Test.mockito) - + val distributedData = l ++= Seq(Test.junit, Test.scalatest.value) val slf4j = l ++= Seq(slf4jApi, Test.logback) @@ -113,7 +138,7 @@ object Dependencies { val persistence = l ++= Seq(Provided.levelDB, Provided.levelDBNative, Test.scalatest.value, Test.junit, Test.commonsIo, Test.scalaXml) - val persistenceQuery = l ++= Seq(akkaStream, Test.scalatest.value, Test.junit, Test.commonsIo, Test.akkaStreamTestkit) + val persistenceQuery = l ++= Seq(Test.scalatest.value, Test.junit, Test.commonsIo) val persistenceTck = l ++= Seq(Test.scalatest.value.copy(configurations = Some("compile")), Test.junit.copy(configurations = Some("compile"))) @@ -126,6 +151,70 @@ object Dependencies { val docs = l ++= Seq(Test.scalatest.value, Test.junit, Test.junitIntf, Docs.sprayJson, Docs.gson) val contrib = l ++= Seq(Test.junitIntf, Test.commonsIo) - + val benchJmh = l ++= Seq(Provided.levelDB, Provided.levelDBNative) + + // akka stream & http + + lazy val httpCore = l ++= Seq( + java8Compat, + Test.sprayJson, // for WS Autobahn test metadata + Test.junitIntf, Test.junit, Test.scalatest.value) + + lazy val http = l ++= Seq(java8Compat) + + // special, since it also includes a compiler plugin + lazy val parsing = Seq( + DependencyHelpers.versionDependentDeps( + Dependencies.Compile.scalaReflect % "provided" + ), + addCompilerPlugin("org.scalamacros" % "paradise" % "2.0.1" cross CrossVersion.full) + ) + + lazy val httpTestkit = l ++= Seq( + Test.junit, Test.junitIntf, Compile.junit % "provided", Test.scalatest.value.copy(configurations = Some("provided; test"))) + + // TODO collapse those + lazy val httpTests = l ++= Seq(Test.junit, Test.scalatest.value, Test.junitIntf) + lazy val httpTestsJava8 = l ++= Seq(Test.junit, Test.junitIntf) + + lazy val httpXml = versionDependentDeps(scalaXml) + + lazy val httpSprayJson = versionDependentDeps(sprayJson) + + lazy val httpJackson = l ++= Seq(jackson) + + lazy val stream = l ++= Seq[sbt.ModuleID]( + sslConfigAkka, + reactiveStreams, + Test.junitIntf, + Test.scalatest.value) + + lazy val streamTestkit = l ++= Seq(Test.scalatest.value, Test.scalacheck.value, Test.junit) + + lazy val streamTests = l ++= Seq(Test.scalatest.value, Test.scalacheck.value, Test.junit, Test.commonsIo) + + lazy val streamTestsTck = l ++= Seq(Test.scalatest.value, Test.scalacheck.value, Test.junit, Test.reactiveStreamsTck) + +} + +object DependencyHelpers { + case class ScalaVersionDependentModuleID(modules: String => Seq[ModuleID]) { + def %(config: String): ScalaVersionDependentModuleID = + ScalaVersionDependentModuleID(version => modules(version).map(_ % config)) + } + object ScalaVersionDependentModuleID { + implicit def liftConstantModule(mod: ModuleID): ScalaVersionDependentModuleID = versioned(_ => mod) + + def versioned(f: String => ModuleID): ScalaVersionDependentModuleID = ScalaVersionDependentModuleID(v => Seq(f(v))) + def fromPF(f: 
PartialFunction[String, ModuleID]): ScalaVersionDependentModuleID = + ScalaVersionDependentModuleID(version => if (f.isDefinedAt(version)) Seq(f(version)) else Nil) + } + + /** + * Use this as a dependency setting if the dependencies contain both static and Scala-version + * dependent entries. + */ + def versionDependentDeps(modules: ScalaVersionDependentModuleID*): Def.Setting[Seq[ModuleID]] = + libraryDependencies <++= scalaVersion(version => modules.flatMap(m => m.modules(version))) } diff --git a/project/Doc.scala b/project/Doc.scala index 6a1de8219d..00fe10a32a 100644 --- a/project/Doc.scala +++ b/project/Doc.scala @@ -1,11 +1,11 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka import sbt._ import sbtunidoc.Plugin.UnidocKeys._ -import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, scalaJavaUnidocSettings, genjavadocExtraSettings, scalaUnidocSettings } +import sbtunidoc.Plugin.{ ScalaUnidoc, JavaUnidoc, Genjavadoc, scalaJavaUnidocSettings, genjavadocExtraSettings, scalaUnidocSettings } import sbt.Keys._ import sbt.File import scala.annotation.tailrec @@ -99,6 +99,14 @@ object UnidocRoot extends AutoPlugin { override def trigger = noTrigger + val akkaSettings = UnidocRoot.CliOptions.genjavadocEnabled.ifTrue(Seq( + javacOptions in (JavaUnidoc, unidoc) ++= Seq("-Xdoclint:none"), + // genjavadoc needs to generate synthetic methods since the java code uses them + scalacOptions += "-P:genjavadoc:suppressSynthetic=false", + // FIXME: see #18056 + sources in(JavaUnidoc, unidoc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))) + )).getOrElse(Nil) + def settings(ignoreAggregates: Seq[Project], ignoreProjects: Seq[Project]) = { val withoutAggregates = ignoreAggregates.foldLeft(inAnyProject) { _ -- inAggregates(_, transitive = true, includeRoot = true) } val docProjectFilter = ignoreProjects.foldLeft(withoutAggregates) { _ -- inProjects(_) } @@ -112,7 +120,7 @@ object UnidocRoot extends AutoPlugin { override lazy val projectSettings = CliOptions.genjavadocEnabled.ifTrue(scalaJavaUnidocSettings).getOrElse(scalaUnidocSettings) ++ - settings(Seq(AkkaBuild.samples), Seq(AkkaBuild.remoteTests, AkkaBuild.benchJmh)) + settings(Seq(AkkaBuild.samples), Seq(AkkaBuild.remoteTests, AkkaBuild.benchJmh, AkkaBuild.parsing, AkkaBuild.protobuf)) } /** @@ -126,7 +134,9 @@ object Unidoc extends AutoPlugin { override lazy val projectSettings = UnidocRoot.CliOptions.genjavadocEnabled.ifTrue( genjavadocExtraSettings ++ Seq( scalacOptions in Compile += "-P:genjavadoc:fabricateParams=true", - unidocGenjavadocVersion in Global := "0.9" + unidocGenjavadocVersion in Global := "0.9", + // FIXME: see #18056 + sources in(Genjavadoc, doc) ~= (_.filterNot(_.getPath.contains("Access$minusControl$minusAllow$minusOrigin"))) ) ).getOrElse(Seq.empty) } diff --git a/project/MiMa.scala b/project/MiMa.scala index aae3626cef..f51dcf963c 100644 --- a/project/MiMa.scala +++ b/project/MiMa.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka @@ -571,29 +571,26 @@ object MiMa extends AutoPlugin { "2.3.14" -> bcIssuesBetween23and24, "2.4.0" -> Seq( FilterAnyProblem("akka.remote.transport.ProtocolStateActor"), - FilterAnyProblem("akka.persistence.journal.inmem.InmemJournal"), - FilterAnyProblem("akka.persistence.journal.inmem.InmemStore"), //#18353 Changes to methods and fields private to remoting actors ProblemFilters.exclude[MissingMethodProblem]("akka.remote.EndpointManager.retryGateEnabled"), ProblemFilters.exclude[IncompatibleResultTypeProblem]("akka.remote.EndpointManager.pruneTimerCancellable"), - + // #18722 internal changes to actor FilterAnyProblem("akka.cluster.sharding.DDataShardCoordinator"), // #18328 optimize VersionVector for size 1 - FilterAnyProblem("akka.cluster.ddata.VersionVector"), + FilterAnyProblem("akka.cluster.ddata.VersionVector") + ), + "2.4.1" -> Seq( + // #19008 + FilterAnyProblem("akka.persistence.journal.inmem.InmemJournal"), + FilterAnyProblem("akka.persistence.journal.inmem.InmemStore"), // #19133 change in internal actor ProblemFilters.exclude[MissingMethodProblem]("akka.remote.ReliableDeliverySupervisor.gated"), - // debug logging in ReplayFilter, change of internal actor - ProblemFilters.exclude[MissingMethodProblem]("akka.persistence.journal.ReplayFilter.this"), - ProblemFilters.exclude[MissingMethodProblem]("akka.persistence.journal.AsyncWriteJournal.akka$persistence$journal$AsyncWriteJournal$_setter_$akka$persistence$journal$AsyncWriteJournal$$replayDebugEnabled_="), - ProblemFilters.exclude[MissingMethodProblem]("akka.persistence.journal.AsyncWriteJournal.akka$persistence$journal$AsyncWriteJournal$$replayDebugEnabled"), - ProblemFilters.exclude[MissingMethodProblem]("akka.persistence.journal.ReplayFilter.props"), - - // report invalid association events #18758 + // #18758 report invalid association events ProblemFilters.exclude[MissingTypesProblem]("akka.remote.InvalidAssociation$"), ProblemFilters.exclude[MissingMethodProblem]("akka.remote.InvalidAssociation.apply"), ProblemFilters.exclude[MissingMethodProblem]("akka.remote.InvalidAssociation.copy"), @@ -603,7 +600,14 @@ object MiMa extends AutoPlugin { ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$child_="), ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$restartCount"), ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$restartCount_="), - ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$child") + ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.BackoffSupervisor.akka$pattern$BackoffSupervisor$$child"), + + // #19487 + FilterAnyProblem("akka.actor.dungeon.Children"), + + // #19440 + ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.PipeToSupport.pipeCompletionStage"), + ProblemFilters.exclude[MissingMethodProblem]("akka.pattern.FutureTimeoutSupport.afterCompletionStage") ) ) } diff --git a/project/MultiNode.scala b/project/MultiNode.scala index 28fd00bb64..b9e1f2c8ad 100644 --- a/project/MultiNode.scala +++ b/project/MultiNode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka diff --git a/project/OSGi.scala b/project/OSGi.scala index 9d40cc9b5f..f29b396b78 100644 --- a/project/OSGi.scala +++ b/project/OSGi.scala @@ -44,6 +44,32 @@ object OSGi { val remote = exports(Seq("akka.remote.*")) + val parsing = exports(Seq("akka.parboiled2.*", "akka.shapeless.*"), + imports = Seq(optionalResolution("scala.quasiquotes"))) + + val httpCore = exports(Seq("akka.http.*")) + + val http = exports(Seq("akka.http.impl.server", + "akka.http.scaladsl.server.*", "akka.http.javadsl.server.*", + "akka.http.scaladsl.client", "akka.http.scaladsl.coding", "akka.http.scaladsl.common", + "akka.http.scaladsl.marshalling", "akka.http.scaladsl.unmarshalling"), + imports = Seq( + streamAndHttpImport("akka.stream.*"), + streamAndHttpImport("akka.parboiled2.*")) + ) + + val httpTestkit = exports(Seq("akka.http.scaladsl.testkit.*", "akka.http.javadsl.testkit.*")) + + val httpSprayJson = exports(Seq("akka.http.scaladsl.marshallers.sprayjson")) + + val httpXml = exports(Seq("akka.http.scaladsl.marshallers.xml")) + + val httpJackson = exports(Seq("akka.http.javadsl.marshallers.jackson")) + + val stream = exports(Seq("akka.stream.*")) + + val streamTestkit = exports(Seq("akka.stream.testkit.*")) + val slf4j = exports(Seq("akka.event.slf4j.*")) val persistence = exports(Seq("akka.persistence.*"), @@ -65,6 +91,7 @@ object OSGi { ) def defaultImports(scalaVersion: String) = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(scalaVersion), "*") def akkaImport(packageName: String = "akka.*") = versionedImport(packageName, "2.4", "2.5") + def streamAndHttpImport(packageName: String) = versionedImport(packageName, "2.0", "2.4") // TODO not sure about the range def configImport(packageName: String = "com.typesafe.config.*") = versionedImport(packageName, "1.3.0", "1.4.0") def scalaImport(version: String) = { val packageName = "scala.*" diff --git a/project/Protobuf.scala b/project/Protobuf.scala index 85fb07ac05..dbd6209bea 100644 --- a/project/Protobuf.scala +++ b/project/Protobuf.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/project/Publish.scala b/project/Publish.scala index 9447532375..52e3644090 100644 --- a/project/Publish.scala +++ b/project/Publish.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/project/SigarLoader.scala b/project/SigarLoader.scala index c9513946fe..a1c91f0f04 100644 --- a/project/SigarLoader.scala +++ b/project/SigarLoader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. 
*/ package akka diff --git a/project/TestExtras.scala b/project/TestExtras.scala index 7708668b47..ec7b3b05c3 100644 --- a/project/TestExtras.scala +++ b/project/TestExtras.scala @@ -21,202 +21,6 @@ object TestExtras { ) } - object GraphiteBuildEvents { - val graphite = config("graphite") - - val enabled = settingKey[Boolean]("Set to true when you want to send build events to graphite; Enable with `-Dakka.sbt.graphite=true`") - - val host = settingKey[String]("Host where graphite is located (ip, or hostname)") - - val port = settingKey[Int]("Port on which graphite is listening, defaults to 80") - - private val notifier = settingKey[Option[GraphiteBuildNotifier]]("Notifies graphite about this build") - - val settings = SbtGit.settings ++ SbtGit.projectSettings ++ Seq( - enabled in graphite := sys.props("akka.sbt.graphite") == "true", - host in graphite := sys.props.get("akka.sbt.graphite.host").getOrElse("54.72.154.120"), - port in graphite := sys.props.get("akka.sbt.graphite.port").flatMap(p => Try(p.toInt).toOption).getOrElse(80), - - notifier := (enabled.in(graphite).value match { - case true => Some(new GraphiteBuildNotifier(gitCurrentBranch.value, gitHeadCommit.value, host.in(graphite).value, port.in(graphite).value)) - case _ => None - }), - - // this wraps the test task in order to send events before and after it - test in Test := Def.settingDyn { - val g = notifier.value - g.foreach(_.start()) - - // todo support complete(failed / successful) - val task = (test in Test).taskValue andFinally { g.foreach(_.complete()) } - - Def.setting(task) - }.value - ) - - /** - * Notifies graphite by sending an *event*, when a build starts. - * It will be tagged as "akka-build" and "branch:...", for filtering in UIs. - * - * Event includes branch and commit id of the build that is running. - */ - class GraphiteBuildNotifier(branch: String, commitId: Option[String], host: String, port: Int) { - - private val url = new URL(s"http://$host:$port/events/") - - private val hostname = InetAddress.getLocalHost.getHostName - - private val marker = branch + commitId.fold("")(id => s" @ $id") - - private def json(what: String, tag: String, data: String = "") = - s"""{ "what": "$what", "tags": "akka-build,branch:${sanitize(branch)},$tag", "data": "$data"}""".stripMargin - - def start(): Unit = send(s"Build started: $marker", data = "host = " + hostname, tag = "started") - - def complete(): Unit = send(s"Build completed: $marker", data = "host = " + hostname, tag = "completed") - - def send(msg: String, data: String, tag: String) = try { - // specifically not using Akka-IO (even though I'd love to), in order to not make the akka build depend on akka itself - val con = url.openConnection().asInstanceOf[HttpURLConnection] - try { - val bytes = json(msg, data, tag).getBytes("UTF-8") - con.setDoOutput(true) // triggers POST - con.connect() - - val out = new DataOutputStream(con.getOutputStream) - try { - out.write(bytes) - out.flush() - - // sigh, if left un-consumed graphite wouldn't take the write (*really*)! 
- consume(con) - - } finally { - out.close() - } - } finally { - con.disconnect() - } - } - - private def sanitize(s: String): String = s.replaceAll("""[^\w]+""", "-") - - private def consume(con: HttpURLConnection) { - val in = new BufferedReader(new InputStreamReader(con.getInputStream)) - var inputLine = "" - try { - while (inputLine != null) { - inputLine = in.readLine() - } - } finally { - in.close() - } - } - } - } - - object StatsDMetrics { - - val statsd = config("statsd") - - val enabled = settingKey[Boolean]("Set to true when you want to send stats to statsd; Enable with `-Dakka.sbt.statsd=true`") - - val prefix = settingKey[String]("Prefix given to all metrics sent to statsd") - - val host = settingKey[String]("Host where statsd is located (ip, or hostname)") - - val port = settingKey[Int]("Port on which statsd is listening, defaults to 8125") - - - val settings = Seq( - // configuration - enabled in statsd := sys.props("akka.sbt.statsd") == "true", - prefix in statsd := Option(sys.props("akka.sbt.statsd.prefix")).getOrElse("akka_master"), - host in statsd := Option(sys.props("akka.sbt.statsd.host")).getOrElse("54.72.154.120"), - port in statsd := Option(sys.props("akka.sbt.statsd.port")).flatMap(p => Try(p.toInt).toOption).getOrElse(8125), - - testListeners in(Test, test) ++= { - // for `test` - enabled.in(statsd).value match { - case true => Seq(StatsDTestListener(streams.value.log, prefix.in(statsd).value, host.in(statsd).value, port.in(statsd).value)) - case _ => Nil - } - }, - testListeners ++= { - // for `testOnly` - enabled.in(statsd).value match { - case true => Seq(StatsDTestListener(streams.value.log, prefix.in(statsd).value, host.in(statsd).value, port.in(statsd).value)) - case _ => Nil - } - } - ) - - case class StatsDTestListener(log: Logger, prefix: String, host: String, port: Int) extends TestsListener { - - var client: NonBlockingStatsDClient = _ - - override def doInit(): Unit = { - log.info(s"Initialised StatsDTestsListener (sending stats to $host:$port)") - client = new NonBlockingStatsDClient(prefix, host, port, new StatsDClientErrorHandler { - override def handle(exception: Exception): Unit = log.error(exception.toString) - }) - } - - override def testEvent(event: TestEvent) { - event.detail foreach { det => - det.status match { - case Status.Success => - client.incrementCounter(testCounterKey(det, det.status)) - client.recordExecutionTime(testTimerKey(det), det.duration.toInt) - - case status => - client.incrementCounter(testCounterKey(det, status)) - } - } - } - - override def endGroup(name: String, result: TestResult.Value) { - // manual switch instead of toStringing class name all the time - result match { - case TestResult.Passed => client.incrementCounter(keySuccess(name)) - case TestResult.Failed => client.incrementCounter(keyFail(name)) - case TestResult.Error => client.incrementCounter(keyError(name)) - } - } - - override def endGroup(name: String, t: Throwable) { - client.incrementCounter(keyError(name)) - } - - override def startGroup(name: String) { - // do nothing - } - - override def doComplete(finalResult: TestResult.Value): Unit = { - log.debug("Final test run result: " + finalResult) - log.info("Shutting down StatsDTestsListener client...") - if (client != null) - client.stop() - } - - private def testTimerKey(det: Event): String = s"${det.fullyQualifiedName}.${testSelectorToId(det.selector)}" - - private def testSelectorToId(sel: testing.Selector): String = sanitize(sel.asInstanceOf[TestSelector].testName()) - - private def 
testCounterKey(det: Event, status: Status): String = s"${sanitize(det.fullyQualifiedName)}.${status.toString.toLowerCase}" - - private def keySuccess(fullyQualifiedName: String): String = fullyQualifiedName + ".success" - - private def keyFail(fullyQualifiedName: String): String = fullyQualifiedName + ".fail" - - private def keyError(fullyQualifiedName: String): String = fullyQualifiedName + ".error" - - private def sanitize(s: String): String = s.replaceAll("""[^\w]""", "_") - - } - - } - object Filter { object Keys { val excludeTestNames = settingKey[Set[String]]("Names of tests to be excluded. Not supported by MultiJVM tests. Example usage: -Dakka.test.names.exclude=TimingSpec") diff --git a/project/TimeStampede.scala b/project/TimeStampede.scala index 85374e9461..11033180b8 100644 --- a/project/TimeStampede.scala +++ b/project/TimeStampede.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka diff --git a/project/ValidatePullRequest.scala b/project/ValidatePullRequest.scala index 78fcd607e5..8a721b7c5b 100644 --- a/project/ValidatePullRequest.scala +++ b/project/ValidatePullRequest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2015 Typesafe Inc. + * Copyright (C) 2009-2016 Typesafe Inc. */ package akka @@ -65,6 +65,8 @@ object ValidatePullRequest extends AutoPlugin { val SourcePullIdJenkinsEnvVarName = "ghprbPullId" // used to obtain branch name in form of "pullreq/17397" val sourceBranch = settingKey[String]("Branch containing the changes of this PR") + val targetBranch = settingKey[String]("Target branch of this PR, defaults to `master`") + // asking github comments if this PR should be PLS BUILD ALL val githubEnforcedBuildAll = taskKey[Option[BuildMode]]("Checks via GitHub API if comments included the PLS BUILD ALL keyword") val buildAllKeyword = taskKey[Regex]("Magic phrase to be used to trigger building of the entire project instead of analysing dependencies") @@ -97,6 +99,11 @@ object ValidatePullRequest extends AutoPlugin { } } + def localTargetBranch: Option[String] = sys.env.get("PR_TARGET_BRANCH") + def jenkinsTargetBranch: Option[String] = sys.env.get("ghprbTargetBranch") + def runningOnJenkins: Boolean = jenkinsTargetBranch.isDefined + def runningLocally: Boolean = !runningOnJenkins + override lazy val buildSettings = Seq( sourceBranch in Global in ValidatePR := { sys.env.get(SourceBranchEnvVarName) orElse @@ -104,6 +111,14 @@ object ValidatePullRequest extends AutoPlugin { "HEAD" }, + targetBranch in Global in ValidatePR := { + (localTargetBranch, jenkinsTargetBranch) match { + case (Some(local), _) => local // local override + case (None, Some(branch)) => s"origin/$branch" // usually would be "master" or "release-2.3" etc + case (None, None) => "origin/master" // defaulting to diffing with "master" + } + }, + buildAllKeyword in Global in ValidatePR := """PLS BUILD ALL""".r, githubEnforcedBuildAll in Global in ValidatePR := { @@ -134,18 +149,34 @@ object ValidatePullRequest extends AutoPlugin { val prId = (sourceBranch in ValidatePR).value + val target = (targetBranch in ValidatePR).value + // TODO could use jgit log.info(s"Diffing [$prId] to determine changed modules in PR...") - val gitOutput = "git diff HEAD^ --name-only".!!.split("\n") - - val moduleNames = - gitOutput + val diffOutput = s"git diff $target --name-only".!!.split("\n") + val diffedModuleNames = + diffOutput .map(l ⇒ l.trim.takeWhile(_ != '/')) .filter(dir => dir.startsWith("akka-") || dir == "project") .toSet - log.info("Detected 
changes in directories: " + moduleNames.mkString("[", ", ", "]")) - moduleNames + val dirtyModuleNames: Set[String] = + if (runningOnJenkins) Set.empty + else { + val statusOutput = s"git status --short".!!.split("\n") + val dirtyDirectories = statusOutput + .map(l ⇒ l.trim.dropWhile(_ != ' ').drop(1)) + .map(_.takeWhile(_ != '/')) + .filter(dir => dir.startsWith("akka-") || dir == "project") + .toSet + log.info("Detected uncommitted changes in directories (including in dependency analysis): " + dirtyDirectories.mkString("[", ",", "]")) + dirtyDirectories + } + + + val allModuleNames = dirtyModuleNames ++ diffedModuleNames + log.info("Detected changes in directories: " + allModuleNames.mkString("[", ", ", "]")) + allModuleNames } ) diff --git a/project/build.properties b/project/build.properties index 748703f770..817bc38df8 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.7 +sbt.version=0.13.9
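Note: the `targetBranch` setting added to `project/ValidatePullRequest.scala` above resolves the diff target in three steps (a local `PR_TARGET_BRANCH` override, then Jenkins' `ghprbTargetBranch`, then `origin/master`). The snippet below is a minimal standalone sketch of that resolution order; it is not part of the patch, and the object and method names are illustrative only.

```
// Illustrative sketch only -- not part of the patch. It mirrors the resolution
// order of the new `targetBranch` setting in project/ValidatePullRequest.scala:
// a local PR_TARGET_BRANCH override wins, then Jenkins' ghprbTargetBranch
// (prefixed with origin/), and finally the default origin/master.
object TargetBranchSketch {
  def resolve(env: Map[String, String] = sys.env): String =
    env.get("PR_TARGET_BRANCH") match {
      case Some(local) => local                          // local override, e.g. "origin/example"
      case None =>
        env.get("ghprbTargetBranch") match {
          case Some(branch) => s"origin/$branch"         // Jenkins-provided PR target branch
          case None         => "origin/master"           // default when running locally
        }
    }

  def main(args: Array[String]): Unit =
    // the validatePullRequest task shells out to a command of this shape
    println(s"git diff ${resolve()} --name-only")
}
```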