Merge branch 'master' into wip-sync-artery-dev-2.4.9-patriknw

This commit is contained in:
Patrik Nordwall 2016-08-23 20:14:15 +02:00
commit 8ab02738b7
483 changed files with 9535 additions and 2177 deletions

View file

@ -25,6 +25,9 @@ public final class ContentTypes {
// `text/xml` with UTF-8 charset, delegating to the Scala DSL predefined constant
// (the mangled method name is the Scala-encoded form of `text/xml(UTF-8)`).
public static final ContentType.WithCharset TEXT_XML_UTF8 =
akka.http.scaladsl.model.ContentTypes.text$divxml$u0028UTF$minus8$u0029();
// `text/csv` with UTF-8 charset, delegating to the Scala DSL predefined constant.
public static final ContentType.WithCharset TEXT_CSV_UTF8 =
akka.http.scaladsl.model.ContentTypes.text$divcsv$u0028UTF$minus8$u0029();
/**
 * Creates a binary {@link ContentType} for the given binary media type.
 */
public static ContentType.Binary create(MediaType.Binary mediaType) {
// Cast bridges from the javadsl to the scaladsl MediaType hierarchy before
// handing off to the Scala ContentType factory.
return ContentType$.MODULE$.apply((akka.http.scaladsl.model.MediaType.Binary) mediaType);
}

View file

@ -4,12 +4,12 @@
package akka.http.javadsl.model;
import akka.Done;
import akka.http.impl.util.Util;
import akka.http.javadsl.model.headers.EntityTagRanges;
import akka.http.scaladsl.model.HttpEntity$;
import akka.stream.Materializer;
import akka.stream.javadsl.Source;
import akka.util.ByteString;
import scala.concurrent.Future;
import java.util.OptionalLong;
import java.util.concurrent.CompletionStage;
@ -142,6 +142,43 @@ public interface HttpEntity {
*/
CompletionStage<HttpEntity.Strict> toStrict(long timeoutMillis, Materializer materializer);
/**
* Discards the entity's data bytes by running the {@code dataBytes} Source contained in this entity.
*
* Note: It is crucial that entities are either discarded, or consumed by running the underlying [[Source]]
* as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the {@code dataBytes} more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with a
* "stream cannot be materialized more than once" exception.
*
* In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716.
*/
HttpMessage.DiscardedEntity discardBytes(Materializer materializer);
/**
* Represents the currently being-drained HTTP Entity which triggers completion of the contained
* Future once the entity has been drained for the given HttpMessage completely.
*/
interface DiscardedEntity {
/**
* This future completes successfully once the underlying entity stream has been
* successfully drained (and fails otherwise).
*/
Future<Done> future();
/**
* Java API: the same completion signal as {@link #future()}, exposed as a
* {@link CompletionStage} — completes successfully once the underlying entity
* stream has been successfully drained (and fails otherwise).
*/
CompletionStage<Done> completionStage();
}
/**
* The entity type which consists of a predefined fixed ByteString of data.
*/

View file

@ -4,16 +4,13 @@
package akka.http.javadsl.model;
import akka.Done;
import akka.stream.Materializer;
import akka.http.javadsl.model.headers.HttpCredentials;
import akka.util.ByteString;
import scala.concurrent.Future;
import java.io.File;
import java.nio.file.Path;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
/**
* The base type for an Http message (request or response).
@ -70,7 +67,7 @@ public interface HttpMessage {
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the the {@code entity.dataBytes} more than once
* Warning: It is not allowed to discard and/or consume the {@code entity.dataBytes} more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an
@ -81,21 +78,10 @@ public interface HttpMessage {
DiscardedEntity discardEntityBytes(Materializer materializer);
/**
* Represents the the currently being-drained HTTP Entity which triggers completion of the contained
* Represents the currently being-drained HTTP Entity which triggers completion of the contained
* Future once the entity has been drained for the given HttpMessage completely.
*/
interface DiscardedEntity {
/**
* This future completes successfully once the underlying entity stream has been
* successfully drained (and fails otherwise).
*/
Future<Done> future();
/**
* This future completes successfully once the underlying entity stream has been
* successfully drained (and fails otherwise).
*/
CompletionStage<Done> completionStage();
interface DiscardedEntity extends HttpEntity.DiscardedEntity {
}
interface MessageTransformations<Self> {

View file

@ -42,6 +42,14 @@ public abstract class HttpMethod {
/**
* Returns the entity acceptance level for this method.
*
* @deprecated Use {@link #getRequestEntityAcceptance} instead, which returns {@link akka.http.javadsl.model.RequestEntityAcceptance}.
*/
@Deprecated
public abstract akka.http.scaladsl.model.RequestEntityAcceptance requestEntityAcceptance();
/**
* Java API: Returns the entity acceptance level for this method,
* expressed with the javadsl {@link akka.http.javadsl.model.RequestEntityAcceptance} type.
*/
// TODO: Rename it to requestEntityAcceptance() in Akka 3.0
public abstract akka.http.javadsl.model.RequestEntityAcceptance getRequestEntityAcceptance();
}

View file

@ -27,11 +27,24 @@ public final class HttpMethods {
/**
 * Create a custom method type.
 *
 * @deprecated Use {@link #createCustom} instead.
 */
@Deprecated
public static HttpMethod custom(String value, boolean safe, boolean idempotent, akka.http.scaladsl.model.RequestEntityAcceptance requestEntityAcceptance) {
    // Plain delegation to the Scala DSL factory.
    final HttpMethod created =
        akka.http.scaladsl.model.HttpMethod.custom(value, safe, idempotent, requestEntityAcceptance);
    return created;
}
/**
 * Create a custom method type.
 */
// TODO: Rename it to custom() in Akka 3.0
public static HttpMethod createCustom(String value, boolean safe, boolean idempotent, akka.http.javadsl.model.RequestEntityAcceptance requestEntityAcceptance) {
    // The downcast is safe: implementations of RequestEntityAcceptance only exist in Scala.
    return akka.http.scaladsl.model.HttpMethod.custom(
        value, safe, idempotent,
        (akka.http.scaladsl.model.RequestEntityAcceptance) requestEntityAcceptance);
}
/**
* Looks up a predefined HTTP method with the given name.
*/

View file

@ -0,0 +1,14 @@
/**
* Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.javadsl.model;
/**
* Describes whether an HTTP method accepts a request entity
* (see {@link #isEntityAccepted()}).
*
* @see RequestEntityAcceptances for convenience access to often used values.
*
* Note: do not extend this with a concrete Java class,
* as implementations of RequestEntityAcceptance should only exist in Scala.
*/
public abstract class RequestEntityAcceptance {
public abstract boolean isEntityAccepted();
}

View file

@ -0,0 +1,14 @@
/**
* Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.javadsl.model;
public final class RequestEntityAcceptances {
private RequestEntityAcceptances() {}
public static final RequestEntityAcceptance Expected = akka.http.scaladsl.model.RequestEntityAcceptance.Expected$.MODULE$;
public static final RequestEntityAcceptance Tolerated = akka.http.scaladsl.model.RequestEntityAcceptance.Tolerated$.MODULE$;
public static final RequestEntityAcceptance Disallowed = akka.http.scaladsl.model.RequestEntityAcceptance.Disallowed$.MODULE$;
}

View file

@ -0,0 +1,13 @@
/**
* Copyright (C) 2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.javadsl.model.headers;
/**
* Model for the `Content-Length` header.
* Specification: https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-26#section-3.3.2
*/
public abstract class ContentLength extends akka.http.scaladsl.model.HttpHeader {
public abstract long length();
}

View file

@ -5,6 +5,8 @@
package akka.http.javadsl.model.headers;
import akka.http.impl.util.Util;
import akka.japi.Option;
import java.util.Map;
@ -14,11 +16,28 @@ public abstract class HttpChallenge {
public abstract Map<String, String> getParams();
/**
* @deprecated Use constructor with optional realm parameter instead.
*/
@Deprecated
public static HttpChallenge create(String scheme, String realm) {
return new akka.http.scaladsl.model.headers.HttpChallenge(scheme, realm, Util.emptyMap);
return akka.http.scaladsl.model.headers.HttpChallenge.apply(scheme, scala.Option.apply(realm), Util.emptyMap);
}
/**
* @deprecated Use constructor with optional realm parameter instead.
*/
@Deprecated
public static HttpChallenge create(String scheme, String realm, Map<String, String> params) {
return new akka.http.scaladsl.model.headers.HttpChallenge(scheme, realm, Util.convertMapToScala(params));
return akka.http.scaladsl.model.headers.HttpChallenge.apply(scheme, scala.Option.apply(realm), Util.convertMapToScala(params));
}
public static HttpChallenge create(String scheme, Option<String> realm) {
return akka.http.scaladsl.model.headers.HttpChallenge.apply(scheme, realm.asScala(), Util.emptyMap);
}
public static HttpChallenge create(String scheme, Option<String> realm, Map<String, String> params) {
return akka.http.scaladsl.model.headers.HttpChallenge.apply(scheme, realm.asScala(), Util.convertMapToScala(params));
}
public static HttpChallenge createBasic(String realm) {

View file

@ -11,18 +11,18 @@ import akka.http.impl.util.Util;
* @see HttpOriginRanges for convenience access to often used values.
*/
public abstract class HttpOriginRange {
public abstract boolean matches(HttpOrigin origin);
public abstract boolean matches(HttpOrigin origin);
public static HttpOriginRange create(HttpOrigin... origins) {
return HttpOriginRange$.MODULE$.apply(Util.<HttpOrigin, akka.http.scaladsl.model.headers.HttpOrigin>convertArray(origins));
}
public static HttpOriginRange create(HttpOrigin... origins) {
return HttpOriginRange$.MODULE$.apply(Util.<HttpOrigin, akka.http.scaladsl.model.headers.HttpOrigin>convertArray(origins));
}
/**
* @deprecated because of troublesome initialisation order (with regards to scaladsl class implementing this class).
* In some edge cases this field could end up containing a null value.
* Will be removed in Akka 3.x, use {@link HttpOriginRanges#ALL} instead.
*/
@Deprecated
// FIXME: Remove in Akka 3.0
public static final HttpOriginRange ALL = HttpOriginRanges.ALL;
/**
* @deprecated because of troublesome initialisation order (with regards to scaladsl class implementing this class).
* In some edge cases this field could end up containing a null value.
* Will be removed in Akka 3.x, use {@link HttpOriginRanges#ALL} instead.
*/
@Deprecated
// FIXME: Remove in Akka 3.0
public static final HttpOriginRange ALL = HttpOriginRanges.ALL;
}

View file

@ -286,7 +286,7 @@ akka.http {
#
# IMPORTANT:
# Please note that this sections settings can be overriden by the corresponding settings in:
# `akka.http.server.parsing`, `akka.http.client.parsing` or `akka.http.http-connection-pool.client.parsing`.
# `akka.http.server.parsing`, `akka.http.client.parsing` or `akka.http.host-connection-pool.client.parsing`.
parsing {
# The limits for the various parts of the HTTP message parser.
max-uri-length = 2k
@ -352,6 +352,15 @@ akka.http {
# `full` : the full error details (potentially spanning several lines) are logged
error-logging-verbosity = full
# Configures the processing mode when encountering illegal characters in
# header value of response.
#
# Supported modes:
# `error` : default mode, throw a ParsingException and terminate the processing
# `warn` : ignore the illegal characters in response header value and log a warning message
# `ignore` : just ignore the illegal characters in response header value
illegal-response-header-value-processing-mode = error
# limits for the number of different values per header type that the
# header cache will hold
header-cache {

View file

@ -83,7 +83,7 @@ private[http] object OutgoingConnectionBlueprint {
val responseParsingMerge = b.add {
// the initial header parser we initially use for every connection,
// will not be mutated, all "shared copy" parsers copy on first-write into the header cache
val rootParser = new HttpResponseParser(parserSettings, HttpHeaderParser(parserSettings) { info
val rootParser = new HttpResponseParser(parserSettings, HttpHeaderParser(parserSettings, log) { info
if (parserSettings.illegalHeaderWarnings)
logParsingError(info withSummaryPrepended "Illegal response header", log, parserSettings.errorLoggingVerbosity)
})

View file

@ -58,7 +58,7 @@ private[http] final class BodyPartParser(
private[this] val boyerMoore = new BoyerMoore(needle)
// TODO: prevent re-priming header parser from scratch
private[this] val headerParser = HttpHeaderParser(settings) { errorInfo
private[this] val headerParser = HttpHeaderParser(settings, log) { errorInfo
if (illegalHeaderWarnings) log.warning(errorInfo.withSummaryPrepended("Illegal multipart header").formatPretty)
}

View file

@ -7,6 +7,10 @@ package akka.http.impl.engine.parsing
import java.nio.{ CharBuffer, ByteBuffer }
import java.util.Arrays.copyOf
import java.lang.{ StringBuilder JStringBuilder }
import akka.event.LoggingAdapter
import akka.http.scaladsl.settings.ParserSettings.IllegalResponseHeaderValueProcessingMode
import akka.http.scaladsl.settings.ParserSettings
import scala.annotation.tailrec
import akka.parboiled2.CharUtils
import akka.util.ByteString
@ -60,6 +64,7 @@ import akka.http.impl.model.parser.CharacterClasses._
*/
private[engine] final class HttpHeaderParser private (
val settings: HttpHeaderParser.Settings,
val log: LoggingAdapter,
onIllegalHeader: ErrorInfo Unit,
private[this] var nodes: Array[Char] = new Array(512), // initial size, can grow as needed
private[this] var nodeCount: Int = 0,
@ -85,7 +90,7 @@ private[engine] final class HttpHeaderParser private (
* Returns a copy of this parser that shares the trie data with this instance.
*/
def createShallowCopy(): HttpHeaderParser =
new HttpHeaderParser(settings, onIllegalHeader, nodes, nodeCount, branchData, branchDataCount, values, valueCount)
new HttpHeaderParser(settings, log, onIllegalHeader, nodes, nodeCount, branchData, branchDataCount, values, valueCount)
/**
* Parses a header line and returns the line start index of the subsequent line.
@ -145,12 +150,14 @@ private[engine] final class HttpHeaderParser private (
val colonIx = scanHeaderNameAndReturnIndexOfColon(input, lineStart, lineStart + 1 + maxHeaderNameLength)(cursor)
val headerName = asciiString(input, lineStart, colonIx)
try {
val valueParser = new RawHeaderValueParser(headerName, maxHeaderValueLength, headerValueCacheLimit(headerName))
val valueParser = new RawHeaderValueParser(headerName, maxHeaderValueLength,
headerValueCacheLimit(headerName), log, illegalResponseHeaderValueProcessingMode)
insert(input, valueParser)(cursor, colonIx + 1, nodeIx, colonIx)
parseHeaderLine(input, lineStart)(cursor, nodeIx)
} catch {
case OutOfTrieSpaceException // if we cannot insert we drop back to simply creating new header instances
val (headerValue, endIx) = scanHeaderValue(this, input, colonIx + 1, colonIx + maxHeaderValueLength + 3)()
val (headerValue, endIx) = scanHeaderValue(this, input, colonIx + 1, colonIx + maxHeaderValueLength + 3,
log, settings.illegalResponseHeaderValueProcessingMode)()
resultHeader = RawHeader(headerName, headerValue.trim)
endIx
}
@ -413,6 +420,7 @@ private[http] object HttpHeaderParser {
def maxHeaderValueLength: Int
def headerValueCacheLimit(headerName: String): Int
def customMediaTypes: MediaTypes.FindCustom
def illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode
}
private def predefinedHeaders = Seq(
@ -426,16 +434,16 @@ private[http] object HttpHeaderParser {
"Cache-Control: no-cache",
"Expect: 100-continue")
def apply(settings: HttpHeaderParser.Settings)(onIllegalHeader: ErrorInfo Unit = info throw IllegalHeaderException(info)) =
prime(unprimed(settings, onIllegalHeader))
def apply(settings: HttpHeaderParser.Settings, log: LoggingAdapter)(onIllegalHeader: ErrorInfo Unit = info throw IllegalHeaderException(info)) =
prime(unprimed(settings, log, onIllegalHeader))
def unprimed(settings: HttpHeaderParser.Settings, warnOnIllegalHeader: ErrorInfo Unit) =
new HttpHeaderParser(settings, warnOnIllegalHeader)
def unprimed(settings: HttpHeaderParser.Settings, log: LoggingAdapter, warnOnIllegalHeader: ErrorInfo Unit) =
new HttpHeaderParser(settings, log, warnOnIllegalHeader)
def prime(parser: HttpHeaderParser): HttpHeaderParser = {
val valueParsers: Seq[HeaderValueParser] =
HeaderParser.ruleNames.map { name
new ModeledHeaderValueParser(name, parser.settings.maxHeaderValueLength, parser.settings.headerValueCacheLimit(name), parser.settings)
new ModeledHeaderValueParser(name, parser.settings.maxHeaderValueLength, parser.settings.headerValueCacheLimit(name), parser.log, parser.settings)
}(collection.breakOut)
def insertInGoodOrder(items: Seq[Any])(startIx: Int = 0, endIx: Int = items.size): Unit =
if (endIx - startIx > 0) {
@ -470,11 +478,11 @@ private[http] object HttpHeaderParser {
def cachingEnabled = maxValueCount > 0
}
private[parsing] class ModeledHeaderValueParser(headerName: String, maxHeaderValueLength: Int, maxValueCount: Int, settings: HeaderParser.Settings)
private[parsing] class ModeledHeaderValueParser(headerName: String, maxHeaderValueLength: Int, maxValueCount: Int, log: LoggingAdapter, settings: HeaderParser.Settings)
extends HeaderValueParser(headerName, maxValueCount) {
def apply(hhp: HttpHeaderParser, input: ByteString, valueStart: Int, onIllegalHeader: ErrorInfo Unit): (HttpHeader, Int) = {
// TODO: optimize by running the header value parser directly on the input ByteString (rather than an extracted String)
val (headerValue, endIx) = scanHeaderValue(hhp, input, valueStart, valueStart + maxHeaderValueLength + 2)()
// TODO: optimize by running the header value parser directly on the input ByteString (rather than an extracted String); seems done?
val (headerValue, endIx) = scanHeaderValue(hhp, input, valueStart, valueStart + maxHeaderValueLength + 2, log, settings.illegalResponseHeaderValueProcessingMode)()
val trimmedHeaderValue = headerValue.trim
val header = HeaderParser.parseFull(headerName, trimmedHeaderValue, settings) match {
case Right(h) h
@ -486,10 +494,10 @@ private[http] object HttpHeaderParser {
}
}
private[parsing] class RawHeaderValueParser(headerName: String, maxHeaderValueLength: Int, maxValueCount: Int)
extends HeaderValueParser(headerName, maxValueCount) {
private[parsing] class RawHeaderValueParser(headerName: String, maxHeaderValueLength: Int, maxValueCount: Int,
log: LoggingAdapter, mode: IllegalResponseHeaderValueProcessingMode) extends HeaderValueParser(headerName, maxValueCount) {
def apply(hhp: HttpHeaderParser, input: ByteString, valueStart: Int, onIllegalHeader: ErrorInfo Unit): (HttpHeader, Int) = {
val (headerValue, endIx) = scanHeaderValue(hhp, input, valueStart, valueStart + maxHeaderValueLength + 2)()
val (headerValue, endIx) = scanHeaderValue(hhp, input, valueStart, valueStart + maxHeaderValueLength + 2, log, mode)()
RawHeader(headerName, headerValue.trim) endIx
}
}
@ -503,15 +511,16 @@ private[http] object HttpHeaderParser {
}
else fail(s"HTTP header name exceeds the configured limit of ${limit - start - 1} characters")
@tailrec private def scanHeaderValue(hhp: HttpHeaderParser, input: ByteString, start: Int,
limit: Int)(sb: JStringBuilder = null, ix: Int = start): (String, Int) = {
@tailrec private def scanHeaderValue(hhp: HttpHeaderParser, input: ByteString, start: Int, limit: Int, log: LoggingAdapter,
mode: IllegalResponseHeaderValueProcessingMode)(sb: JStringBuilder = null, ix: Int = start): (String, Int) = {
def appended(c: Char) = (if (sb != null) sb else new JStringBuilder(asciiString(input, start, ix))).append(c)
def appended2(c: Int) = if ((c >> 16) != 0) appended(c.toChar).append((c >> 16).toChar) else appended(c.toChar)
if (ix < limit)
byteChar(input, ix) match {
case '\t' scanHeaderValue(hhp, input, start, limit)(appended(' '), ix + 1)
case '\t' scanHeaderValue(hhp, input, start, limit, log, mode)(appended(' '), ix + 1)
case '\r' if byteChar(input, ix + 1) == '\n'
if (WSP(byteChar(input, ix + 2))) scanHeaderValue(hhp, input, start, limit)(appended(' '), ix + 3)
if (WSP(byteChar(input, ix + 2))) scanHeaderValue(hhp, input, start, limit, log, mode)(appended(' '), ix + 3)
else (if (sb != null) sb.toString else asciiString(input, start, ix), ix + 2)
case c
var nix = ix + 1
@ -544,8 +553,21 @@ private[http] object HttpHeaderParser {
case -1 if (sb != null) sb.append(c).append(byteChar(input, ix + 1)).append(byteChar(input, ix + 2)).append(byteChar(input, ix + 3)) else null
case cc appended2(cc)
}
} else fail(s"Illegal character '${escape(c)}' in header value")
scanHeaderValue(hhp, input, start, limit)(nsb, nix)
} else {
mode match {
case ParserSettings.IllegalResponseHeaderValueProcessingMode.Error
fail(s"Illegal character '${escape(c)}' in header value")
case ParserSettings.IllegalResponseHeaderValueProcessingMode.Warn
// ignore the illegal character and log a warning message
log.warning(s"Illegal character '${escape(c)}' in header value")
sb
case ParserSettings.IllegalResponseHeaderValueProcessingMode.Ignore
// just ignore the illegal character
sb
}
}
scanHeaderValue(hhp, input, start, limit, log, mode)(nsb, nix)
}
else fail(s"HTTP header value exceeds the configured limit of ${limit - start - 2} characters")
}
@ -569,4 +591,4 @@ private[http] object HttpHeaderParser {
def withValueCountIncreased = copy(valueCount = valueCount + 1)
def spaceLeft = valueCount < parser.maxValueCount
}
}
}

View file

@ -23,52 +23,35 @@ import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
/**
* INTERNAL API
*
* Common logic for http request and response message parsing
*/
private[http] abstract class HttpMessageParser[Output >: MessageOutput <: ParserOutput](
val settings: ParserSettings,
val headerParser: HttpHeaderParser) { self
import HttpMessageParser._
import settings._
private[http] trait HttpMessageParser[Output >: MessageOutput <: ParserOutput] {
private[this] val result = new ListBuffer[Output]
import HttpMessageParser._
protected final val result = new ListBuffer[Output]
private[this] var state: ByteString StateResult = startNewMessage(_, 0)
private[this] var protocol: HttpProtocol = `HTTP/1.1`
private[this] var completionHandling: CompletionHandling = CompletionOk
private[this] var terminated = false
protected var completionHandling: CompletionHandling = CompletionOk
protected var terminated = false
private[this] var lastSession: SSLSession = null // used to prevent having to recreate header on each message
private[this] var tlsSessionInfoHeader: `Tls-Session-Info` = null
def initialHeaderBuffer: ListBuffer[HttpHeader] =
protected def settings: ParserSettings
protected def headerParser: HttpHeaderParser
/** invoked if the specified protocol is unknown */
protected def onBadProtocol(): Nothing
protected def parseMessage(input: ByteString, offset: Int): HttpMessageParser.StateResult
protected def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): HttpMessageParser.StateResult
protected final def initialHeaderBuffer: ListBuffer[HttpHeader] =
if (settings.includeTlsSessionInfoHeader && tlsSessionInfoHeader != null) ListBuffer(tlsSessionInfoHeader)
else ListBuffer()
// Note that this GraphStage mutates the HttpMessageParser instance, use with caution.
val stage = new GraphStage[FlowShape[SessionBytes, Output]] {
val in: Inlet[SessionBytes] = Inlet("HttpMessageParser.in")
val out: Outlet[Output] = Outlet("HttpMessageParser.out")
override val shape: FlowShape[SessionBytes, Output] = FlowShape(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) with InHandler with OutHandler {
override def onPush(): Unit = handleParserOutput(self.parseSessionBytes(grab(in)))
override def onPull(): Unit = handleParserOutput(self.onPull())
override def onUpstreamFinish(): Unit =
if (self.onUpstreamFinish()) completeStage()
else if (isAvailable(out)) handleParserOutput(self.onPull())
private def handleParserOutput(output: Output): Unit = {
output match {
case StreamEnd completeStage()
case NeedMoreData pull(in)
case x push(out, x)
}
}
setHandlers(in, out, this)
}
}
final def parseSessionBytes(input: SessionBytes): Output = {
if (input.session ne lastSession) {
lastSession = input.session
@ -93,17 +76,17 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
if (result.nonEmpty) throw new IllegalStateException("Unexpected `onPush`")
run(state)
onPull()
doPull()
}
final def onPull(): Output =
protected final def doPull(): Output =
if (result.nonEmpty) {
val head = result.head
result.remove(0) // faster than `ListBuffer::drop`
head
} else if (terminated) StreamEnd else NeedMoreData
final def onUpstreamFinish(): Boolean = {
protected final def shouldComplete(): Boolean = {
completionHandling() match {
case Some(x) emit(x)
case None // nothing to do
@ -118,28 +101,24 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
catch { case NotEnoughDataException continue(input, offset)(startNewMessage) }
}
protected def parseMessage(input: ByteString, offset: Int): StateResult
def parseProtocol(input: ByteString, cursor: Int): Int = {
protected final def parseProtocol(input: ByteString, cursor: Int): Int = {
def c(ix: Int) = byteChar(input, cursor + ix)
if (c(0) == 'H' && c(1) == 'T' && c(2) == 'T' && c(3) == 'P' && c(4) == '/' && c(5) == '1' && c(6) == '.') {
protocol = c(7) match {
case '0' `HTTP/1.0`
case '1' `HTTP/1.1`
case _ badProtocol
case _ onBadProtocol
}
cursor + 8
} else badProtocol
} else onBadProtocol
}
def badProtocol: Nothing
@tailrec final def parseHeaderLines(input: ByteString, lineStart: Int, headers: ListBuffer[HttpHeader] = initialHeaderBuffer,
headerCount: Int = 0, ch: Option[Connection] = None,
clh: Option[`Content-Length`] = None, cth: Option[`Content-Type`] = None,
teh: Option[`Transfer-Encoding`] = None, e100c: Boolean = false,
hh: Boolean = false): StateResult =
if (headerCount < maxHeaderCount) {
@tailrec protected final def parseHeaderLines(input: ByteString, lineStart: Int, headers: ListBuffer[HttpHeader] = initialHeaderBuffer,
headerCount: Int = 0, ch: Option[Connection] = None,
clh: Option[`Content-Length`] = None, cth: Option[`Content-Type`] = None,
teh: Option[`Transfer-Encoding`] = None, e100c: Boolean = false,
hh: Boolean = false): StateResult =
if (headerCount < settings.maxHeaderCount) {
var lineEnd = 0
val resultHeader =
try {
@ -182,19 +161,15 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
case h parseHeaderLines(input, lineEnd, headers += h, headerCount + 1, ch, clh, cth, teh, e100c, hh)
}
} else failMessageStart(s"HTTP message contains more than the configured limit of $maxHeaderCount headers")
} else failMessageStart(s"HTTP message contains more than the configured limit of ${settings.maxHeaderCount} headers")
// work-around for compiler complaining about non-tail-recursion if we inline this method
def parseHeaderLinesAux(headers: ListBuffer[HttpHeader], headerCount: Int, ch: Option[Connection],
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
e100c: Boolean, hh: Boolean)(input: ByteString, lineStart: Int): StateResult =
private def parseHeaderLinesAux(headers: ListBuffer[HttpHeader], headerCount: Int, ch: Option[Connection],
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
e100c: Boolean, hh: Boolean)(input: ByteString, lineStart: Int): StateResult =
parseHeaderLines(input, lineStart, headers, headerCount, ch, clh, cth, teh, e100c, hh)
def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): StateResult
def parseFixedLengthBody(
protected final def parseFixedLengthBody(
remainingBodyBytes: Long,
isLastMessage: Boolean)(input: ByteString, bodyStart: Int): StateResult = {
val remainingInputBytes = input.length - bodyStart
@ -213,7 +188,7 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
} else continue(input, bodyStart)(parseFixedLengthBody(remainingBodyBytes, isLastMessage))
}
def parseChunk(input: ByteString, offset: Int, isLastMessage: Boolean, totalBytesRead: Long): StateResult = {
protected final def parseChunk(input: ByteString, offset: Int, isLastMessage: Boolean, totalBytesRead: Long): StateResult = {
@tailrec def parseTrailer(extension: String, lineStart: Int, headers: List[HttpHeader] = Nil,
headerCount: Int = 0): StateResult = {
var errorInfo: ErrorInfo = null
@ -230,9 +205,9 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
setCompletionHandling(CompletionOk)
if (isLastMessage) terminate()
else startNewMessage(input, lineEnd)
case header if headerCount < maxHeaderCount
case header if headerCount < settings.maxHeaderCount
parseTrailer(extension, lineEnd, header :: headers, headerCount + 1)
case _ failEntityStream(s"Chunk trailer contains more than the configured limit of $maxHeaderCount headers")
case _ failEntityStream(s"Chunk trailer contains more than the configured limit of ${settings.maxHeaderCount} headers")
}
} else failEntityStream(errorInfo)
}
@ -252,24 +227,24 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
} else parseTrailer(extension, cursor)
@tailrec def parseChunkExtensions(chunkSize: Int, cursor: Int)(startIx: Int = cursor): StateResult =
if (cursor - startIx <= maxChunkExtLength) {
if (cursor - startIx <= settings.maxChunkExtLength) {
def extension = asciiString(input, startIx, cursor)
byteChar(input, cursor) match {
case '\r' if byteChar(input, cursor + 1) == '\n' parseChunkBody(chunkSize, extension, cursor + 2)
case '\n' parseChunkBody(chunkSize, extension, cursor + 1)
case _ parseChunkExtensions(chunkSize, cursor + 1)(startIx)
}
} else failEntityStream(s"HTTP chunk extension length exceeds configured limit of $maxChunkExtLength characters")
} else failEntityStream(s"HTTP chunk extension length exceeds configured limit of ${settings.maxChunkExtLength} characters")
@tailrec def parseSize(cursor: Int, size: Long): StateResult =
if (size <= maxChunkSize) {
if (size <= settings.maxChunkSize) {
byteChar(input, cursor) match {
case c if CharacterClasses.HEXDIG(c) parseSize(cursor + 1, size * 16 + CharUtils.hexValue(c))
case ';' if cursor > offset parseChunkExtensions(size.toInt, cursor + 1)()
case '\r' if cursor > offset && byteChar(input, cursor + 1) == '\n' parseChunkBody(size.toInt, "", cursor + 2)
case c failEntityStream(s"Illegal character '${escape(c)}' in chunk start")
}
} else failEntityStream(s"HTTP chunk size exceeds the configured limit of $maxChunkSize bytes")
} else failEntityStream(s"HTTP chunk size exceeds the configured limit of ${settings.maxChunkSize} bytes")
try parseSize(offset, 0)
catch {
@ -277,9 +252,9 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
}
}
def emit(output: Output): Unit = result += output
protected def emit(output: Output): Unit = result += output
def continue(input: ByteString, offset: Int)(next: (ByteString, Int) StateResult): StateResult = {
protected final def continue(input: ByteString, offset: Int)(next: (ByteString, Int) StateResult): StateResult = {
state =
math.signum(offset - input.length) match {
case -1
@ -291,30 +266,30 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
done()
}
def continue(next: (ByteString, Int) StateResult): StateResult = {
protected final def continue(next: (ByteString, Int) StateResult): StateResult = {
state = next(_, 0)
done()
}
def failMessageStart(summary: String): StateResult = failMessageStart(summary, "")
def failMessageStart(summary: String, detail: String): StateResult = failMessageStart(StatusCodes.BadRequest, summary, detail)
def failMessageStart(status: StatusCode): StateResult = failMessageStart(status, status.defaultMessage)
def failMessageStart(status: StatusCode, summary: String, detail: String = ""): StateResult = failMessageStart(status, ErrorInfo(summary, detail))
def failMessageStart(status: StatusCode, info: ErrorInfo): StateResult = {
protected final def failMessageStart(summary: String): StateResult = failMessageStart(summary, "")
protected final def failMessageStart(summary: String, detail: String): StateResult = failMessageStart(StatusCodes.BadRequest, summary, detail)
protected final def failMessageStart(status: StatusCode): StateResult = failMessageStart(status, status.defaultMessage)
protected final def failMessageStart(status: StatusCode, summary: String, detail: String = ""): StateResult = failMessageStart(status, ErrorInfo(summary, detail))
protected final def failMessageStart(status: StatusCode, info: ErrorInfo): StateResult = {
emit(MessageStartError(status, info))
setCompletionHandling(CompletionOk)
terminate()
}
def failEntityStream(summary: String): StateResult = failEntityStream(summary, "")
def failEntityStream(summary: String, detail: String): StateResult = failEntityStream(ErrorInfo(summary, detail))
def failEntityStream(info: ErrorInfo): StateResult = {
protected final def failEntityStream(summary: String): StateResult = failEntityStream(summary, "")
protected final def failEntityStream(summary: String, detail: String): StateResult = failEntityStream(ErrorInfo(summary, detail))
protected final def failEntityStream(info: ErrorInfo): StateResult = {
emit(EntityStreamError(info))
setCompletionHandling(CompletionOk)
terminate()
}
def terminate(): StateResult = {
protected final def terminate(): StateResult = {
terminated = true
done()
}
@ -325,19 +300,19 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
*/
private def done(): StateResult = null // StateResult is a phantom type
def contentType(cth: Option[`Content-Type`]) = cth match {
protected final def contentType(cth: Option[`Content-Type`]) = cth match {
case Some(x) x.contentType
case None ContentTypes.`application/octet-stream`
}
def emptyEntity(cth: Option[`Content-Type`]) =
protected final def emptyEntity(cth: Option[`Content-Type`]) =
StrictEntityCreator(if (cth.isDefined) HttpEntity.empty(cth.get.contentType) else HttpEntity.Empty)
def strictEntity(cth: Option[`Content-Type`], input: ByteString, bodyStart: Int,
contentLength: Int) =
protected final def strictEntity(cth: Option[`Content-Type`], input: ByteString, bodyStart: Int,
contentLength: Int) =
StrictEntityCreator(HttpEntity.Strict(contentType(cth), input.slice(bodyStart, bodyStart + contentLength)))
def defaultEntity[A <: ParserOutput](cth: Option[`Content-Type`], contentLength: Long) =
protected final def defaultEntity[A <: ParserOutput](cth: Option[`Content-Type`], contentLength: Long) =
StreamedEntityCreator[A, UniversalEntity] { entityParts
val data = entityParts.collect {
case EntityPart(bytes) bytes
@ -346,7 +321,7 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
HttpEntity.Default(contentType(cth), contentLength, HttpEntity.limitableByteSource(data))
}
def chunkedEntity[A <: ParserOutput](cth: Option[`Content-Type`]) =
protected final def chunkedEntity[A <: ParserOutput](cth: Option[`Content-Type`]) =
StreamedEntityCreator[A, RequestEntity] { entityChunks
val chunks = entityChunks.collect {
case EntityChunk(chunk) chunk
@ -355,16 +330,20 @@ private[http] abstract class HttpMessageParser[Output >: MessageOutput <: Parser
HttpEntity.Chunked(contentType(cth), HttpEntity.limitableChunkSource(chunks))
}
def addTransferEncodingWithChunkedPeeled(headers: List[HttpHeader], teh: `Transfer-Encoding`): List[HttpHeader] =
protected final def addTransferEncodingWithChunkedPeeled(headers: List[HttpHeader], teh: `Transfer-Encoding`): List[HttpHeader] =
teh.withChunkedPeeled match {
case Some(x) x :: headers
case None headers
}
def setCompletionHandling(completionHandling: CompletionHandling): Unit =
protected final def setCompletionHandling(completionHandling: CompletionHandling): Unit =
this.completionHandling = completionHandling
}
/**
* INTERNAL API
*/
private[http] object HttpMessageParser {
sealed trait StateResult // phantom type for ensuring soundness of our parsing method setup
final case class Trampoline(f: ByteString StateResult) extends StateResult

View file

@ -5,169 +5,207 @@
package akka.http.impl.engine.parsing
import java.lang.{ StringBuilder JStringBuilder }
import scala.annotation.tailrec
import scala.annotation.{ switch, tailrec }
import akka.http.scaladsl.settings.ParserSettings
import akka.util.ByteString
import akka.util.{ ByteString, OptionVal }
import akka.http.impl.engine.ws.Handshake
import akka.http.impl.model.parser.CharacterClasses
import akka.http.scaladsl.model._
import headers._
import StatusCodes._
import ParserOutput._
import akka.http.impl.util.ByteStringParserInput
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.TLSProtocol.SessionBytes
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }
/**
* INTERNAL API
*/
private[http] class HttpRequestParser(
_settings: ParserSettings,
private[http] final class HttpRequestParser(
settings: ParserSettings,
rawRequestUriHeader: Boolean,
_headerParser: HttpHeaderParser)
extends HttpMessageParser[RequestOutput](_settings, _headerParser) {
headerParser: HttpHeaderParser)
extends GraphStage[FlowShape[SessionBytes, RequestOutput]] { self
import HttpMessageParser._
import settings._
private[this] var method: HttpMethod = _
private[this] var uri: Uri = _
private[this] var uriBytes: Array[Byte] = _
val in = Inlet[SessionBytes]("HttpRequestParser.in")
val out = Outlet[RequestOutput]("HttpRequestParser.out")
def createShallowCopy(): HttpRequestParser =
new HttpRequestParser(settings, rawRequestUriHeader, headerParser.createShallowCopy())
val shape = FlowShape.of(in, out)
def parseMessage(input: ByteString, offset: Int): StateResult = {
var cursor = parseMethod(input, offset)
cursor = parseRequestTarget(input, cursor)
cursor = parseProtocol(input, cursor)
if (byteChar(input, cursor) == '\r' && byteChar(input, cursor + 1) == '\n')
parseHeaderLines(input, cursor + 2)
else badProtocol
}
override protected def initialAttributes: Attributes = Attributes.name("HttpRequestParser")
def parseMethod(input: ByteString, cursor: Int): Int = {
@tailrec def parseCustomMethod(ix: Int = 0, sb: JStringBuilder = new JStringBuilder(16)): Int =
if (ix < maxMethodLength) {
byteChar(input, cursor + ix) match {
case ' '
customMethods(sb.toString) match {
case Some(m)
method = m
cursor + ix + 1
case None throw new ParsingException(NotImplemented, ErrorInfo("Unsupported HTTP method", sb.toString))
}
case c parseCustomMethod(ix + 1, sb.append(c))
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) with HttpMessageParser[RequestOutput] with InHandler with OutHandler {
import HttpMessageParser._
override val settings = self.settings
override val headerParser = self.headerParser.createShallowCopy()
private[this] var method: HttpMethod = _
private[this] var uri: Uri = _
private[this] var uriBytes: ByteString = _
override def onPush(): Unit = handleParserOutput(parseSessionBytes(grab(in)))
override def onPull(): Unit = handleParserOutput(doPull())
override def onUpstreamFinish(): Unit =
if (super.shouldComplete()) completeStage()
else if (isAvailable(out)) handleParserOutput(doPull())
setHandlers(in, out, this)
private def handleParserOutput(output: RequestOutput): Unit = {
output match {
case StreamEnd completeStage()
case NeedMoreData pull(in)
case x push(out, x)
}
}
override def parseMessage(input: ByteString, offset: Int): StateResult = {
var cursor = parseMethod(input, offset)
cursor = parseRequestTarget(input, cursor)
cursor = parseProtocol(input, cursor)
if (byteChar(input, cursor) == '\r' && byteChar(input, cursor + 1) == '\n')
parseHeaderLines(input, cursor + 2)
else onBadProtocol
}
def parseMethod(input: ByteString, cursor: Int): Int = {
@tailrec def parseCustomMethod(ix: Int = 0, sb: JStringBuilder = new JStringBuilder(16)): Int =
if (ix < maxMethodLength) {
byteChar(input, cursor + ix) match {
case ' '
customMethods(sb.toString) match {
case Some(m)
method = m
cursor + ix + 1
case None throw new ParsingException(NotImplemented, ErrorInfo("Unsupported HTTP method", sb.toString))
}
case c parseCustomMethod(ix + 1, sb.append(c))
}
} else throw new ParsingException(
BadRequest,
ErrorInfo("Unsupported HTTP method", s"HTTP method too long (started with '${sb.toString}'). " +
"Increase `akka.http.server.parsing.max-method-length` to support HTTP methods with more characters."))
@tailrec def parseMethod(meth: HttpMethod, ix: Int = 1): Int =
if (ix == meth.value.length)
if (byteChar(input, cursor + ix) == ' ') {
method = meth
cursor + ix + 1
} else parseCustomMethod()
else if (byteChar(input, cursor + ix) == meth.value.charAt(ix)) parseMethod(meth, ix + 1)
else parseCustomMethod()
import HttpMethods._
(byteChar(input, cursor): @switch) match {
case 'G' parseMethod(GET)
case 'P' byteChar(input, cursor + 1) match {
case 'O' parseMethod(POST, 2)
case 'U' parseMethod(PUT, 2)
case 'A' parseMethod(PATCH, 2)
case _ parseCustomMethod()
}
} else throw new ParsingException(
BadRequest,
ErrorInfo("Unsupported HTTP method", s"HTTP method too long (started with '${sb.toString}'). " +
"Increase `akka.http.server.parsing.max-method-length` to support HTTP methods with more characters."))
@tailrec def parseMethod(meth: HttpMethod, ix: Int = 1): Int =
if (ix == meth.value.length)
if (byteChar(input, cursor + ix) == ' ') {
method = meth
cursor + ix + 1
} else parseCustomMethod()
else if (byteChar(input, cursor + ix) == meth.value.charAt(ix)) parseMethod(meth, ix + 1)
else parseCustomMethod()
import HttpMethods._
byteChar(input, cursor) match {
case 'G' parseMethod(GET)
case 'P' byteChar(input, cursor + 1) match {
case 'O' parseMethod(POST, 2)
case 'U' parseMethod(PUT, 2)
case 'A' parseMethod(PATCH, 2)
case 'D' parseMethod(DELETE)
case 'H' parseMethod(HEAD)
case 'O' parseMethod(OPTIONS)
case 'T' parseMethod(TRACE)
case 'C' parseMethod(CONNECT)
case _ parseCustomMethod()
}
case 'D' parseMethod(DELETE)
case 'H' parseMethod(HEAD)
case 'O' parseMethod(OPTIONS)
case 'T' parseMethod(TRACE)
case 'C' parseMethod(CONNECT)
case _ parseCustomMethod()
}
}
def parseRequestTarget(input: ByteString, cursor: Int): Int = {
val uriStart = cursor
val uriEndLimit = cursor + maxUriLength
def parseRequestTarget(input: ByteString, cursor: Int): Int = {
val uriStart = cursor
val uriEndLimit = cursor + maxUriLength
@tailrec def findUriEnd(ix: Int = cursor): Int =
if (ix == input.length) throw NotEnoughDataException
else if (CharacterClasses.WSPCRLF(input(ix).toChar)) ix
else if (ix < uriEndLimit) findUriEnd(ix + 1)
else throw new ParsingException(
RequestUriTooLong,
s"URI length exceeds the configured limit of $maxUriLength characters")
@tailrec def findUriEnd(ix: Int = cursor): Int =
if (ix == input.length) throw NotEnoughDataException
else if (CharacterClasses.WSPCRLF(input(ix).toChar)) ix
else if (ix < uriEndLimit) findUriEnd(ix + 1)
else throw new ParsingException(
RequestUriTooLong,
s"URI length exceeds the configured limit of $maxUriLength characters")
val uriEnd = findUriEnd()
try {
uriBytes = input.iterator.slice(uriStart, uriEnd).toArray[Byte] // TODO: can we reduce allocations here?
uri = Uri.parseHttpRequestTarget(uriBytes, mode = uriParsingMode)
} catch {
case IllegalUriException(info) throw new ParsingException(BadRequest, info)
val uriEnd = findUriEnd()
try {
uriBytes = input.slice(uriStart, uriEnd)
uri = Uri.parseHttpRequestTarget(new ByteStringParserInput(uriBytes), mode = uriParsingMode)
} catch {
case IllegalUriException(info) throw new ParsingException(BadRequest, info)
}
uriEnd + 1
}
uriEnd + 1
}
def badProtocol = throw new ParsingException(HTTPVersionNotSupported)
override def onBadProtocol() = throw new ParsingException(HTTPVersionNotSupported)
// http://tools.ietf.org/html/rfc7230#section-3.3
def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): StateResult =
if (hostHeaderPresent || protocol == HttpProtocols.`HTTP/1.0`) {
def emitRequestStart(
createEntity: EntityCreator[RequestOutput, RequestEntity],
headers: List[HttpHeader] = headers) = {
val allHeaders0 =
if (rawRequestUriHeader) `Raw-Request-URI`(new String(uriBytes, HttpCharsets.`US-ASCII`.nioCharset)) :: headers
else headers
// http://tools.ietf.org/html/rfc7230#section-3.3
override def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): StateResult =
if (hostHeaderPresent || protocol == HttpProtocols.`HTTP/1.0`) {
def emitRequestStart(
createEntity: EntityCreator[RequestOutput, RequestEntity],
headers: List[HttpHeader] = headers) = {
val allHeaders0 =
if (rawRequestUriHeader) `Raw-Request-URI`(uriBytes.decodeString(HttpCharsets.`US-ASCII`.nioCharset)) :: headers
else headers
val allHeaders =
if (method == HttpMethods.GET) {
Handshake.Server.websocketUpgrade(headers, hostHeaderPresent) match {
case Some(upgrade) upgrade :: allHeaders0
case None allHeaders0
val allHeaders =
if (method == HttpMethods.GET) {
Handshake.Server.websocketUpgrade(headers, hostHeaderPresent) match {
case OptionVal.Some(upgrade) upgrade :: allHeaders0
case OptionVal.None allHeaders0
}
} else allHeaders0
emit(RequestStart(method, uri, protocol, allHeaders, createEntity, expect100continue, closeAfterResponseCompletion))
}
teh match {
case None
val contentLength = clh match {
case Some(`Content-Length`(len)) len
case None 0
}
if (contentLength == 0) {
emitRequestStart(emptyEntity(cth))
setCompletionHandling(HttpMessageParser.CompletionOk)
startNewMessage(input, bodyStart)
} else if (!method.isEntityAccepted) {
failMessageStart(UnprocessableEntity, s"${method.name} requests must not have an entity")
} else if (contentLength <= input.size - bodyStart) {
val cl = contentLength.toInt
emitRequestStart(strictEntity(cth, input, bodyStart, cl))
setCompletionHandling(HttpMessageParser.CompletionOk)
startNewMessage(input, bodyStart + cl)
} else {
emitRequestStart(defaultEntity(cth, contentLength))
parseFixedLengthBody(contentLength, closeAfterResponseCompletion)(input, bodyStart)
}
} else allHeaders0
emit(RequestStart(method, uri, protocol, allHeaders, createEntity, expect100continue, closeAfterResponseCompletion))
}
teh match {
case None
val contentLength = clh match {
case Some(`Content-Length`(len)) len
case None 0
}
if (contentLength == 0) {
emitRequestStart(emptyEntity(cth))
setCompletionHandling(HttpMessageParser.CompletionOk)
startNewMessage(input, bodyStart)
} else if (!method.isEntityAccepted) {
case Some(_) if !method.isEntityAccepted
failMessageStart(UnprocessableEntity, s"${method.name} requests must not have an entity")
} else if (contentLength <= input.size - bodyStart) {
val cl = contentLength.toInt
emitRequestStart(strictEntity(cth, input, bodyStart, cl))
setCompletionHandling(HttpMessageParser.CompletionOk)
startNewMessage(input, bodyStart + cl)
} else {
emitRequestStart(defaultEntity(cth, contentLength))
parseFixedLengthBody(contentLength, closeAfterResponseCompletion)(input, bodyStart)
}
case Some(_) if !method.isEntityAccepted
failMessageStart(UnprocessableEntity, s"${method.name} requests must not have an entity")
case Some(te)
val completedHeaders = addTransferEncodingWithChunkedPeeled(headers, te)
if (te.isChunked) {
if (clh.isEmpty) {
emitRequestStart(chunkedEntity(cth), completedHeaders)
parseChunk(input, bodyStart, closeAfterResponseCompletion, totalBytesRead = 0L)
} else failMessageStart("A chunked request must not contain a Content-Length header.")
} else parseEntity(completedHeaders, protocol, input, bodyStart, clh, cth, teh = None,
expect100continue, hostHeaderPresent, closeAfterResponseCompletion)
}
} else failMessageStart("Request is missing required `Host` header")
case Some(te)
val completedHeaders = addTransferEncodingWithChunkedPeeled(headers, te)
if (te.isChunked) {
if (clh.isEmpty) {
emitRequestStart(chunkedEntity(cth), completedHeaders)
parseChunk(input, bodyStart, closeAfterResponseCompletion, totalBytesRead = 0L)
} else failMessageStart("A chunked request must not contain a Content-Length header.")
} else parseEntity(completedHeaders, protocol, input, bodyStart, clh, cth, teh = None,
expect100continue, hostHeaderPresent, closeAfterResponseCompletion)
}
} else failMessageStart("Request is missing required `Host` header")
}
override def toString: String = "HttpRequestParser"
}

View file

@ -13,12 +13,15 @@ import akka.util.ByteString
import akka.http.scaladsl.model._
import headers._
import ParserOutput._
import akka.stream.{ Attributes, FlowShape, Inlet, Outlet }
import akka.stream.TLSProtocol.SessionBytes
import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }
/**
* INTERNAL API
*/
private[http] class HttpResponseParser(_settings: ParserSettings, _headerParser: HttpHeaderParser)
extends HttpMessageParser[ResponseOutput](_settings, _headerParser) {
private[http] class HttpResponseParser(protected val settings: ParserSettings, protected val headerParser: HttpHeaderParser)
extends HttpMessageParser[ResponseOutput] { self
import HttpResponseParser._
import HttpMessageParser._
import settings._
@ -26,31 +29,74 @@ private[http] class HttpResponseParser(_settings: ParserSettings, _headerParser:
private[this] var contextForCurrentResponse: Option[ResponseContext] = None
private[this] var statusCode: StatusCode = StatusCodes.OK
def createShallowCopy(): HttpResponseParser = new HttpResponseParser(settings, headerParser.createShallowCopy())
// Note that this GraphStage mutates the HttpMessageParser instance, use with caution.
final val stage = new GraphStage[FlowShape[SessionBytes, ResponseOutput]] {
val in: Inlet[SessionBytes] = Inlet("HttpResponseParser.in")
val out: Outlet[ResponseOutput] = Outlet("HttpResponseParser.out")
override val shape: FlowShape[SessionBytes, ResponseOutput] = FlowShape(in, out)
def setContextForNextResponse(responseContext: ResponseContext): Unit =
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) with InHandler with OutHandler {
override def onPush(): Unit = handleParserOutput(self.parseSessionBytes(grab(in)))
override def onPull(): Unit = handleParserOutput(self.onPull())
override def onUpstreamFinish(): Unit =
if (self.onUpstreamFinish()) completeStage()
else if (isAvailable(out)) handleParserOutput(self.onPull())
private def handleParserOutput(output: ResponseOutput): Unit = {
output match {
case StreamEnd completeStage()
case NeedMoreData pull(in)
case x push(out, x)
}
}
setHandlers(in, out, this)
}
}
final def createShallowCopy(): HttpResponseParser = new HttpResponseParser(settings, headerParser.createShallowCopy())
final def setContextForNextResponse(responseContext: ResponseContext): Unit =
if (contextForCurrentResponse.isEmpty) contextForCurrentResponse = Some(responseContext)
protected def parseMessage(input: ByteString, offset: Int): StateResult =
final def onPull(): ResponseOutput =
if (result.nonEmpty) {
val head = result.head
result.remove(0) // faster than `ListBuffer::drop`
head
} else if (terminated) StreamEnd else NeedMoreData
final def onUpstreamFinish(): Boolean = {
completionHandling() match {
case Some(x) emit(x)
case None // nothing to do
}
terminated = true
result.isEmpty
}
override final def emit(output: ResponseOutput): Unit = {
if (output == MessageEnd) contextForCurrentResponse = None
super.emit(output)
}
override protected def parseMessage(input: ByteString, offset: Int): StateResult =
if (contextForCurrentResponse.isDefined) {
var cursor = parseProtocol(input, offset)
if (byteChar(input, cursor) == ' ') {
cursor = parseStatus(input, cursor + 1)
parseHeaderLines(input, cursor)
} else badProtocol
} else onBadProtocol()
} else {
emit(NeedNextRequestMethod)
continue(input, offset)(startNewMessage)
}
override def emit(output: ResponseOutput): Unit = {
if (output == MessageEnd) contextForCurrentResponse = None
super.emit(output)
}
override final def onBadProtocol() = throw new ParsingException("The server-side HTTP version is not supported")
def badProtocol = throw new ParsingException("The server-side HTTP version is not supported")
def parseStatus(input: ByteString, cursor: Int): Int = {
private def parseStatus(input: ByteString, cursor: Int): Int = {
def badStatusCode = throw new ParsingException("Illegal response status code")
def parseStatusCode() = {
def intValue(offset: Int): Int = {
@ -84,9 +130,9 @@ private[http] class HttpResponseParser(_settings: ParserSettings, _headerParser:
def handleInformationalResponses: Boolean = true
// http://tools.ietf.org/html/rfc7230#section-3.3
def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): StateResult = {
protected final def parseEntity(headers: List[HttpHeader], protocol: HttpProtocol, input: ByteString, bodyStart: Int,
clh: Option[`Content-Length`], cth: Option[`Content-Type`], teh: Option[`Transfer-Encoding`],
expect100continue: Boolean, hostHeaderPresent: Boolean, closeAfterResponseCompletion: Boolean): StateResult = {
def emitResponseStart(
createEntity: EntityCreator[ResponseOutput, ResponseEntity],
@ -161,7 +207,7 @@ private[http] class HttpResponseParser(_settings: ParserSettings, _headerParser:
} else finishEmptyResponse()
}
def parseToCloseBody(input: ByteString, bodyStart: Int, totalBytesRead: Long): StateResult = {
private def parseToCloseBody(input: ByteString, bodyStart: Int, totalBytesRead: Long): StateResult = {
val newTotalBytes = totalBytesRead + math.max(0, input.length - bodyStart)
if (input.length > bodyStart)
emit(EntityPart(input.drop(bodyStart).compact))

View file

@ -7,11 +7,11 @@ package akka.http.impl.engine.rendering
import akka.NotUsed
import akka.http.impl.engine.ws.{ FrameEvent, UpgradeToWebSocketResponseHeader }
import akka.http.scaladsl.model.ws.Message
import akka.stream.{ Outlet, Inlet, Attributes, FlowShape, Graph }
import akka.stream.{ Attributes, FlowShape, Graph, Inlet, Outlet }
import scala.annotation.tailrec
import akka.event.LoggingAdapter
import akka.util.ByteString
import akka.util.{ ByteString, OptionVal }
import akka.stream.scaladsl.{ Flow, Source }
import akka.stream.stage._
import akka.http.scaladsl.model._
@ -20,6 +20,8 @@ import RenderSupport._
import HttpProtocols._
import headers._
import scala.concurrent.duration._
/**
* INTERNAL API
*/
@ -129,9 +131,17 @@ private[http] class HttpResponseRendererFactory(
@tailrec def renderHeaders(remaining: List[HttpHeader], alwaysClose: Boolean = false,
connHeader: Connection = null, serverSeen: Boolean = false,
transferEncodingSeen: Boolean = false, dateSeen: Boolean = false): Unit =
transferEncodingSeen: Boolean = false, dateSeen: Boolean = false): Unit = {
remaining match {
case head :: tail head match {
case x: Server
render(x)
renderHeaders(tail, alwaysClose, connHeader, serverSeen = true, transferEncodingSeen, dateSeen)
case x: Date
render(x)
renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen = true)
case x: `Content-Length`
suppressionWarning(log, x, "explicit `Content-Length` header is not allowed. Use the appropriate HttpEntity subtype.")
renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen)
@ -140,10 +150,6 @@ private[http] class HttpResponseRendererFactory(
suppressionWarning(log, x, "explicit `Content-Type` header is not allowed. Set `HttpResponse.entity.contentType` instead.")
renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen)
case x: Date
render(x)
renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen = true)
case x: `Transfer-Encoding`
x.withChunkedPeeled match {
case None
@ -159,10 +165,6 @@ private[http] class HttpResponseRendererFactory(
val connectionHeader = if (connHeader eq null) x else Connection(x.tokens ++ connHeader.tokens)
renderHeaders(tail, alwaysClose, connectionHeader, serverSeen, transferEncodingSeen, dateSeen)
case x: Server
render(x)
renderHeaders(tail, alwaysClose, connHeader, serverSeen = true, transferEncodingSeen, dateSeen)
case x: CustomHeader
if (x.renderInResponses) render(x)
renderHeaders(tail, alwaysClose, connHeader, serverSeen, transferEncodingSeen, dateSeen)
@ -205,13 +207,15 @@ private[http] class HttpResponseRendererFactory(
r ~~ Connection ~~ (if (close) CloseBytes else KeepAliveBytes) ~~ CrLf
else if (connHeader != null && connHeader.hasUpgrade) {
r ~~ connHeader ~~ CrLf
headers
.collectFirst { case u: UpgradeToWebSocketResponseHeader u }
.foreach { header closeMode = SwitchToWebSocket(header.handler) }
HttpHeader.fastFind(classOf[UpgradeToWebSocketResponseHeader], headers) match {
case OptionVal.Some(header) closeMode = SwitchToWebSocket(header.handler)
case _ // nothing to do here...
}
}
if (mustRenderTransferEncodingChunkedHeader && !transferEncodingSeen)
r ~~ `Transfer-Encoding` ~~ ChunkedBytes ~~ CrLf
}
}
def renderContentLengthHeader(contentLength: Long) =
if (status.allowsEntity) r ~~ `Content-Length` ~~ contentLength ~~ CrLf else r
@ -219,7 +223,7 @@ private[http] class HttpResponseRendererFactory(
def byteStrings(entityBytes: Source[ByteString, Any]): Source[ResponseRenderingOutput, Any] =
renderByteStrings(r, entityBytes, skipEntity = noEntity).map(ResponseRenderingOutput.HttpData(_))
def completeResponseRendering(entity: ResponseEntity): StrictOrStreamed =
@tailrec def completeResponseRendering(entity: ResponseEntity): StrictOrStreamed =
entity match {
case HttpEntity.Strict(_, data)
renderHeaders(headers.toList)

View file

@ -28,6 +28,20 @@ private object RenderSupport {
val KeepAliveBytes = "Keep-Alive".asciiBytes
val CloseBytes = "close".asciiBytes
private[this] final val PreRenderedContentTypes = {
val m = new java.util.HashMap[ContentType, Array[Byte]](16)
def preRenderContentType(ct: ContentType) =
m.put(ct, (new ByteArrayRendering(32) ~~ headers.`Content-Type` ~~ ct ~~ CrLf).get)
import ContentTypes._
preRenderContentType(`application/json`)
preRenderContentType(`text/plain(UTF-8)`)
preRenderContentType(`text/xml(UTF-8)`)
preRenderContentType(`text/html(UTF-8)`)
preRenderContentType(`text/csv(UTF-8)`)
m
}
def CrLf = Rendering.CrLf
implicit val trailerRenderer = Renderer.genericSeqRenderer[Renderable, HttpHeader](CrLf, Rendering.Empty)
@ -42,9 +56,14 @@ private object RenderSupport {
})
}
def renderEntityContentType(r: Rendering, entity: HttpEntity) =
if (entity.contentType != ContentTypes.NoContentType) r ~~ headers.`Content-Type` ~~ entity.contentType ~~ CrLf
else r
def renderEntityContentType(r: Rendering, entity: HttpEntity) = {
val ct = entity.contentType
if (ct != ContentTypes.NoContentType) {
val preRendered = PreRenderedContentTypes.get(ct)
if (preRendered ne null) r ~~ preRendered // re-use pre-rendered
else r ~~ headers.`Content-Type` ~~ ct ~~ CrLf // render ad-hoc
} else r // don't render
}
def renderByteStrings(r: ByteStringRendering, entityBytes: Source[ByteString, Any],
skipEntity: Boolean = false): Source[ByteString, Any] = {

View file

@ -65,7 +65,8 @@ private[http] object HttpServerBluePrint {
websocketSupport(settings, log) atop
tlsSupport
theStack.withAttributes(HttpAttributes.remoteAddress(remoteAddress))
if (settings.remoteAddressHeader && remoteAddress.isDefined) theStack.withAttributes(HttpAttributes.remoteAddress(remoteAddress))
else theStack
}
val tlsSupport: BidiFlow[ByteString, SslTlsOutbound, SslTlsInbound, SessionBytes, NotUsed] =
@ -211,7 +212,7 @@ private[http] object HttpServerBluePrint {
// the initial header parser we initially use for every connection,
// will not be mutated, all "shared copy" parsers copy on first-write into the header cache
val rootParser = new HttpRequestParser(parserSettings, rawRequestUriHeader,
HttpHeaderParser(parserSettings) { info
HttpHeaderParser(parserSettings, log) { info
if (parserSettings.illegalHeaderWarnings)
logParsingError(info withSummaryPrepended "Illegal request header", log, parserSettings.errorLoggingVerbosity)
})
@ -228,11 +229,7 @@ private[http] object HttpServerBluePrint {
case x x
}
Flow[SessionBytes].via(
// each connection uses a single (private) request parser instance for all its requests
// which builds a cache of all header instances seen on that connection
rootParser.createShallowCopy().stage).named("rootParser")
.map(establishAbsoluteUri)
Flow[SessionBytes].via(rootParser).map(establishAbsoluteUri)
}
def rendering(settings: ServerSettings, log: LoggingAdapter): Flow[ResponseRenderingContext, ResponseRenderingOutput, NotUsed] = {
@ -425,7 +422,7 @@ private[http] object HttpServerBluePrint {
emit(responseCtxOut, ResponseRenderingContext(response, requestStart.method, requestStart.protocol, close),
pullHttpResponseIn)
if (close && requestStart.expect100Continue) pull(requestParsingIn)
if (!isClosed(requestParsingIn) && close && requestStart.expect100Continue) pull(requestParsingIn)
}
override def onUpstreamFinish() =
if (openRequests.isEmpty && isClosed(requestParsingIn)) completeStage()

View file

@ -5,6 +5,7 @@
package akka.http.impl.engine.ws
import java.util.Random
import scala.collection.immutable
import scala.collection.immutable.Seq
import scala.reflect.ClassTag
@ -12,7 +13,8 @@ import akka.http.impl.util._
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.model.ws.{ Message, UpgradeToWebSocket }
import akka.http.scaladsl.model._
import akka.stream.{ Graph, FlowShape }
import akka.stream.{ FlowShape, Graph }
import akka.util.OptionVal
/**
* Server-side implementation of the WebSocket handshake
@ -62,50 +64,63 @@ private[http] object Handshake {
* to speak. The interpretation of this header field is discussed
* in Section 9.1.
*/
def websocketUpgrade(headers: List[HttpHeader], hostHeaderPresent: Boolean): Option[UpgradeToWebSocket] = {
def find[T <: HttpHeader: ClassTag]: Option[T] =
headers.collectFirst {
case t: T t
def websocketUpgrade(headers: List[HttpHeader], hostHeaderPresent: Boolean): OptionVal[UpgradeToWebSocket] = {
// notes on Headers that re REQUIRE to be present here:
// - Host header is validated in general HTTP logic
// - Origin header is optional and, if required, should be validated
// on higher levels (routing, application logic)
//
// TODO See #18709 Extension support is optional in WS and currently unsupported.
//
// these are not needed directly, we verify their presence and correctness only:
// - Upgrade
// - Connection
// - `Sec-WebSocket-Version`
def hasAllRequiredWebsocketUpgradeHeaders: Boolean = {
// single-pass through the headers list while collecting all needed requirements
// this way we avoid scanning the requirements list 3 times (as we would with collect/find)
val it = headers.iterator
var requirementsMet = 0
val targetRequirements = 3
while (it.hasNext && (requirementsMet != targetRequirements)) it.next() match {
case u: Upgrade if (u.hasWebSocket) requirementsMet += 1
case c: Connection if (c.hasUpgrade) requirementsMet += 1
case v: `Sec-WebSocket-Version` if (v.hasVersion(CurrentWebSocketVersion)) requirementsMet += 1
case _ // continue...
}
requirementsMet == targetRequirements
}
// Host header is validated in general HTTP logic
// val host = find[Host]
val upgrade = find[Upgrade]
val connection = find[Connection]
val key = find[`Sec-WebSocket-Key`]
val version = find[`Sec-WebSocket-Version`]
// Origin header is optional and, if required, should be validated
// on higher levels (routing, application logic)
// val origin = find[Origin]
val protocol = find[`Sec-WebSocket-Protocol`]
val clientSupportedSubprotocols = protocol.toList.flatMap(_.protocols)
// Extension support is optional in WS and currently unsupported.
// TODO See #18709
// val extensions = find[`Sec-WebSocket-Extensions`]
if (hasAllRequiredWebsocketUpgradeHeaders) {
val key = HttpHeader.fastFind(classOf[`Sec-WebSocket-Key`], headers)
if (key.isDefined && key.get.isValid) {
val protocol = HttpHeader.fastFind(classOf[`Sec-WebSocket-Protocol`], headers)
if (upgrade.exists(_.hasWebSocket) &&
connection.exists(_.hasUpgrade) &&
version.exists(_.hasVersion(CurrentWebSocketVersion)) &&
key.exists(k k.isValid)) {
val header = new UpgradeToWebSocketLowLevel {
def requestedProtocols: Seq[String] = clientSupportedSubprotocols
def handle(handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]], subprotocol: Option[String]): HttpResponse = {
require(
subprotocol.forall(chosen clientSupportedSubprotocols.contains(chosen)),
s"Tried to choose invalid subprotocol '$subprotocol' which wasn't offered by the client: [${requestedProtocols.mkString(", ")}]")
buildResponse(key.get, handler, subprotocol)
val clientSupportedSubprotocols = protocol match {
case OptionVal.Some(p) p.protocols
case _ Nil
}
def handleFrames(handlerFlow: Graph[FlowShape[FrameEvent, FrameEvent], Any], subprotocol: Option[String]): HttpResponse =
handle(Left(handlerFlow), subprotocol)
val header = new UpgradeToWebSocketLowLevel {
def requestedProtocols: Seq[String] = clientSupportedSubprotocols
override def handleMessages(handlerFlow: Graph[FlowShape[Message, Message], Any], subprotocol: Option[String] = None): HttpResponse =
handle(Right(handlerFlow), subprotocol)
}
Some(header)
} else None
def handle(handler: Either[Graph[FlowShape[FrameEvent, FrameEvent], Any], Graph[FlowShape[Message, Message], Any]], subprotocol: Option[String]): HttpResponse = {
require(
subprotocol.forall(chosen clientSupportedSubprotocols.contains(chosen)),
s"Tried to choose invalid subprotocol '$subprotocol' which wasn't offered by the client: [${requestedProtocols.mkString(", ")}]")
buildResponse(key.get, handler, subprotocol)
}
def handleFrames(handlerFlow: Graph[FlowShape[FrameEvent, FrameEvent], Any], subprotocol: Option[String]): HttpResponse =
handle(Left(handlerFlow), subprotocol)
override def handleMessages(handlerFlow: Graph[FlowShape[Message, Message], Any], subprotocol: Option[String] = None): HttpResponse =
handle(Right(handlerFlow), subprotocol)
}
OptionVal.Some(header)
} else OptionVal.None
} else OptionVal.None
}
/*

View file

@ -7,8 +7,9 @@ package akka.http.impl.engine.ws
import java.util.Random
import akka.NotUsed
import akka.stream.{ Attributes, Outlet, Inlet, FlowShape }
import akka.stream.scaladsl.{ Keep, BidiFlow, Flow }
import akka.stream.stage.{ SyncDirective, Context, StatefulStage }
import akka.stream.stage._
/**
* Implements WebSocket Frame masking.
@ -20,19 +21,20 @@ private[http] object Masking {
BidiFlow.fromFlowsMat(unmaskIf(serverSide), maskIf(!serverSide, maskRandom))(Keep.none)
def maskIf(condition: Boolean, maskRandom: () Random): Flow[FrameEvent, FrameEvent, NotUsed] =
if (condition)
if (condition) {
Flow[FrameEvent]
.transform(() new Masking(maskRandom())) // new random per materialization
.via(new Masking(maskRandom())) // new random per materialization
.map {
case f: FrameEvent f
case FrameError(ex) throw ex
}
else Flow[FrameEvent]
} else Flow[FrameEvent]
def unmaskIf(condition: Boolean): Flow[FrameEvent, FrameEventOrError, NotUsed] =
if (condition) Flow[FrameEvent].transform(() new Unmasking())
if (condition) Flow[FrameEvent].via(Unmasking)
else Flow[FrameEvent]
private class Masking(random: Random) extends Masker {
private final class Masking(random: Random) extends Masker {
def extractMask(header: FrameHeader): Int = random.nextInt()
def setNewMask(header: FrameHeader, mask: Int): FrameHeader = {
if (header.mask.isDefined) throw new ProtocolException("Frame mustn't already be masked")
@ -40,7 +42,8 @@ private[http] object Masking {
}
override def toString: String = s"Masking($random)"
}
private class Unmasking extends Masker {
private object Unmasking extends Masker {
def extractMask(header: FrameHeader): Int = header.mask match {
case Some(mask) mask
case None throw new ProtocolException("Frame wasn't masked")
@ -50,42 +53,59 @@ private[http] object Masking {
}
/** Implements both masking and unmasking which is mostly symmetric (because of XOR) */
private abstract class Masker extends StatefulStage[FrameEvent, FrameEventOrError] {
private abstract class Masker extends GraphStage[FlowShape[FrameEvent, FrameEventOrError]] {
def extractMask(header: FrameHeader): Int
def setNewMask(header: FrameHeader, mask: Int): FrameHeader
def initial: State = Idle
val in = Inlet[FrameEvent](s"${toString}-in")
val out = Outlet[FrameEventOrError](s"${toString}-out")
override val shape: FlowShape[FrameEvent, FrameEventOrError] = FlowShape(in, out)
private object Idle extends State {
def onPush(part: FrameEvent, ctx: Context[FrameEventOrError]): SyncDirective =
part match {
case start @ FrameStart(header, data)
try {
val mask = extractMask(header)
become(new Running(mask))
current.onPush(start.copy(header = setNewMask(header, mask)), ctx)
} catch {
case p: ProtocolException
become(Done)
ctx.push(FrameError(p))
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) with OutHandler with InHandler {
def onPush(): Unit = grab(in) match {
case start @ FrameStart(header, data)
try {
val mask = extractMask(header)
val (masked, newMask) = FrameEventParser.mask(data, mask)
if (!start.lastPart) {
setHandler(in, runningHandler(newMask, this))
}
case _: FrameData
ctx.fail(new IllegalStateException("unexpected FrameData (need FrameStart first)"))
}
}
private class Running(initialMask: Int) extends State {
var mask = initialMask
def onPush(part: FrameEvent, ctx: Context[FrameEventOrError]): SyncDirective = {
if (part.lastPart) become(Idle)
val (masked, newMask) = FrameEventParser.mask(part.data, mask)
mask = newMask
ctx.push(part.withData(data = masked))
push(out, start.copy(header = setNewMask(header, mask), data = masked))
} catch {
case p: ProtocolException {
setHandler(in, doneHandler)
push(out, FrameError(p))
}
}
case _: FrameData fail(out, new IllegalStateException("unexpected FrameData (need FrameStart first)"))
}
}
private object Done extends State {
def onPush(part: FrameEvent, ctx: Context[FrameEventOrError]): SyncDirective = ctx.pull()
private def doneHandler = new InHandler {
override def onPush(): Unit = pull(in)
}
private def runningHandler(initialMask: Int, nextState: InHandler): InHandler = new InHandler {
var mask = initialMask
override def onPush(): Unit = {
val part = grab(in)
if (part.lastPart) {
setHandler(in, nextState)
}
val (masked, newMask) = FrameEventParser.mask(part.data, mask)
mask = newMask
push(out, part.withData(data = masked))
}
}
def onPull(): Unit = pull(in)
setHandler(in, this)
setHandler(out, this)
}
}
}

View file

@ -62,7 +62,7 @@ object WebSocketClientBlueprint {
new GraphStageLogic(shape) with InHandler with OutHandler {
// a special version of the parser which only parses one message and then reports the remaining data
// if some is available
val parser = new HttpResponseParser(settings.parserSettings, HttpHeaderParser(settings.parserSettings)()) {
val parser = new HttpResponseParser(settings.parserSettings, HttpHeaderParser(settings.parserSettings, log)()) {
var first = true
override def handleInformationalResponses = false
override protected def parseMessage(input: ByteString, offset: Int): StateResult = {
@ -111,6 +111,11 @@ object WebSocketClientBlueprint {
override def onPull(): Unit = pull(in)
setHandlers(in, out, this)
override def onUpstreamFailure(ex: Throwable): Unit = {
result.tryFailure(new RuntimeException("Connection failed.", ex))
super.onUpstreamFailure(ex)
}
}
override def toString = "UpgradeStage"

View file

@ -174,7 +174,7 @@ private[parser] trait CommonRules { this: Parser with StringBuilding ⇒
def challenge = rule {
`challenge-or-credentials` ~> { (scheme, params)
val (realms, otherParams) = params.partition(_._1 equalsIgnoreCase "realm")
HttpChallenge(scheme, realms.headOption.map(_._2).getOrElse(""), otherParams.toMap)
HttpChallenge(scheme, realms.headOption.map(_._2), otherParams.toMap)
}
}

View file

@ -6,6 +6,7 @@ package akka.http.impl.model.parser
import akka.http.scaladsl.settings.ParserSettings
import akka.http.scaladsl.settings.ParserSettings.CookieParsingMode
import akka.http.scaladsl.settings.ParserSettings.IllegalResponseHeaderValueProcessingMode
import akka.http.scaladsl.model.headers.HttpCookiePair
import akka.stream.impl.ConstantFun
import scala.util.control.NonFatal
@ -169,20 +170,26 @@ private[http] object HeaderParser {
def uriParsingMode: Uri.ParsingMode
def cookieParsingMode: ParserSettings.CookieParsingMode
def customMediaTypes: MediaTypes.FindCustom
def illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode
}
def Settings(
uriParsingMode: Uri.ParsingMode = Uri.ParsingMode.Relaxed,
cookieParsingMode: ParserSettings.CookieParsingMode = ParserSettings.CookieParsingMode.RFC6265,
customMediaTypes: MediaTypes.FindCustom = ConstantFun.scalaAnyTwoToNone): Settings = {
uriParsingMode: Uri.ParsingMode = Uri.ParsingMode.Relaxed,
cookieParsingMode: ParserSettings.CookieParsingMode = ParserSettings.CookieParsingMode.RFC6265,
customMediaTypes: MediaTypes.FindCustom = ConstantFun.scalaAnyTwoToNone,
mode: IllegalResponseHeaderValueProcessingMode = ParserSettings.IllegalResponseHeaderValueProcessingMode.Error): Settings = {
val _uriParsingMode = uriParsingMode
val _cookieParsingMode = cookieParsingMode
val _customMediaTypes = customMediaTypes
val _illegalResponseHeaderValueProcessingMode = mode
new Settings {
def uriParsingMode: Uri.ParsingMode = _uriParsingMode
def cookieParsingMode: CookieParsingMode = _cookieParsingMode
def customMediaTypes: MediaTypes.FindCustom = _customMediaTypes
def illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode =
_illegalResponseHeaderValueProcessingMode
}
}
val DefaultSettings: Settings = Settings()
}
}

View file

@ -13,9 +13,12 @@ import Parser.DeliveryScheme.Either
import Uri._
// format: OFF
// http://tools.ietf.org/html/rfc3986
private[http] class UriParser(val input: ParserInput,
/**
* INTERNAL API
*
* http://tools.ietf.org/html/rfc3986
*/
private[http] final class UriParser(val input: ParserInput,
val uriParsingCharset: Charset,
val uriParsingMode: Uri.ParsingMode,
val maxValueStackSize: Int) extends Parser(maxValueStackSize)

View file

@ -5,7 +5,7 @@
package akka.http.impl.settings
import akka.http.scaladsl.settings.ParserSettings
import akka.http.scaladsl.settings.ParserSettings.{ ErrorLoggingVerbosity, CookieParsingMode }
import akka.http.scaladsl.settings.ParserSettings.{ IllegalResponseHeaderValueProcessingMode, ErrorLoggingVerbosity, CookieParsingMode }
import akka.stream.impl.ConstantFun
import com.typesafe.config.Config
import scala.collection.JavaConverters._
@ -14,24 +14,25 @@ import akka.http.impl.util._
/** INTERNAL API */
private[akka] final case class ParserSettingsImpl(
maxUriLength: Int,
maxMethodLength: Int,
maxResponseReasonLength: Int,
maxHeaderNameLength: Int,
maxHeaderValueLength: Int,
maxHeaderCount: Int,
maxContentLength: Long,
maxChunkExtLength: Int,
maxChunkSize: Int,
uriParsingMode: Uri.ParsingMode,
cookieParsingMode: CookieParsingMode,
illegalHeaderWarnings: Boolean,
errorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity,
headerValueCacheLimits: Map[String, Int],
includeTlsSessionInfoHeader: Boolean,
customMethods: String Option[HttpMethod],
customStatusCodes: Int Option[StatusCode],
customMediaTypes: MediaTypes.FindCustom)
maxUriLength: Int,
maxMethodLength: Int,
maxResponseReasonLength: Int,
maxHeaderNameLength: Int,
maxHeaderValueLength: Int,
maxHeaderCount: Int,
maxContentLength: Long,
maxChunkExtLength: Int,
maxChunkSize: Int,
uriParsingMode: Uri.ParsingMode,
cookieParsingMode: CookieParsingMode,
illegalHeaderWarnings: Boolean,
errorLoggingVerbosity: ErrorLoggingVerbosity,
illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode,
headerValueCacheLimits: Map[String, Int],
includeTlsSessionInfoHeader: Boolean,
customMethods: String Option[HttpMethod],
customStatusCodes: Int Option[StatusCode],
customMediaTypes: MediaTypes.FindCustom)
extends akka.http.scaladsl.settings.ParserSettings {
require(maxUriLength > 0, "max-uri-length must be > 0")
@ -76,6 +77,7 @@ object ParserSettingsImpl extends SettingsCompanion[ParserSettingsImpl]("akka.ht
CookieParsingMode(c getString "cookie-parsing-mode"),
c getBoolean "illegal-header-warnings",
ErrorLoggingVerbosity(c getString "error-logging-verbosity"),
IllegalResponseHeaderValueProcessingMode(c getString "illegal-response-header-value-processing-mode"),
cacheConfig.entrySet.asScala.map(kvp kvp.getKey cacheConfig.getInt(kvp.getKey))(collection.breakOut),
c getBoolean "tls-session-info-header",
noCustomMethods,

View file

@ -0,0 +1,29 @@
/**
* Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.impl.util
import java.nio.charset.StandardCharsets
import akka.parboiled2.ParserInput.DefaultParserInput
import akka.util.ByteString
/**
* ParserInput reading directly off a ByteString. (Based on the ByteArrayBasedParserInput)
* This avoids a separate decoding step but assumes that each byte represents exactly one character,
* which is encoded by ISO-8859-1!
* You can therefore use this ParserInput type only if you know that all input will be `ISO-8859-1`-encoded,
* or only contains 7-bit ASCII characters (which is a subset of ISO-8859-1)!
*
* Note that this ParserInput type will NOT work with general `UTF-8`-encoded input as this can contain
* character representations spanning multiple bytes. However, if you know that your input will only ever contain
* 7-bit ASCII characters (0x00-0x7F) then UTF-8 is fine, since the first 127 UTF-8 characters are
* encoded with only one byte that is identical to 7-bit ASCII and ISO-8859-1.
*/
final class ByteStringParserInput(bytes: ByteString) extends DefaultParserInput {
override def charAt(ix: Int): Char = (bytes(ix) & 0xFF).toChar
override def length: Int = bytes.size
override def sliceString(start: Int, end: Int): String = bytes.slice(start, end).decodeString(StandardCharsets.ISO_8859_1)
override def sliceCharArray(start: Int, end: Int): Array[Char] =
StandardCharsets.ISO_8859_1.decode(bytes.slice(start, end).asByteBuffer).array()
}

View file

@ -33,6 +33,7 @@ abstract class ParserSettings private[akka] () extends BodyPartParser.Settings {
def getCookieParsingMode: ParserSettings.CookieParsingMode
def getIllegalHeaderWarnings: Boolean
def getErrorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity
def getIllegalResponseHeaderValueProcessingMode: ParserSettings.IllegalResponseHeaderValueProcessingMode
def getHeaderValueCacheLimits: ju.Map[String, Int]
def getIncludeTlsSessionInfoHeader: Boolean
def headerValueCacheLimits: Map[String, Int]
@ -81,6 +82,7 @@ abstract class ParserSettings private[akka] () extends BodyPartParser.Settings {
object ParserSettings extends SettingsCompanion[ParserSettings] {
trait CookieParsingMode
trait ErrorLoggingVerbosity
trait IllegalResponseHeaderValueProcessingMode
override def create(config: Config): ParserSettings = ParserSettingsImpl(config)
override def create(configOverrides: String): ParserSettings = ParserSettingsImpl(configOverrides)

View file

@ -9,6 +9,7 @@ import java.util.concurrent.CompletionStage
import javax.net.ssl._
import akka.actor._
import akka.dispatch.ExecutionContexts
import akka.event.{ Logging, LoggingAdapter }
import akka.http.impl.engine.HttpConnectionTimeoutException
import akka.http.impl.engine.client.PoolMasterActor.{ PoolSize, ShutdownAll }
@ -26,6 +27,7 @@ import akka.{ Done, NotUsed }
import akka.stream._
import akka.stream.TLSProtocol._
import akka.stream.scaladsl._
import akka.util.ByteString
import com.typesafe.config.Config
import com.typesafe.sslconfig.akka._
import com.typesafe.sslconfig.akka.util.AkkaLoggerFactory
@ -55,6 +57,27 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte
private[this] final val DefaultPortForProtocol = -1 // any negative value
private def fuseServerLayer(settings: ServerSettings, connectionContext: ConnectionContext, log: LoggingAdapter)(implicit mat: Materializer): BidiFlow[HttpResponse, ByteString, ByteString, HttpRequest, NotUsed] = {
val httpLayer = serverLayer(settings, None, log)
val tlsStage = sslTlsStage(connectionContext, Server)
BidiFlow.fromGraph(Fusing.aggressive(GraphDSL.create() { implicit b
import GraphDSL.Implicits._
val http = b.add(httpLayer)
val tls = b.add(tlsStage)
val timeouts = b.add(Flow[ByteString].recover {
case t: TimeoutException throw new HttpConnectionTimeoutException(t.getMessage)
})
tls.out2 ~> http.in2
tls.in1 <~ http.out1
tls.out1 ~> timeouts.in
BidiShape(http.in1, timeouts.out, tls.in2, http.out2)
}))
}
/**
* Creates a [[akka.stream.scaladsl.Source]] of [[akka.http.scaladsl.Http.IncomingConnection]] instances which represents a prospective HTTP server binding
* on the given `endpoint`.
@ -81,14 +104,14 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte
settings: ServerSettings = ServerSettings(system),
log: LoggingAdapter = system.log)(implicit fm: Materializer): Source[IncomingConnection, Future[ServerBinding]] = {
val effectivePort = if (port >= 0) port else connectionContext.defaultPort
val tlsStage = sslTlsStage(connectionContext, Server)
val fullLayer = fuseServerLayer(settings, connectionContext, log)
val connections: Source[Tcp.IncomingConnection, Future[Tcp.ServerBinding]] =
Tcp().bind(interface, effectivePort, settings.backlog, settings.socketOptions, halfClose = false, settings.timeouts.idleTimeout)
connections.map {
case Tcp.IncomingConnection(localAddress, remoteAddress, flow)
val layer = serverLayer(settings, Some(remoteAddress), log)
val flowWithTimeoutRecovered = flow.via(MapError { case t: TimeoutException new HttpConnectionTimeoutException(t.getMessage) })
IncomingConnection(localAddress, remoteAddress, layer atop tlsStage join flowWithTimeoutRecovered)
IncomingConnection(localAddress, remoteAddress, fullLayer join flow)
}.mapMaterializedValue {
_.map(tcpBinding ServerBinding(tcpBinding.localAddress)(() tcpBinding.unbind()))(fm.executionContext)
}
@ -110,30 +133,42 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) exte
connectionContext: ConnectionContext = defaultServerHttpContext,
settings: ServerSettings = ServerSettings(system),
log: LoggingAdapter = system.log)(implicit fm: Materializer): Future[ServerBinding] = {
def handleOneConnection(incomingConnection: IncomingConnection): Future[Done] =
try
incomingConnection.flow
.watchTermination()(Keep.right)
.joinMat(handler)(Keep.left)
.run()
catch {
case NonFatal(e)
log.error(e, "Could not materialize handling flow for {}", incomingConnection)
throw e
}
val effectivePort = if (port >= 0) port else connectionContext.defaultPort
val fullLayer: Flow[ByteString, ByteString, Future[Done]] = Flow.fromGraph(Fusing.aggressive(
Flow[HttpRequest]
.watchTermination()(Keep.right)
.viaMat(handler)(Keep.left)
.joinMat(fuseServerLayer(settings, connectionContext, log))(Keep.left)))
val connections: Source[Tcp.IncomingConnection, Future[Tcp.ServerBinding]] =
Tcp().bind(interface, effectivePort, settings.backlog, settings.socketOptions, halfClose = false, settings.timeouts.idleTimeout)
connections.mapAsyncUnordered(settings.maxConnections) {
case incoming: Tcp.IncomingConnection
try {
val layer =
if (settings.remoteAddressHeader) fullLayer.addAttributes(HttpAttributes.remoteAddress(Some(incoming.remoteAddress)))
else fullLayer
layer.joinMat(incoming.flow)(Keep.left)
.run().recover {
// Ignore incoming errors from the connection as they will cancel the binding.
// As far as it is known currently, these errors can only happen if a TCP error bubbles up
// from the TCP layer through the HTTP layer to the Http.IncomingConnection.flow.
// See https://github.com/akka/akka/issues/17992
case NonFatal(ex)
Done
}(ExecutionContexts.sameThreadExecutionContext)
} catch {
case NonFatal(e)
log.error(e, "Could not materialize handling flow for {}", incoming)
throw e
}
}.mapMaterializedValue {
_.map(tcpBinding ServerBinding(tcpBinding.localAddress)(() tcpBinding.unbind()))(fm.executionContext)
}.to(Sink.ignore).run()
bind(interface, port, connectionContext, settings, log)
.mapAsyncUnordered(settings.maxConnections) { connection
handleOneConnection(connection).recoverWith {
// Ignore incoming errors from the connection as they will cancel the binding.
// As far as it is known currently, these errors can only happen if a TCP error bubbles up
// from the TCP layer through the HTTP layer to the Http.IncomingConnection.flow.
// See https://github.com/akka/akka/issues/17992
case NonFatal(_) Future.successful(())
}(fm.executionContext)
}
.to(Sink.ignore)
.run()
}
/**

View file

@ -10,8 +10,9 @@ import akka.http.impl.model.JavaInitialization
import language.implicitConversions
import java.io.File
import java.nio.file.{ Path, Files }
import java.nio.file.{ Files, Path }
import java.lang.{ Iterable JIterable }
import scala.util.control.NonFatal
import scala.concurrent.Future
import scala.concurrent.duration._
@ -20,8 +21,8 @@ import akka.util.ByteString
import akka.stream.scaladsl._
import akka.stream.stage._
import akka.stream._
import akka.{ NotUsed, stream }
import akka.http.scaladsl.model.ContentType.{ NonBinary, Binary }
import akka.{ Done, NotUsed, stream }
import akka.http.scaladsl.model.ContentType.{ Binary, NonBinary }
import akka.http.scaladsl.util.FastFuture
import akka.http.javadsl.{ model jm }
import akka.http.impl.util.{ JavaMapping, StreamUtils }
@ -31,6 +32,8 @@ import scala.compat.java8.OptionConverters._
import scala.compat.java8.FutureConverters._
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters
/**
* Models the entity (aka "body" or "content) of an HTTP message.
*/
@ -72,6 +75,25 @@ sealed trait HttpEntity extends jm.HttpEntity {
.via(new akka.http.impl.util.ToStrict(timeout, contentType))
.runWith(Sink.head)
/**
* Discards the entities data bytes by running the `dataBytes` Source contained in this `entity`.
*
* Note: It is crucial that entities are either discarded, or consumed by running the underlying [[akka.stream.scaladsl.Source]]
* as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the `entity.dataBytes` more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an
* "stream can cannot be materialized more than once" exception.
*
* In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716.
*/
override def discardBytes(mat: Materializer): HttpMessage.DiscardedEntity =
new HttpMessage.DiscardedEntity(dataBytes.runWith(Sink.ignore)(mat))
/**
* Returns a copy of the given entity with the ByteString chunks of this entity transformed by the given transformer.
* For a `Chunked` entity, the chunks will be transformed one by one keeping the chunk metadata (but may introduce an
@ -145,6 +167,7 @@ sealed trait HttpEntity extends jm.HttpEntity {
/** Java API */
override def toStrict(timeoutMillis: Long, materializer: Materializer): CompletionStage[jm.HttpEntity.Strict] =
toStrict(timeoutMillis.millis)(materializer).toJava
}
/* An entity that can be used for body parts */
@ -369,6 +392,10 @@ object HttpEntity {
override def productPrefix = "HttpEntity.Default"
override def toString: String = {
s"$productPrefix($contentType,$contentLength bytes total)"
}
/** Java API */
override def getContentLength = contentLength
}
@ -414,6 +441,10 @@ object HttpEntity {
override def withData(data: Source[ByteString, Any]): HttpEntity.CloseDelimited = copy(data = data)
override def productPrefix = "HttpEntity.CloseDelimited"
override def toString: String = {
s"$productPrefix($contentType)"
}
}
/**
@ -431,6 +462,10 @@ object HttpEntity {
override def withData(data: Source[ByteString, Any]): HttpEntity.IndefiniteLength = copy(data = data)
override def productPrefix = "HttpEntity.IndefiniteLength"
override def toString: String = {
s"$productPrefix($contentType)"
}
}
/**
@ -469,6 +504,10 @@ object HttpEntity {
override def productPrefix = "HttpEntity.Chunked"
override def toString: String = {
s"$productPrefix($contentType)"
}
/** Java API */
def getChunks: stream.javadsl.Source[jm.HttpEntity.ChunkStreamPart, AnyRef] =
stream.javadsl.Source.fromGraph(chunks.asInstanceOf[Source[jm.HttpEntity.ChunkStreamPart, AnyRef]])
@ -533,14 +572,14 @@ object HttpEntity {
* to entity constructors.
*/
def limitableByteSource[Mat](source: Source[ByteString, Mat]): Source[ByteString, Mat] =
limitable(source, sizeOfByteString)
source.via(new Limitable(sizeOfByteString))
/**
* Turns the given source into one that respects the `withSizeLimit` calls when used as a parameter
* to entity constructors.
*/
def limitableChunkSource[Mat](source: Source[ChunkStreamPart, Mat]): Source[ChunkStreamPart, Mat] =
limitable(source, sizeOfChunkStreamPart)
source.via(new Limitable(sizeOfChunkStreamPart))
/**
* INTERNAL API
@ -548,35 +587,46 @@ object HttpEntity {
private val sizeOfByteString: ByteString Int = _.size
private val sizeOfChunkStreamPart: ChunkStreamPart Int = _.data.size
/**
* INTERNAL API
*/
private def limitable[Out, Mat](source: Source[Out, Mat], sizeOf: Out Int): Source[Out, Mat] =
source.via(Flow[Out].transform { ()
new PushStage[Out, Out] {
var maxBytes = -1L
var bytesLeft = Long.MaxValue
private val limitableDefaults = Attributes.name("limitable")
override def preStart(ctx: LifecycleContext) =
ctx.attributes.getFirst[SizeLimit] match {
case Some(limit: SizeLimit) if limit.isDisabled
// "no limit"
case Some(SizeLimit(bytes, cl @ Some(contentLength)))
if (contentLength > bytes) throw EntityStreamSizeException(bytes, cl)
// else we still count but never throw an error
case Some(SizeLimit(bytes, None))
maxBytes = bytes
bytesLeft = bytes
case None
}
private final class Limitable[T](sizeOf: T Int) extends GraphStage[FlowShape[T, T]] {
val in = Inlet[T]("Limitable.in")
val out = Outlet[T]("Limitable.out")
override val shape = FlowShape.of(in, out)
override protected val initialAttributes: Attributes = limitableDefaults
def onPush(elem: Out, ctx: stage.Context[Out]): stage.SyncDirective = {
bytesLeft -= sizeOf(elem)
if (bytesLeft >= 0) ctx.push(elem)
else ctx.fail(EntityStreamSizeException(maxBytes))
override def createLogic(attributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) with InHandler with OutHandler {
private var maxBytes = -1L
private var bytesLeft = Long.MaxValue
override def preStart(): Unit = {
attributes.getFirst[SizeLimit] match {
case Some(limit: SizeLimit) if limit.isDisabled
// "no limit"
case Some(SizeLimit(bytes, cl @ Some(contentLength)))
if (contentLength > bytes) throw EntityStreamSizeException(bytes, cl)
// else we still count but never throw an error
case Some(SizeLimit(bytes, None))
maxBytes = bytes
bytesLeft = bytes
case None
}
}
}.named("limitable"))
override def onPush(): Unit = {
val elem = grab(in)
bytesLeft -= sizeOf(elem)
if (bytesLeft >= 0) push(out, elem)
else failStage(EntityStreamSizeException(maxBytes))
}
override def onPull(): Unit = {
pull(in)
}
setHandlers(in, out, this)
}
}
/**
* INTERNAL API
@ -607,4 +657,44 @@ object HttpEntity {
val (newData, whenCompleted) = StreamUtils.captureTermination(x.data)
x.copy(data = newData).asInstanceOf[T] whenCompleted
}
/**
* Represents the currently being-drained HTTP Entity which triggers completion of the contained
* Future once the entity has been drained for the given HttpMessage completely.
*/
final class DiscardedEntity(f: Future[Done]) extends akka.http.javadsl.model.HttpMessage.DiscardedEntity {
/**
* This future completes successfully once the underlying entity stream has been
* successfully drained (and fails otherwise).
*/
def future: Future[Done] = f
/**
* This future completes successfully once the underlying entity stream has been
* successfully drained (and fails otherwise).
*/
def completionStage: CompletionStage[Done] = FutureConverters.toJava(f)
}
/** Adds Scala DSL idiomatic methods to [[HttpEntity]], e.g. versions of methods with an implicit [[Materializer]]. */
implicit final class HttpEntityScalaDSLSugar(val httpEntity: HttpEntity) extends AnyVal {
/**
* Discards the entities data bytes by running the `dataBytes` Source contained in this `entity`.
*
* Note: It is crucial that entities are either discarded, or consumed by running the underlying [[akka.stream.scaladsl.Source]]
* as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the `entity.dataBytes` more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an
* "stream can cannot be materialized more than once" exception.
*
* In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716.
*/
def discardBytes()(implicit mat: Materializer): HttpMessage.DiscardedEntity =
httpEntity.discardBytes(mat)
}
}

View file

@ -4,14 +4,19 @@
package akka.http.scaladsl.model
import akka.http.scaladsl.settings.ParserSettings
import java.nio.charset.StandardCharsets
import scala.util.{ Success, Failure }
import akka.parboiled2.ParseError
import scala.util.{ Failure, Success }
import akka.parboiled2.{ ParseError, ParserInput }
import akka.http.impl.util.ToStringRenderable
import akka.http.impl.model.parser.{ CharacterClasses, HeaderParser }
import akka.http.javadsl.{ model jm }
import akka.http.scaladsl.model.headers._
import akka.parboiled2.ParserInput.DefaultParserInput
import akka.util.{ ByteString, OptionVal }
import scala.annotation.tailrec
import scala.collection.immutable
/**
* The model of an HTTP header. In its most basic form headers are simple name-value pairs. Header names
@ -80,6 +85,16 @@ object HttpHeader {
}
} else ParsingResult.Error(ErrorInfo(s"Illegal HTTP header name", name))
/** INTERNAL API */
private[akka] def fastFind[T >: Null <: jm.HttpHeader](clazz: Class[T], headers: immutable.Seq[HttpHeader]): OptionVal[T] = {
  // Hand-rolled iterator loop with early return: avoids the Option allocation of
  // `headers.find(...)` on the hot header-lookup path. (Restored the `⇒` arrows that
  // were lost in transit — without them the match cases do not compile.)
  val it = headers.iterator
  while (it.hasNext) it.next() match {
    case h if clazz.isInstance(h) ⇒ return OptionVal.Some[T](h.asInstanceOf[T])
    case _                        ⇒ // continue ...
  }
  OptionVal.None.asInstanceOf[OptionVal[T]]
}
sealed trait ParsingResult {
def errors: List[ErrorInfo]
}

View file

@ -19,13 +19,11 @@ import scala.reflect.{ ClassTag, classTag }
import akka.Done
import akka.parboiled2.CharUtils
import akka.stream.Materializer
import akka.util.{ ByteString, HashCode }
import akka.util.{ ByteString, HashCode, OptionVal }
import akka.http.impl.util._
import akka.http.javadsl.{ model jm }
import akka.http.scaladsl.util.FastFuture._
import akka.stream.scaladsl.Sink
import headers._
import akka.http.impl.util.JavaMapping.Implicits._
/**
* Common base class of HttpRequest and HttpResponse.
@ -41,9 +39,23 @@ sealed trait HttpMessage extends jm.HttpMessage {
def entity: ResponseEntity
def protocol: HttpProtocol
/** Drains entity stream */
def discardEntityBytes(mat: Materializer): HttpMessage.DiscardedEntity =
new HttpMessage.DiscardedEntity(entity.dataBytes.runWith(Sink.ignore)(mat))
/**
* Discards the entities data bytes by running the `dataBytes` Source contained in this HttpMessage.
*
* Note: It is crucial that entities are either discarded, or consumed by running the underlying [[akka.stream.scaladsl.Source]]
* as otherwise the lack of consuming of the data will trigger back-pressure to the underlying TCP connection
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the `entity.dataBytes` more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an
* "stream can cannot be materialized more than once" exception.
*
* In future versions, more automatic ways to warn or resolve these situations may be introduced, see issue #18716.
*/
def discardEntityBytes(mat: Materializer): HttpMessage.DiscardedEntity = entity.discardBytes()(mat)
/** Returns a copy of this message with the list of headers set to the given ones. */
def withHeaders(headers: HttpHeader*): Self = withHeaders(headers.toList)
@ -90,14 +102,13 @@ sealed trait HttpMessage extends jm.HttpMessage {
}
/** Returns the first header of the given type if there is one */
def header[T <: jm.HttpHeader: ClassTag]: Option[T] = {
val erasure = classTag[T].runtimeClass
headers.find(erasure.isInstance).asInstanceOf[Option[T]] match {
case header: Some[T] header
case _ if erasure == classOf[`Content-Type`] Some(entity.contentType).asInstanceOf[Option[T]]
case _ None
def header[T >: Null <: jm.HttpHeader: ClassTag]: Option[T] = {
val clazz = classTag[T].runtimeClass.asInstanceOf[Class[T]]
HttpHeader.fastFind[T](clazz, headers) match {
case OptionVal.Some(h) Some(h)
case _ if clazz == classOf[`Content-Type`] Some(`Content-Type`(entity.contentType)).asInstanceOf[Option[T]]
case _ None
}
}
/**
@ -133,7 +144,11 @@ sealed trait HttpMessage extends jm.HttpMessage {
/** Java API */
def getHeaders: JIterable[jm.HttpHeader] = (headers: immutable.Seq[jm.HttpHeader]).asJava
/** Java API */
def getHeader[T <: jm.HttpHeader](headerClass: Class[T]): Optional[T] = header(ClassTag(headerClass)).asJava
def getHeader[T <: jm.HttpHeader](headerClass: Class[T]): Optional[T] =
HttpHeader.fastFind[jm.HttpHeader](headerClass.asInstanceOf[Class[jm.HttpHeader]], headers) match {
case OptionVal.Some(h) Optional.of(h.asInstanceOf[T])
case _ Optional.empty()
}
/** Java API */
def getHeader(headerName: String): Optional[jm.HttpHeader] = {
val lowerCased = headerName.toRootLowerCase
@ -151,7 +166,7 @@ object HttpMessage {
}
/**
* Represents the the currently being-drained HTTP Entity which triggers completion of the contained
* Represents the currently being-drained HTTP Entity which triggers completion of the contained
* Future once the entity has been drained for the given HttpMessage completely.
*/
final class DiscardedEntity(f: Future[Done]) extends akka.http.javadsl.model.HttpMessage.DiscardedEntity {
@ -178,7 +193,7 @@ object HttpMessage {
* (as designed), however possibly leading to an idle-timeout that will close the connection, instead of
* just having ignored the data.
*
* Warning: It is not allowed to discard and/or consume the the `entity.dataBytes` more than once
* Warning: It is not allowed to discard and/or consume the `entity.dataBytes` more than once
* as the stream is directly attached to the "live" incoming data source from the underlying TCP connection.
* Allowing it to be consumable twice would require buffering the incoming data, thus defeating the purpose
* of its streaming nature. If the dataBytes source is materialized a second time, it will fail with an
@ -310,14 +325,22 @@ object HttpRequest {
* include a valid [[akka.http.scaladsl.model.headers.Host]] header or if URI authority and [[akka.http.scaladsl.model.headers.Host]] header don't match.
*/
def effectiveUri(uri: Uri, headers: immutable.Seq[HttpHeader], securedConnection: Boolean, defaultHostHeader: Host): Uri = {
val hostHeader = headers.collectFirst { case x: Host x }
def findHost(headers: immutable.Seq[HttpHeader]): OptionVal[Host] = {
val it = headers.iterator
while (it.hasNext) it.next() match {
case h: Host return OptionVal.Some(h)
case _ // continue ...
}
OptionVal.None
}
val hostHeader: OptionVal[Host] = findHost(headers)
if (uri.isRelative) {
def fail(detail: String) =
throw IllegalUriException(s"Cannot establish effective URI of request to `$uri`, request has a relative URI and $detail")
val Host(host, port) = hostHeader match {
case None if (defaultHostHeader.isEmpty) fail("is missing a `Host` header") else defaultHostHeader
case Some(x) if x.isEmpty if (defaultHostHeader.isEmpty) fail("an empty `Host` header") else defaultHostHeader
case Some(x) x
case OptionVal.None if (defaultHostHeader.isEmpty) fail("is missing a `Host` header") else defaultHostHeader
case OptionVal.Some(x) if x.isEmpty if (defaultHostHeader.isEmpty) fail("an empty `Host` header") else defaultHostHeader
case OptionVal.Some(x) x
}
uri.toEffectiveHttpRequestUri(host, port, securedConnection)
} else // http://tools.ietf.org/html/rfc7230#section-5.4

View file

@ -8,7 +8,7 @@ import akka.http.impl.util._
import akka.http.javadsl.{ model jm }
import akka.http.scaladsl.model.RequestEntityAcceptance._
sealed trait RequestEntityAcceptance {
sealed trait RequestEntityAcceptance extends jm.RequestEntityAcceptance {
def isEntityAccepted: Boolean
}
object RequestEntityAcceptance {
@ -36,6 +36,7 @@ final case class HttpMethod private[http] (
requestEntityAcceptance: RequestEntityAcceptance) extends jm.HttpMethod with SingletonValueRenderable {
override def isEntityAccepted: Boolean = requestEntityAcceptance.isEntityAccepted
override def toString: String = s"HttpMethod($value)"
override def getRequestEntityAcceptance: jm.RequestEntityAcceptance = requestEntityAcceptance
}
object HttpMethod {

View file

@ -13,7 +13,8 @@ final case class HttpChallenge(scheme: String, realm: String,
params: Map[String, String] = Map.empty) extends jm.headers.HttpChallenge with ValueRenderable {
def render[R <: Rendering](r: R): r.type = {
r ~~ scheme ~~ " realm=" ~~#! realm
r ~~ scheme
if (realm != null) r ~~ " realm=" ~~#! realm
if (params.nonEmpty) params.foreach { case (k, v) r ~~ ',' ~~ k ~~ '=' ~~# v }
r
}
@ -22,6 +23,16 @@ final case class HttpChallenge(scheme: String, realm: String,
def getParams: util.Map[String, String] = params.asJava
}
// FIXME: AbstractFunction3 required for bin compat. remove in Akka 3.0 and change realm in case class to option #20786
object HttpChallenge extends scala.runtime.AbstractFunction3[String, String, Map[String, String], HttpChallenge] {
  /** Creates a challenge with an optional realm; `None` is stored as `null` (bin-compat workaround, see #20786). */
  def apply(scheme: String, realm: Option[String]): HttpChallenge =
    HttpChallenge(scheme, realm.orNull, Map.empty[String, String])
  /** Creates a challenge with an optional realm and additional authentication parameters. */
  def apply(scheme: String, realm: Option[String], params: Map[String, String]): HttpChallenge =
    HttpChallenge(scheme, realm.orNull, params)
}
object HttpChallenges {
def basic(realm: String): HttpChallenge = HttpChallenge("Basic", realm)

View file

@ -5,7 +5,6 @@
package akka.http.scaladsl.model.headers
import akka.http.impl.model.JavaInitialization
import akka.util.Unsafe
import language.implicitConversions
import scala.collection.immutable
@ -22,6 +21,7 @@ abstract class HttpOriginRange extends jm.headers.HttpOriginRange with ValueRend
/** Java API */
def matches(origin: jm.headers.HttpOrigin): Boolean = matches(origin.asScala)
}
object HttpOriginRange {
case object `*` extends HttpOriginRange {
def matches(origin: HttpOrigin) = true
@ -43,6 +43,7 @@ object HttpOriginRange {
final case class HttpOrigin(scheme: String, host: Host) extends jm.headers.HttpOrigin with ValueRenderable {
def render[R <: Rendering](r: R): r.type = host.renderValue(r ~~ scheme ~~ "://")
}
object HttpOrigin {
implicit val originsRenderer: Renderer[immutable.Seq[HttpOrigin]] = Renderer.seqRenderer(" ", "null")
@ -50,4 +51,4 @@ object HttpOrigin {
val parser = new UriParser(str, UTF8, Uri.ParsingMode.Relaxed)
parser.parseOrigin()
}
}
}

View file

@ -16,12 +16,13 @@ import scala.util.{ Failure, Success, Try }
import scala.annotation.tailrec
import scala.collection.immutable
import akka.parboiled2.util.Base64
import akka.event.Logging
import akka.http.impl.util._
import akka.http.javadsl.{ model jm }
import akka.http.scaladsl.model._
sealed abstract class ModeledCompanion[T: ClassTag] extends Renderable {
val name = getClass.getSimpleName.replace("$minus", "-").dropRight(1) // trailing $
val name = ModeledCompanion.nameFromClass(getClass)
val lowercaseName = name.toRootLowerCase
private[this] val nameBytes = name.asciiBytes
final def render[R <: Rendering](r: R): r.type = r ~~ nameBytes ~~ ':' ~~ ' '
@ -36,6 +37,20 @@ sealed abstract class ModeledCompanion[T: ClassTag] extends Renderable {
case res Left(res.errors)
}
}
/** INTERNAL API */
private[akka] object ModeledCompanion {
  /**
   * Derives the HTTP header name from a modeled-header companion's class name,
   * e.g. the class of `Content-Length`'s companion yields "Content-Length".
   * Decodes Scala name mangling: `$minus` encodes `-` in identifiers and a
   * trailing `$` marks a companion object.
   */
  def nameFromClass[T](clazz: Class[T]): String = {
    val name = {
      val n = Logging.simpleName(clazz).replace("$minus", "-")
      if (n.last == '$') n.dropRight(1) // drop trailing $
      else n
    }
    // for nested classes keep only the part after the first `$` separator
    val dollarIndex = name.indexOf('$')
    if (dollarIndex != -1) name.drop(dollarIndex + 1)
    else name
  }
}
sealed trait ModeledHeader extends HttpHeader with Serializable {
def renderInRequests: Boolean = false // default implementation
@ -362,7 +377,8 @@ object `Content-Length` extends ModeledCompanion[`Content-Length`]
* Instances of this class will only be created transiently during header parsing and will never appear
* in HttpMessage.header. To access the Content-Length, see subclasses of HttpEntity.
*/
final case class `Content-Length` private[http] (length: Long) extends RequestResponseHeader {
final case class `Content-Length` private[http] (length: Long) extends jm.headers.ContentLength
with RequestResponseHeader {
def renderValue[R <: Rendering](r: R): r.type = r ~~ length
protected def companion = `Content-Length`
}

View file

@ -34,6 +34,7 @@ abstract class ParserSettings private[akka] () extends akka.http.javadsl.setting
def cookieParsingMode: ParserSettings.CookieParsingMode
def illegalHeaderWarnings: Boolean
def errorLoggingVerbosity: ParserSettings.ErrorLoggingVerbosity
def illegalResponseHeaderValueProcessingMode: ParserSettings.IllegalResponseHeaderValueProcessingMode
def headerValueCacheLimits: Map[String, Int]
def includeTlsSessionInfoHeader: Boolean
def customMethods: String Option[HttpMethod]
@ -56,6 +57,7 @@ abstract class ParserSettings private[akka] () extends akka.http.javadsl.setting
override def getMaxUriLength = maxUriLength
override def getMaxMethodLength = maxMethodLength
override def getErrorLoggingVerbosity: js.ParserSettings.ErrorLoggingVerbosity = errorLoggingVerbosity
override def getIllegalResponseHeaderValueProcessingMode = illegalResponseHeaderValueProcessingMode
override def getCustomMethods = new Function[String, Optional[akka.http.javadsl.model.HttpMethod]] {
override def apply(t: String) = OptionConverters.toJava(customMethods(t))
@ -100,10 +102,12 @@ abstract class ParserSettings private[akka] () extends akka.http.javadsl.setting
val map = types.map(c (c.mainType, c.subType) c).toMap
self.copy(customMediaTypes = (main, sub) map.get((main, sub)))
}
def withIllegalResponseHeaderValueProcessingMode(newValue: ParserSettings.IllegalResponseHeaderValueProcessingMode): ParserSettings =
self.copy(illegalResponseHeaderValueProcessingMode = newValue)
}
object ParserSettings extends SettingsCompanion[ParserSettings] {
trait CookieParsingMode extends akka.http.javadsl.settings.ParserSettings.CookieParsingMode
sealed trait CookieParsingMode extends akka.http.javadsl.settings.ParserSettings.CookieParsingMode
object CookieParsingMode {
case object RFC6265 extends CookieParsingMode
case object Raw extends CookieParsingMode
@ -114,7 +118,7 @@ object ParserSettings extends SettingsCompanion[ParserSettings] {
}
}
trait ErrorLoggingVerbosity extends akka.http.javadsl.settings.ParserSettings.ErrorLoggingVerbosity
sealed trait ErrorLoggingVerbosity extends akka.http.javadsl.settings.ParserSettings.ErrorLoggingVerbosity
object ErrorLoggingVerbosity {
case object Off extends ErrorLoggingVerbosity
case object Simple extends ErrorLoggingVerbosity
@ -129,6 +133,21 @@ object ParserSettings extends SettingsCompanion[ParserSettings] {
}
}
sealed trait IllegalResponseHeaderValueProcessingMode extends akka.http.javadsl.settings.ParserSettings.IllegalResponseHeaderValueProcessingMode
object IllegalResponseHeaderValueProcessingMode {
case object Error extends IllegalResponseHeaderValueProcessingMode
case object Warn extends IllegalResponseHeaderValueProcessingMode
case object Ignore extends IllegalResponseHeaderValueProcessingMode
def apply(string: String): IllegalResponseHeaderValueProcessingMode =
string.toRootLowerCase match {
case "error" Error
case "warn" Warn
case "ignore" Ignore
case x throw new IllegalArgumentException(s"[$x] is not a legal `illegal-response-header-value-processing-mode` setting")
}
}
override def apply(config: Config): ParserSettings = ParserSettingsImpl(config)
override def apply(configOverrides: String): ParserSettings = ParserSettingsImpl(configOverrides)
}

View file

@ -16,7 +16,7 @@ import akka.testkit.AkkaSpec
import akka.http.scaladsl.{ Http, TestUtils }
import akka.http.scaladsl.model._
import akka.stream.testkit.Utils
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.concurrent.PatienceConfiguration.Timeout
class HighLevelOutgoingConnectionSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer(ActorMaterializerSettings(system).withFuzzing(true))
@ -38,8 +38,7 @@ class HighLevelOutgoingConnectionSpec extends AkkaSpec {
.mapAsync(4)(_.entity.toStrict(1.second))
.map { r val s = r.data.utf8String; log.debug(s); s.toInt }
.runFold(0)(_ + _)
result.futureValue(PatienceConfig(10.seconds)) shouldEqual N * (N + 1) / 2
result.futureValue(Timeout(10.seconds)) should ===(N * (N + 1) / 2)
binding.futureValue.unbind()
}
@ -73,7 +72,7 @@ class HighLevelOutgoingConnectionSpec extends AkkaSpec {
.map { r val s = r.data.utf8String; log.debug(s); s.toInt }
.runFold(0)(_ + _)
result.futureValue(PatienceConfig(10.seconds)) shouldEqual C * N * (N + 1) / 2
result.futureValue(Timeout(10.seconds)) should ===(C * N * (N + 1) / 2)
binding.futureValue.unbind()
}

View file

@ -4,6 +4,8 @@
package akka.http.impl.engine.client
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
import scala.reflect.ClassTag
import org.scalatest.Inside
@ -326,6 +328,65 @@ class LowLevelOutgoingConnectionSpec extends AkkaSpec("akka.loggers = []\n akka.
}
}
"process the illegal response header value properly" which {
val illegalChar = '\u0001'
val escapeChar = "\\u%04x" format illegalChar.toInt
"catch illegal response header value by default" in new TestSetup {
sendStandardRequest()
sendWireData(
s"""HTTP/1.1 200 OK
|Some-Header: value1$illegalChar
|Other-Header: value2
|
|""")
responsesSub.request(1)
val error @ IllegalResponseException(info) = responses.expectError()
info.summary shouldEqual s"""Illegal character '$escapeChar' in header value"""
netOut.expectError(error)
requestsSub.expectCancellation()
netInSub.expectCancellation()
}
val ignoreConfig =
"""
akka.http.parsing.illegal-response-header-value-processing-mode = ignore
"""
"ignore illegal response header value if setting the config to ignore" in new TestSetup(config = ignoreConfig) {
sendStandardRequest()
sendWireData(
s"""HTTP/1.1 200 OK
|Some-Header: value1$illegalChar
|Other-Header: value2
|
|""")
val HttpResponse(_, headers, _, _) = expectResponse()
val headerStr = headers.map(h s"${h.name}: ${h.value}").mkString(",")
headerStr shouldEqual "Some-Header: value1,Other-Header: value2"
}
val warnConfig =
"""
akka.http.parsing.illegal-response-header-value-processing-mode = warn
"""
"ignore illegal response header value and log a warning message if setting the config to warn" in new TestSetup(config = warnConfig) {
sendStandardRequest()
sendWireData(
s"""HTTP/1.1 200 OK
|Some-Header: value1$illegalChar
|Other-Header: value2
|
|""")
val HttpResponse(_, headers, _, _) = expectResponse()
val headerStr = headers.map(h s"${h.name}: ${h.value}").mkString(",")
headerStr shouldEqual "Some-Header: value1,Other-Header: value2"
}
}
"produce proper errors" which {
"catch the request entity stream being shorter than the Content-Length" in new TestSetup {
@ -808,13 +869,14 @@ class LowLevelOutgoingConnectionSpec extends AkkaSpec("akka.loggers = []\n akka.
}
}
class TestSetup(maxResponseContentLength: Int = -1) {
class TestSetup(maxResponseContentLength: Int = -1, config: String = "") {
val requests = TestPublisher.manualProbe[HttpRequest]()
val responses = TestSubscriber.manualProbe[HttpResponse]()
def settings = {
val s = ClientConnectionSettings(system)
.withUserAgentHeader(Some(`User-Agent`(List(ProductVersion("akka-http", "test")))))
val s = ClientConnectionSettings(
ConfigFactory.parseString(config).withFallback(system.settings.config)
).withUserAgentHeader(Some(`User-Agent`(List(ProductVersion("akka-http", "test")))))
if (maxResponseContentLength < 0) s
else s.withParserSettings(s.parserSettings.withMaxContentLength(maxResponseContentLength))
}
@ -873,5 +935,6 @@ class LowLevelOutgoingConnectionSpec extends AkkaSpec("akka.loggers = []\n akka.
responsesSub.request(1)
responses.expectNext()
}
}
}

View file

@ -246,6 +246,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
val parser = {
val p = HttpHeaderParser.unprimed(
settings = ParserSettings(system),
system.log,
warnOnIllegalHeader = info system.log.warning(info.formatPretty))
if (primed) HttpHeaderParser.prime(p) else p
}

View file

@ -15,7 +15,7 @@ object HttpHeaderParserTestBed extends App {
val system = ActorSystem("HttpHeaderParserTestBed", testConf)
val parser = HttpHeaderParser.prime {
HttpHeaderParser.unprimed(ParserSettings(system), warnOnIllegalHeader = info system.log.warning(info.formatPretty))
HttpHeaderParser.unprimed(ParserSettings(system), system.log, warnOnIllegalHeader = info system.log.warning(info.formatPretty))
}
println {

View file

@ -42,7 +42,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
akka.event-handlers = ["akka.testkit.TestEventListener"]
akka.loglevel = WARNING
akka.http.parsing.max-header-value-length = 32
akka.http.parsing.max-uri-length = 20
akka.http.parsing.max-uri-length = 40
akka.http.parsing.max-content-length = 4000000000""")
implicit val system = ActorSystem(getClass.getSimpleName, testConf)
import system.dispatcher
@ -79,6 +79,14 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
closeAfterResponseCompletion shouldEqual Seq(false)
}
"with absolute uri in request-target" in new Test {
"""GET http://127.0.0.1:8080/hello HTTP/1.1
|Host: 127.0.0.1:8080
|
|""" should parseTo(HttpRequest(uri = "http://127.0.0.1:8080/hello", headers = List(Host("127.0.0.1", 8080))))
closeAfterResponseCompletion shouldEqual Seq(false)
}
"with 3 headers and a body" in new Test {
"""POST /resource/yes HTTP/1.0
|User-Agent: curl/7.19.7 xyz
@ -300,7 +308,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
"support `rawRequestUriHeader` setting" in new Test {
override protected def newParser: HttpRequestParser =
new HttpRequestParser(parserSettings, rawRequestUriHeader = true, _headerParser = HttpHeaderParser(parserSettings)())
new HttpRequestParser(parserSettings, rawRequestUriHeader = true, headerParser = HttpHeaderParser(parserSettings, system.log)())
"""GET /f%6f%6fbar?q=b%61z HTTP/1.1
|Host: ping
@ -408,9 +416,9 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
}
"a too-long URI" in new Test {
"GET /23456789012345678901 HTTP/1.1" should parseToError(
"GET /2345678901234567890123456789012345678901 HTTP/1.1" should parseToError(
RequestUriTooLong,
ErrorInfo("URI length exceeds the configured limit of 20 characters"))
ErrorInfo("URI length exceeds the configured limit of 40 characters"))
}
"HTTP version 1.2" in new Test {
@ -557,7 +565,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
def multiParse(parser: HttpRequestParser)(input: Seq[String]): Seq[Either[RequestOutput, StrictEqualHttpRequest]] =
Source(input.toList)
.map(bytes SessionBytes(TLSPlacebo.dummySession, ByteString(bytes)))
.via(parser.stage).named("parser")
.via(parser).named("parser")
.splitWhen(x x.isInstanceOf[MessageStart] || x.isInstanceOf[EntityStreamError])
.prefixAndTail(1)
.collect {
@ -582,7 +590,7 @@ class RequestParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
.awaitResult(awaitAtMost)
protected def parserSettings: ParserSettings = ParserSettings(system)
protected def newParser = new HttpRequestParser(parserSettings, false, HttpHeaderParser(parserSettings)())
protected def newParser = new HttpRequestParser(parserSettings, false, HttpHeaderParser(parserSettings, system.log)())
private def compactEntity(entity: RequestEntity): Future[RequestEntity] =
entity match {

View file

@ -320,7 +320,7 @@ class ResponseParserSpec extends FreeSpec with Matchers with BeforeAndAfterAll {
protected def parserSettings: ParserSettings = ParserSettings(system)
def newParserStage(requestMethod: HttpMethod = GET) = {
val parser = new HttpResponseParser(parserSettings, HttpHeaderParser(parserSettings)())
val parser = new HttpResponseParser(parserSettings, HttpHeaderParser(parserSettings, system.log)())
parser.setContextForNextResponse(HttpResponseParser.ResponseContext(requestMethod, None))
parser.stage
}

View file

@ -0,0 +1,91 @@
/*
* Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.impl.engine.server
import akka.http.scaladsl.model.HttpEntity.Chunked
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.{ ContentType, HttpRequest, HttpResponse }
import akka.http.scaladsl.model.MediaTypes._
import akka.stream.{ ActorMaterializer, Materializer }
import akka.stream.testkit.Utils.{ TE, _ }
import akka.testkit.{ AkkaSpec, EventFilter }
import org.scalatest.Inside
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Regression spec for #21008: a failure thrown while consuming a `100 Continue`
 * entity must not cause internal graph failures in the server blueprint.
 */
class HttpServerBug2108Spec extends AkkaSpec(
  """
     akka.loglevel = WARNING
     akka.loggers = ["akka.testkit.TestEventListener", "akka.event.Logging$DefaultLogger"]
     akka.http.server.request-timeout = infinite
     akka.test.filter-leeway=300ms""") with Inside { spec ⇒
  implicit val materializer = ActorMaterializer()

  "The HttpServer" should {
    "not cause internal graph failures when consuming a `100 Continue` entity triggers a failure" in assertAllStagesStopped(new HttpServerTestSetupBase {
      override implicit def system = HttpServerBug2108Spec.this.system
      override implicit def materializer: Materializer = HttpServerBug2108Spec.this.materializer

      send("""POST / HTTP/1.1
             |Host: example.com
             |Expect: 100-continue
             |Transfer-Encoding: chunked
             |
             |""")

      inside(expectRequest()) {
        case HttpRequest(POST, _, _, Chunked(ContentType(`application/octet-stream`, None), data), _) ⇒
          // consumer deliberately fails on the first chunk to reproduce the bug
          val done = data.runForeach(_ ⇒ throw TE("failed on first chunk"))

          expectResponseWithWipedDate(
            """HTTP/1.1 100 Continue
              |Server: akka-http/test
              |Date: XXXX
              |
              |""")

          send("""10
                 |0123456789ABCDEF
                 |0
                 |
                 |""")

          // Bug #21008 does not actually ever make the request fail instead it logs the exception and behaves
          // nicely so we need to check that the exception didn't get logged
          var sawException = false
          try {
            EventFilter[IllegalArgumentException](occurrences = 1) intercept {
              // make sure the failure has happened
              Await.ready(done, 10.seconds)
              // and then when the failure has happened/future completes, we push a reply
              responses.sendNext(HttpResponse(entity = "Yeah"))
            }
            // got such an error, that is bad,
            sawException = true
          } catch {
            case _: AssertionError ⇒ sawException = false
          }
          if (sawException) fail("HttpServerBluePrint.ControllerStage: requirement failed: Cannot pull closed port (requestParsingIn)")

          // and the client should still get that ok
          expectResponseWithWipedDate(
            """HTTP/1.1 200 OK
              |Server: akka-http/test
              |Date: XXXX
              |Connection: close
              |Content-Type: text/plain; charset=UTF-8
              |Content-Length: 4
              |
              |Yeah""")
      }

      netIn.sendComplete()
      netOut.expectComplete()
    })
  }
}

View file

@ -5,25 +5,27 @@
package akka.http.impl.engine.server
import java.net.{ InetAddress, InetSocketAddress }
import akka.http.impl.util._
import akka.http.scaladsl.Http.ServerLayer
import akka.http.scaladsl.model.HttpEntity._
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.MediaTypes._
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.settings.ServerSettings
import scala.reflect.ClassTag
import scala.util.Random
import akka.stream.scaladsl._
import akka.stream.testkit.Utils.assertAllStagesStopped
import akka.stream.testkit._
import akka.stream.{ ActorMaterializer, Fusing }
import akka.testkit.AkkaSpec
import akka.util.ByteString
import org.scalatest.Inside
import scala.annotation.tailrec
import scala.concurrent.duration._
import org.scalatest.Inside
import org.scalatest.concurrent.ScalaFutures
import akka.util.ByteString
import akka.stream.scaladsl._
import akka.stream.ActorMaterializer
import akka.stream.testkit._
import akka.http.scaladsl.model._
import akka.http.impl.util._
import headers._
import HttpEntity._
import MediaTypes._
import HttpMethods._
import Utils.assertAllStagesStopped
import akka.testkit.AkkaSpec
import scala.reflect.ClassTag
import scala.util.Random
class HttpServerSpec extends AkkaSpec(
"""akka.loggers = []
@ -813,6 +815,32 @@ class HttpServerSpec extends AkkaSpec(
shutdownBlueprint()
})
"support remote-address-header when blueprint not constructed with it" in assertAllStagesStopped(new TestSetup {
// coverage for #21130
lazy val theAddress = InetAddress.getByName("127.5.2.1")
override def settings: ServerSettings =
super.settings.withRemoteAddressHeader(true)
// this is the normal behavior for bindAndHandle(flow), it will set an attribute
// with remote ip before flow is materialized, rather than from the blueprint apply method
override def modifyServer(server: ServerLayer): ServerLayer = {
BidiFlow.fromGraph(Fusing.aggressive(server).withAttributes(
HttpAttributes.remoteAddress(Some(new InetSocketAddress(theAddress, 8080)))
))
}
send("""GET / HTTP/1.1
|Host: example.com
|
|""".stripMarginWithNewline("\r\n"))
val request = expectRequest()
request.headers should contain(`Remote-Address`(RemoteAddress(theAddress, Some(8080))))
shutdownBlueprint()
})
"support request timeouts" which {
"are defined via the config" in assertAllStagesStopped(new RequestTimeoutTestSetup(10.millis) {
@ -832,10 +860,10 @@ class HttpServerSpec extends AkkaSpec(
netOut.expectComplete()
})
"are programmatically increased (not expiring)" in assertAllStagesStopped(new RequestTimeoutTestSetup(10.millis) {
"are programmatically increased (not expiring)" in assertAllStagesStopped(new RequestTimeoutTestSetup(50.millis) {
send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(50.millis))
netOut.expectNoBytes(30.millis)
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(250.millis))
netOut.expectNoBytes(150.millis)
responses.sendNext(HttpResponse())
expectResponseWithWipedDate(
"""HTTP/1.1 200 OK
@ -849,10 +877,10 @@ class HttpServerSpec extends AkkaSpec(
netOut.expectComplete()
})
"are programmatically increased (expiring)" in assertAllStagesStopped(new RequestTimeoutTestSetup(10.millis) {
"are programmatically increased (expiring)" in assertAllStagesStopped(new RequestTimeoutTestSetup(50.millis) {
send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(50.millis))
netOut.expectNoBytes(30.millis)
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(250.millis))
netOut.expectNoBytes(150.millis)
expectResponseWithWipedDate(
"""HTTP/1.1 503 Service Unavailable
|Server: akka-http/test
@ -867,9 +895,9 @@ class HttpServerSpec extends AkkaSpec(
netOut.expectComplete()
})
"are programmatically decreased" in assertAllStagesStopped(new RequestTimeoutTestSetup(50.millis) {
"are programmatically decreased" in assertAllStagesStopped(new RequestTimeoutTestSetup(250.millis) {
send("GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(10.millis))
expectRequest().header[`Timeout-Access`].foreach(_.timeoutAccess.updateTimeout(50.millis))
val mark = System.nanoTime()
expectResponseWithWipedDate(
"""HTTP/1.1 503 Service Unavailable
@ -880,7 +908,7 @@ class HttpServerSpec extends AkkaSpec(
|
|The server was not able to produce a timely response to your request.
|Please try again in a short while!""")
(System.nanoTime() - mark) should be < (40 * 1000000L)
(System.nanoTime() - mark) should be < (200 * 1000000L)
netIn.sendComplete()
netOut.expectComplete()

View file

@ -5,9 +5,11 @@
package akka.http.impl.engine.server
import java.net.InetSocketAddress
import akka.http.impl.engine.ws.ByteStringSinkProbe
import akka.http.scaladsl.settings.ServerSettings
import akka.stream.TLSProtocol._
import scala.concurrent.duration.FiniteDuration
import akka.actor.ActorSystem
import akka.event.NoLogging
@ -16,8 +18,9 @@ import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.{ TestPublisher, TestSubscriber }
import akka.http.impl.util._
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.{ ProductVersion, Server }
import akka.http.scaladsl.model.{ HttpResponse, HttpRequest }
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
abstract class HttpServerTestSetupBase {
implicit def system: ActorSystem
@ -30,11 +33,14 @@ abstract class HttpServerTestSetupBase {
.withServerHeader(Some(Server(List(ProductVersion("akka-http", "test")))))
def remoteAddress: Option[InetSocketAddress] = None
// hook to modify server, for example add attributes
def modifyServer(server: Http.ServerLayer): Http.ServerLayer = server
val (netIn, netOut) = {
val netIn = TestPublisher.probe[ByteString]()
val netOut = ByteStringSinkProbe()
RunnableGraph.fromGraph(GraphDSL.create(HttpServerBluePrint(settings, remoteAddress = remoteAddress, log = NoLogging)) { implicit b server
RunnableGraph.fromGraph(GraphDSL.create(modifyServer(HttpServerBluePrint(settings, remoteAddress = remoteAddress, log = NoLogging))) { implicit b server
import GraphDSL.Implicits._
Source.fromPublisher(netIn) ~> Flow[ByteString].map(SessionBytes(null, _)) ~> server.in2
server.out1 ~> Flow[SslTlsOutbound].collect { case SendBytes(x) x }.buffer(1, OverflowStrategy.backpressure) ~> netOut.sink

View file

@ -3,7 +3,7 @@
*/
package akka.http.impl.engine.ws
import scala.concurrent.{ Await, Promise }
import scala.concurrent.{ Await, Future, Promise }
import scala.concurrent.duration.DurationInt
import org.scalactic.ConversionCheckedTripleEquals
import org.scalatest.concurrent.ScalaFutures
@ -20,12 +20,15 @@ import org.scalatest.concurrent.Eventually
import java.net.InetSocketAddress
import akka.Done
import akka.http.scaladsl.settings.ClientConnectionSettings
import akka.stream.impl.fusing.GraphStages
import akka.stream.stage.{ GraphStageLogic, GraphStageWithMaterializedValue, InHandler, OutHandler }
import akka.util.ByteString
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.{ AkkaSpec, EventFilter }
import scala.util.{ Failure, Success }
class WebSocketIntegrationSpec extends AkkaSpec("akka.stream.materializer.debug.fuzzing-mode=off")
with Eventually {
@ -196,4 +199,19 @@ class WebSocketIntegrationSpec extends AkkaSpec("akka.stream.materializer.debug.
}
"A websocket client" should {
"fail the materialized future if the request fails" in {
val flow = Http().webSocketClientFlow(
WebSocketRequest("ws://127.0.0.1:65535/no/server/here"),
settings = ClientConnectionSettings(system).withConnectingTimeout(250.millis))
val future = Source.maybe[Message].viaMat(flow)(Keep.right).toMat(Sink.ignore)(Keep.left).run()
import system.dispatcher
whenReady(future.map(r Success(r)).recover { case ex Failure(ex) }) { resTry
resTry.isFailure should ===(true)
resTry.failed.get.getMessage should ===("Connection failed.")
}
}
}
}

View file

@ -293,6 +293,8 @@ class HttpHeaderSpec extends FreeSpec with Matchers {
"Host: [2001:db8::1]" =!= Host("[2001:db8::1]")
"Host: [::FFFF:129.144.52.38]" =!= Host("[::FFFF:129.144.52.38]")
"Host: spray.io:80000" =!= ErrorInfo("Illegal HTTP header 'Host': requirement failed", "Illegal port: 80000")
"Host: 127.0.0.1:9000" =!= Host("127.0.0.1", 9000)
"Host: 127.0.0.1" =!= Host("127.0.0.1")
}
"If-Match" in {
@ -373,7 +375,7 @@ class HttpHeaderSpec extends FreeSpec with Matchers {
"Proxy-Authenticate" in {
"Proxy-Authenticate: Basic realm=\"WallyWorld\",attr=\"val>ue\", Fancy realm=\"yeah\"" =!=
`Proxy-Authenticate`(HttpChallenge("Basic", "WallyWorld", Map("attr" "val>ue")), HttpChallenge("Fancy", "yeah"))
`Proxy-Authenticate`(HttpChallenge("Basic", Some("WallyWorld"), Map("attr" "val>ue")), HttpChallenge("Fancy", Some("yeah")))
}
"Proxy-Authorization" in {
@ -544,11 +546,13 @@ class HttpHeaderSpec extends FreeSpec with Matchers {
}
"WWW-Authenticate" in {
"WWW-Authenticate: Basic" =!=
`WWW-Authenticate`(HttpChallenge("Basic", None))
"WWW-Authenticate: Basic realm=\"WallyWorld\"" =!=
`WWW-Authenticate`(HttpChallenge("Basic", "WallyWorld"))
`WWW-Authenticate`(HttpChallenge("Basic", Some("WallyWorld")))
"WWW-Authenticate: BaSiC rEaLm=WallyWorld" =!=
`WWW-Authenticate`(HttpChallenge("BaSiC", "WallyWorld")).renderedTo("BaSiC realm=\"WallyWorld\"")
"WWW-Authenticate: Basic realm=\"foo<bar\"" =!= `WWW-Authenticate`(HttpChallenge("Basic", "foo<bar"))
"WWW-Authenticate: Basic realm=\"foo<bar\"" =!= `WWW-Authenticate`(HttpChallenge("Basic", Some("foo<bar")))
"""WWW-Authenticate: Digest
realm="testrealm@host.com",
qop="auth,auth-int",
@ -559,9 +563,9 @@ class HttpHeaderSpec extends FreeSpec with Matchers {
"nonce" "dcd98b7102dd2f0e8b11d0f600bfb0c093", "opaque" "5ccc069c403ebaf9f0171e9517f40e41"))).renderedTo(
"Digest realm=\"testrealm@host.com\",qop=\"auth,auth-int\",nonce=dcd98b7102dd2f0e8b11d0f600bfb0c093,opaque=5ccc069c403ebaf9f0171e9517f40e41")
"WWW-Authenticate: Basic realm=\"WallyWorld\",attr=\"val>ue\", Fancy realm=\"yeah\"" =!=
`WWW-Authenticate`(HttpChallenge("Basic", "WallyWorld", Map("attr" "val>ue")), HttpChallenge("Fancy", "yeah"))
`WWW-Authenticate`(HttpChallenge("Basic", Some("WallyWorld"), Map("attr" "val>ue")), HttpChallenge("Fancy", Some("yeah")))
"""WWW-Authenticate: Fancy realm="Secure Area",nonce=42""" =!=
`WWW-Authenticate`(HttpChallenge("Fancy", "Secure Area", Map("nonce" "42")))
`WWW-Authenticate`(HttpChallenge("Fancy", Some("Secure Area"), Map("nonce" "42")))
}
"X-Forwarded-For" in {

View file

@ -0,0 +1,34 @@
package akka.http.impl.util
import akka.util.ByteString
import org.scalatest.{ Matchers, WordSpec }
class ByteStringParserInputSpec extends WordSpec with Matchers {
"The ByteStringParserInput" should {
val parser = new ByteStringParserInput(ByteString("abcde", "ISO-8859-1"))
"return the correct character for index" in {
parser.charAt(0) should ===('a')
parser.charAt(4) should ===('e')
}
"return the correct length" in {
parser.length should ===(5)
}
"slice the bytes correctly into a string" in {
parser.sliceString(0, 3) should ===("abc")
parser.sliceString(3, 5) should ===("de")
}
"slice the bytes correctly into a char array" in {
val array = parser.sliceCharArray(0, 3)
array(0) should ===('a')
array(1) should ===('b')
array(2) should ===('c')
array.length should ===(3)
}
}
}

View file

@ -4,13 +4,22 @@
package akka.http.scaladsl
import java.io.File
import java.nio.file.spi.FileSystemProvider
import java.nio.file.{ FileSystem, Path }
import com.typesafe.config.{ Config, ConfigFactory }
import scala.util.{ Failure, Success }
import akka.actor.{ UnhandledMessage, ActorSystem }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import akka.actor.{ ActorSystem, UnhandledMessage }
import akka.stream.{ ActorMaterializer, IOResult }
import akka.stream.scaladsl.{ FileIO, Sink, Source }
import akka.http.scaladsl.model._
import akka.http.impl.util._
import akka.util.ByteString
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
object TestClient extends App {
val testConf: Config = ConfigFactory.parseString("""
@ -62,5 +71,47 @@ object TestClient extends App {
}
}
// for gathering dumps of entity and headers from akka http client
// and curl in parallel to compare
def fetchAndStoreABunchOfUrlsWithHttpAndCurl(urls: Seq[String]): Unit = {
assert(urls.nonEmpty)
assert(new File("/tmp/client-dumps/").exists(), "you need to create /tmp/client-dumps/ before running")
val testConf: Config = ConfigFactory.parseString("""
akka.loglevel = DEBUG
akka.log-dead-letters = off
akka.io.tcp.trace-logging = off""")
implicit val system = ActorSystem("ServerTest", testConf)
implicit val fm = ActorMaterializer()
import system.dispatcher
try {
val done = Future.traverse(urls.zipWithIndex) {
case (url, index)
Http().singleRequest(HttpRequest(uri = url)).map { response
val path = new File(s"/tmp/client-dumps/akka-body-$index.dump").toPath
val headersPath = new File(s"/tmp/client-dumps/akka-headers-$index.dump").toPath
import scala.sys.process._
(s"""curl -D /tmp/client-dumps/curl-headers-$index.dump $url""" #> new File(s"/tmp/client-dumps/curl-body-$index.dump")).!
val headers = Source(response.headers).map(header ByteString(header.name + ": " + header.value + "\n"))
.runWith(FileIO.toPath(headersPath))
val body = response.entity.dataBytes
.runWith(FileIO.toPath(path))
.map(res (url, path, res)): Future[(String, Path, IOResult)]
headers.flatMap(_ body)
}
}
println("Fetched urls: " + Await.result(done, 10.minutes))
} finally {
Http().shutdownAllConnectionPools().flatMap(_ system.terminate())
}
}
def shutdown(): Unit = system.terminate()
}

View file

@ -13,7 +13,7 @@ import scala.concurrent.Await
import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.ws._
import akka.stream.ActorMaterializer
import akka.stream._
import akka.stream.scaladsl.{ Source, Flow }
import com.typesafe.config.{ ConfigFactory, Config }
import HttpMethods._
@ -23,10 +23,17 @@ object TestServer extends App {
akka.loglevel = INFO
akka.log-dead-letters = off
akka.stream.materializer.debug.fuzzing-mode = off
akka.actor.serialize-creators = off
akka.actor.serialize-messages = off
akka.actor.default-dispatcher.throughput = 1000
""")
implicit val system = ActorSystem("ServerTest", testConf)
implicit val fm = ActorMaterializer()
val settings = ActorMaterializerSettings(system)
.withFuzzing(false)
// .withSyncProcessingLimit(Int.MaxValue)
.withInputBuffer(128, 128)
implicit val fm = ActorMaterializer(settings)
try {
val binding = Http().bindAndHandleSync({
case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) if req.header[UpgradeToWebSocket].isDefined

View file

@ -141,14 +141,23 @@ class HttpEntitySpec extends FreeSpec with MustMatchers with BeforeAndAfterAll {
"Default" in {
val entity = Default(tpe, 11, source(abc, de, fgh, ijk))
entity.toString must include(entity.productPrefix)
entity.toString must include("11")
entity.toString mustNot include("Source")
}
"CloseDelimited" in {
val entity = CloseDelimited(tpe, source(abc, de, fgh, ijk))
entity.toString must include(entity.productPrefix)
entity.toString mustNot include("Source")
}
"Chunked" in {
val entity = Chunked(tpe, source(Chunk(abc)))
entity.toString must include(entity.productPrefix)
entity.toString mustNot include("Source")
}
"IndefiniteLength" in {
val entity = IndefiniteLength(tpe, source(abc, de, fgh, ijk))
entity.toString must include(entity.productPrefix)
entity.toString mustNot include("Source")
}
}
"support withoutSizeLimit" - {

View file

@ -4,7 +4,9 @@
package akka.http.scaladsl.model
import akka.util.ByteString
import headers.Host
import headers.`Content-Type`
import org.scalatest.{ Matchers, WordSpec }
class HttpMessageSpec extends WordSpec with Matchers {
@ -29,4 +31,12 @@ class HttpMessageSpec extends WordSpec with Matchers {
fail("http://example.com/", Host("example.com", 8080))
}
}
"HttpMessage" should {
"not throw a ClassCastException on header[`Content-Type`]" in {
val entity = HttpEntity.Strict(ContentTypes.`text/plain(UTF-8)`, ByteString.fromString("hello akka"))
HttpResponse(entity = entity).header[`Content-Type`] shouldBe Some(`Content-Type`(ContentTypes.`text/plain(UTF-8)`))
}
}
}

View file

@ -153,7 +153,7 @@ class HeaderSpec extends FreeSpec with Matchers {
`Last-Modified`(DateTime(2016, 2, 4, 9, 9, 0)),
Link(Uri("http://example.com"), LinkParams.`title*`("example")),
Location(Uri("http://example.com")),
`Proxy-Authenticate`(HttpChallenge("Basic", "example.com")),
`Proxy-Authenticate`(HttpChallenge("Basic", Some("example.com"))),
`Sec-WebSocket-Accept`("dGhlIHNhbXBsZSBub25jZQ"),
`Sec-WebSocket-Extensions`(Vector(WebSocketExtension("foo"))),
`Sec-WebSocket-Version`(Vector(13)),
@ -161,7 +161,7 @@ class HeaderSpec extends FreeSpec with Matchers {
`Set-Cookie`(HttpCookie("sessionId", "b0eb8b8b3ad246")),
`Transfer-Encoding`(TransferEncodings.chunked),
Upgrade(Vector(UpgradeProtocol("HTTP", Some("2.0")))),
`WWW-Authenticate`(HttpChallenge("Basic", "example.com")))
`WWW-Authenticate`(HttpChallenge("Basic", Some("example.com"))))
responseHeaders.foreach { header
header shouldBe 'renderInResponses