From ffd18fa3c07c85e812a90a65f631faf100ab2d63 Mon Sep 17 00:00:00 2001
From: Mathias
Date: Fri, 2 Oct 2015 14:50:35 +0200
Subject: [PATCH] =htc #18397 fix ArrayIndexOutOfBoundsException in HttpHeaderParser

This error could occur if the parser sees relatively few, relatively long
headers on one connection: long header values create many new trie nodes per
insertion, so the node count could exceed the Short range used to index the
trie before any other cache limit was reached.
---
 .../engine/parsing/HttpHeaderParser.scala     | 23 ++++++++++++-------
 .../engine/parsing/HttpHeaderParserSpec.scala | 15 ++++++++++--
 2 files changed, 28 insertions(+), 10 deletions(-)

diff --git a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala
index 269fd7c71f..b63d110ea2 100644
--- a/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala
+++ b/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala
@@ -103,9 +103,12 @@ private[engine] final class HttpHeaderParser private (
       try {
         val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
         unshareIfRequired()
-        values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeCount, valueCount = 1)
+        val nodeIx = nodeCount
         insertRemainingCharsAsNewNodes(input, header)(cursor, endIx, valueIx)
-      } catch { case OutOfTrieSpaceException ⇒ /* if we cannot insert then we simply don't */ }
+        values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeIx, valueCount = 1)
+      } catch {
+        case OutOfTrieSpaceException ⇒ // if we cannot insert a value then we simply don't
+      }
       resultHeader = header
       endIx
     }
@@ -208,10 +211,11 @@ private[engine] final class HttpHeaderParser private (
           val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
           val rowIx = newBranchDataRowIndex
           unshareIfRequired()
+          val newNodeIx = nodeCount.toShort
+          insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
           nodes(nodeIx) = nodeBits(rowIx, nodeChar)
           branchData(rowIx + 1) = (nodeIx + 1).toShort
-          branchData(rowIx + 1 + signum) = nodeCount.toShort
-          insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
+          branchData(rowIx + 1 + signum) = newNodeIx
         case msb ⇒
           if (nodeChar == 0) { // leaf node
             require(cursor == endIx, "Cannot insert key of which a prefix already has a value")
@@ -222,8 +226,9 @@ private[engine] final class HttpHeaderParser private (
               case 0 ⇒ // branch doesn't exist yet, create
                 val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
                 unshareIfRequired()
-                branchData(branchIndex) = nodeCount.toShort // make the previously implicit "equals" sub node explicit
+                val newNodeIx = nodeCount.toShort
                 insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
+                branchData(branchIndex) = newNodeIx // make the previously implicit "equals" sub node explicit
               case subNodeIx ⇒ // descend, but advance only on match
                 insert(input, value)(cursor + 1 - math.abs(signum), endIx, subNodeIx, colonIx)
             }
@@ -260,9 +265,11 @@ private[engine] final class HttpHeaderParser private (
 
   private def newNodeIndex: Int = {
     val index = nodeCount
-    if (index == nodes.length) nodes = copyOf(nodes, index * 3 / 2)
-    nodeCount = index + 1
-    index
+    if (index < Short.MaxValue) {
+      if (index == nodes.length) nodes = copyOf(nodes, math.min(index * 3 / 2, Short.MaxValue))
+      nodeCount = index + 1
+      index
+    } else throw OutOfTrieSpaceException
   }
 
   private def newBranchDataRowIndex: Int = {
diff --git a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala
index e18b79b06e..513dd9bb32 100644
--- a/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala
+++ b/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala
@@ -168,7 +168,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
         "HTTP header value exceeds the configured limit of 1000 characters"
     }
 
-    "continue parsing raw headers even if the overall cache capacity is reached" in new TestSetup() {
+    "continue parsing raw headers even if the overall cache value capacity is reached" in new TestSetup() {
       val randomHeaders = Stream.continually {
         val name = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 16))
         val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))
@@ -179,7 +179,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
       } should be < 300 // number of cache hits is smaller than the number of successfully parsed headers
     }
 
-    "continue parsing modelled headers even if the overall cache capacity is reached" in new TestSetup() {
+    "continue parsing modelled headers even if the overall cache value capacity is reached" in new TestSetup() {
       val randomHostHeaders = Stream.continually {
         Host(
           host = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 8)),
@@ -190,6 +190,17 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
       } should be < 300 // number of cache hits is smaller than the number of successfully parsed headers
     }
 
+    "continue parsing headers even if the overall cache node capacity is reached" in new TestSetup() {
+      val randomHostHeaders = Stream.continually {
+        RawHeader(
+          name = nextRandomString(nextRandomAlphaNumChar, 60),
+          value = nextRandomString(nextRandomAlphaNumChar, 1000))
+      }
+      randomHostHeaders.take(100).foldLeft(0) {
+        case (acc, header) ⇒ acc + parseAndCache(header.toString + "\r\nx", header)
+      } should be < 300 // number of cache hits is smaller than the number of successfully parsed headers
+    }
+
     "continue parsing raw headers even if the header-specific cache capacity is reached" in new TestSetup() {
       val randomHeaders = Stream.continually {
         val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))