=htc #18397 fix ArrayIndexOutOfBoundsException in HttpHeaderParser

This error could occur if the parser sees relatively few, relatively long headers on one connection.
Mathias 2015-10-02 14:50:35 +02:00
parent 78a5a21819
commit ffd18fa3c0
2 changed files with 28 additions and 10 deletions
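
The root cause is visible in the numbers: the trie stores node indices as Short values in its branchData array, so at most Short.MaxValue (32767) nodes can ever be addressed, and each cached header character costs roughly one trie node. Before this fix, nodeCount.toShort silently wrapped past that limit and the next array access blew up. A minimal sketch of the arithmetic (the one-node-per-character cost is a simplification, and all names below are illustrative, not the parser's):

    // Why "relatively few, relatively long" headers are enough to overflow:
    // node indices must fit into a Short, but each cached character of a new
    // header consumes about one trie node.
    object NodeOverflowSketch extends App {
      val headersOnConnection = 40      // relatively few headers ...
      val charsPerHeader = 1000         // ... but relatively long ones
      val nodesNeeded = headersOnConnection * charsPerHeader

      println(s"nodes needed: $nodesNeeded, addressable: ${Short.MaxValue}") // 40000 vs 32767
      println(s"wrapped index: ${nodesNeeded.toShort}")                      // -25536
      // nodes(-25536) is the ArrayIndexOutOfBoundsException this commit fixes
    }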

@@ -103,9 +103,12 @@ private[engine] final class HttpHeaderParser private (
     try {
       val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
       unshareIfRequired()
-      values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeCount, valueCount = 1)
+      val nodeIx = nodeCount
       insertRemainingCharsAsNewNodes(input, header)(cursor, endIx, valueIx)
-    } catch { case OutOfTrieSpaceException ⇒ /* if we cannot insert then we simply don't */ }
+      values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeIx, valueCount = 1)
+    } catch {
+      case OutOfTrieSpaceException ⇒ // if we cannot insert a value then we simply don't
+    }
     resultHeader = header
     endIx
   }
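
Note the reordering in this hunk: both steps that can throw OutOfTrieSpaceException (the index allocation and insertRemainingCharsAsNewNodes) now run before values(rootValueIx) is written, so a failed insert leaves the trie untouched instead of publishing a branch root that was never built. A self-contained sketch of that allocate-first, publish-last shape (names here are hypothetical, not the parser's):

    object PublishLastSketch extends App {
      case object OutOfSpaceException extends RuntimeException

      val values = new Array[String](4) // tiny capacity, to force the overflow
      private var valueCount = 0

      def newValueIndex(): Int =
        if (valueCount < values.length) { valueCount += 1; valueCount - 1 }
        else throw OutOfSpaceException

      def insert(value: String): Unit =
        try {
          val ix = newValueIndex() // may throw: allocate before any visible change
          values(ix) = value       // publish last, only after allocation succeeded
        } catch {
          case OutOfSpaceException => // if we cannot insert a value we simply don't
        }

      (1 to 6).foreach(i => insert(s"v$i"))
      println(values.toList) // List(v1, v2, v3, v4): extra inserts dropped, state consistent
    }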
@@ -208,10 +211,11 @@ private[engine] final class HttpHeaderParser private (
         val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
         val rowIx = newBranchDataRowIndex
         unshareIfRequired()
+        val newNodeIx = nodeCount.toShort
+        insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
         nodes(nodeIx) = nodeBits(rowIx, nodeChar)
         branchData(rowIx + 1) = (nodeIx + 1).toShort
-        branchData(rowIx + 1 + signum) = nodeCount.toShort
-        insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
+        branchData(rowIx + 1 + signum) = newNodeIx
       case msb ⇒
         if (nodeChar == 0) { // leaf node
           require(cursor == endIx, "Cannot insert key of which a prefix already has a value")
@@ -222,8 +226,9 @@ private[engine] final class HttpHeaderParser private (
         case 0 ⇒ // branch doesn't exist yet, create
           val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
           unshareIfRequired()
-          branchData(branchIndex) = nodeCount.toShort // make the previously implicit "equals" sub node explicit
+          val newNodeIx = nodeCount.toShort
           insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
+          branchData(branchIndex) = newNodeIx // make the previously implicit "equals" sub node explicit
         case subNodeIx ⇒ // descend, but advance only on match
           insert(input, value)(cursor + 1 - math.abs(signum), endIx, subNodeIx, colonIx)
       }
@@ -260,9 +265,11 @@ private[engine] final class HttpHeaderParser private (
   private def newNodeIndex: Int = {
     val index = nodeCount
-    if (index == nodes.length) nodes = copyOf(nodes, index * 3 / 2)
-    nodeCount = index + 1
-    index
+    if (index < Short.MaxValue) {
+      if (index == nodes.length) nodes = copyOf(nodes, math.min(index * 3 / 2, Short.MaxValue))
+      nodeCount = index + 1
+      index
+    } else throw OutOfTrieSpaceException
   }

   private def newBranchDataRowIndex: Int = {
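
This guarded newNodeIndex is the heart of the fix: once Short.MaxValue nodes exist it throws OutOfTrieSpaceException (which the insertion paths above now catch cleanly) instead of handing out an index that no longer fits in a Short, and the math.min keeps the final growth step from allocating slots that could never be addressed. A standalone rendering of the same guard, with simplified types for illustration:

    import java.util.Arrays.copyOf

    // Grow by 3/2 as before, but never beyond Short.MaxValue entries, and
    // signal exhaustion instead of letting the index wrap negative.
    object BoundedNodeAllocator {
      case object OutOfSpaceException extends RuntimeException

      private var nodes = new Array[Char](16)
      private var nodeCount = 0

      def newNodeIndex: Int = {
        val index = nodeCount
        if (index < Short.MaxValue) {
          if (index == nodes.length)
            nodes = copyOf(nodes, math.min(index * 3 / 2, Short.MaxValue.toInt))
          nodeCount = index + 1
          index
        } else throw OutOfSpaceException // callers catch this and simply skip caching
      }
    }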

@@ -168,7 +168,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
         "HTTP header value exceeds the configured limit of 1000 characters"
     }

-    "continue parsing raw headers even if the overall cache capacity is reached" in new TestSetup() {
+    "continue parsing raw headers even if the overall cache value capacity is reached" in new TestSetup() {
       val randomHeaders = Stream.continually {
         val name = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 16))
         val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))
@@ -179,7 +179,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
       } should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
     }

-    "continue parsing modelled headers even if the overall cache capacity is reached" in new TestSetup() {
+    "continue parsing modelled headers even if the overall cache value capacity is reached" in new TestSetup() {
       val randomHostHeaders = Stream.continually {
         Host(
           host = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 8)),
@@ -190,6 +190,17 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
       } should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
     }

+    "continue parsing headers even if the overall cache node capacity is reached" in new TestSetup() {
+      val randomHostHeaders = Stream.continually {
+        RawHeader(
+          name = nextRandomString(nextRandomAlphaNumChar, 60),
+          value = nextRandomString(nextRandomAlphaNumChar, 1000))
+      }
+      randomHostHeaders.take(100).foldLeft(0) {
+        case (acc, header) ⇒ acc + parseAndCache(header.toString + "\r\nx", header)
+      } should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
+    }
+
     "continue parsing raw headers even if the header-specific cache capacity is reached" in new TestSetup() {
       val randomHeaders = Stream.continually {
         val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))
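
The new test added above targets the node capacity directly: 100 raw headers with 60-character names and 1000-character values need on the order of 106,000 trie nodes, far past the Short.MaxValue cap, so the cache must fill up while parsing must carry on. A quick back-of-the-envelope check (assuming, as before, roughly one node per cached character):

    val charsPerHeader = 60 + 1000          // name + value lengths used in the test
    val nodesNeeded = 100 * charsPerHeader  // rough one-node-per-character estimate
    assert(nodesNeeded > Short.MaxValue)    // 106000 > 32767: node space is exhausted
    // long before all 100 headers are cached, yet every header must still parse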