=htc #16488 add missing assertions in tests
- replaced pure `===` by `shouldEqual`
- fixed the backport of spray/spray#927 in ae8c47acc170907ee9c8dfbcb946b400035f6877
- fixed HttpHeaderParserSpec
This commit is contained in:
parent bd3ee6b54f
commit 75f262d047

4 changed files with 82 additions and 83 deletions
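Why the `shouldEqual` switch matters: inside a ScalaTest `WordSpec with Matchers`, a bare `a === b` evaluates to a `Boolean` that is silently discarded, so a failing comparison never fails the test, whereas `shouldEqual` throws a `TestFailedException` on mismatch. A minimal standalone sketch (not part of this diff, spec name is just for illustration):

```scala
import org.scalatest.{ Matchers, WordSpec }

class AssertionStyleSpec extends WordSpec with Matchers {
  "a bare ===" should {
    "not actually assert anything" in {
      1 === 2 // evaluates to false, the result is discarded, the test still passes
    }
  }
  "shouldEqual" should {
    "fail the test on mismatch" in {
      1 shouldEqual 1 // throws TestFailedException if the values differ
    }
  }
}
```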
@@ -43,7 +43,7 @@ private[http] object CharacterClasses {
  val `pchar-base-nc` = unreserved ++ `sub-delims` ++ '@'
  val `pchar-base` = `pchar-base-nc` ++ ':' // pchar without percent
  val `query-fragment-char` = `pchar-base` ++ "/?"
- val `strict-query-char` = `query-fragment-char` -- "&="
+ val `strict-query-char` = `query-fragment-char` -- "&=;"
  val `strict-query-char-np` = `strict-query-char` -- '+'

  val `relaxed-fragment-char` = VCHAR -- '%'
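The `strict-query-char` change is the spray/spray#927 backport mentioned in the commit message: `;` is no longer treated as a loose query separator, so it gets percent-encoded when a `Query` is rendered, which is exactly what the UriSpec assertions further down expect. A small sketch of the expected behaviour (the `akka.http.model.Uri` import path and the demo object name are assumptions for illustration):

```scala
import akka.http.model.Uri.Query

object SemicolonEncodingDemo extends App {
  // With ';' removed from `strict-query-char`, it is percent-encoded on rendering,
  // just like '=' and '&' in parameter names and values.
  println(Query("a;b" -> "c").toString()) // expected: a%3Bb=c
  println(Query("a" -> "b;c").toString()) // expected: a=b%3Bc
}
```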
@@ -109,82 +109,81 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  "prime an empty parser with all defined HeaderValueParsers" in new TestSetup() {
  check {
  """ ┌─\r-\n- EmptyHeader
- | | ┌─c-h-a-r-s-e-t-:- (Accept-Charset)
+ | | ┌─c-h-a-r-s-e-t-:- (accept-charset)
- | | ┌─p-t---e-n-c-o-d-i-n-g-:- (Accept-Encoding)
+ | | | | ┌─e-n-c-o-d-i-n-g-:- (accept-encoding)
- | | | | | ┌─l-a-n-g-u-a-g-e-:- (Accept-Language)
+ | | | └─l-a-n-g-u-a-g-e-:- (accept-language)
- | | | | └─r-a-n-g-e-s-:- (Accept-Ranges)
+ | | ┌─p-t---r-a-n-g-e-s-:- (accept-ranges)
  | | | | ┌─\r-\n- Accept: */*
- | | | └─:-(Accept)- -*-/-*-\r-\n- Accept: */*
+ | | | └─:-(accept)- -*-/-*-\r-\n- Accept: */*
- | | | ┌─a-l-l-o-w---c-r-e-d-e-n-t-i-a-l-s-:- (Access-Control-Allow-Credentials)
+ | | | ┌─c-r-e-d-e-n-t-i-a-l-s-:- (access-control-allow-credentials)
- | | | | | | ┌─h-e-a-d-e-r-s-:- (Access-Control-Allow-Headers)
+ | | | ┌─h-e-a-d-e-r-s-:- (access-control-allow-headers)
- | | | | | | ┌─m-e-t-h-o-d-s-:- (Access-Control-Allow-Methods)
+ | | | ┌─a-l-l-o-w---m-e-t-h-o-d-s-:- (access-control-allow-methods)
- | | | | | └─o-r-i-g-i-n-:- (Access-Control-Allow-Origin)
+ | | | | | └─o-r-i-g-i-n-:- (access-control-allow-origin)
- | | | | | ┌─e-x-p-o-s-e---h-e-a-d-e-r-s-:- (Access-Control-Expose-Headers)
+ | | | | └─e-x-p-o-s-e---h-e-a-d-e-r-s-:- (access-control-expose-headers)
- | | | | └─m-a-x---a-g-e-:- (Access-Control-Max-Age)
+ | ┌─a-c-c-e-s-s---c-o-n-t-r-o-l---m-a-x---a-g-e-:- (access-control-max-age)
- | ┌─a-c-c-e-s-s---c-o-n-t-r-o-l---r-e-q-u-e-s-t---h-e-a-d-e-r-s-:- (Access-Control-Request-Headers)
+ | | | | ┌─h-e-a-d-e-r-s-:- (access-control-request-headers)
- | | | | └─m-e-t-h-o-d-:- (Access-Control-Request-Method)
+ | | | └─r-e-q-u-e-s-t---m-e-t-h-o-d-:- (access-control-request-method)
- | | | | ┌─l-l-o-w-:- (Allow)
+ | | └─l-l-o-w-:- (allow)
- | | | └─u-t-h-o-r-i-z-a-t-i-o-n-:- (Authorization)
+ | | └─u-t-h-o-r-i-z-a-t-i-o-n-:- (authorization)
- | | | ┌─a-c-h-e---c-o-n-t-r-o-l-:-(Cache-Control)- -m-a-x---a-g-e-=-0-\r-\n- Cache-Control: max-age=0
+ | | ┌─a-c-h-e---c-o-n-t-r-o-l-:-(cache-control)- -m-a-x---a-g-e-=-0-\r-\n- Cache-Control: max-age=0
- | | | | └─n-o---c-a-c-h-e-\r-\n- Cache-Control: no-cache
+ | | | └─n-o---c-a-c-h-e-\r-\n- Cache-Control: no-cache
- | | | | ┌─n-e-c-t-i-o-n-:-(Connection)- -K-e-e-p---A-l-i-v-e-\r-\n- Connection: Keep-Alive
+ | | | ┌─n-n-e-c-t-i-o-n-:-(connection)- -K-e-e-p---A-l-i-v-e-\r-\n- Connection: Keep-Alive
  | | | | | | ┌─c-l-o-s-e-\r-\n- Connection: close
  | | | | | └─k-e-e-p---a-l-i-v-e-\r-\n- Connection: keep-alive
- | | └─c-o-n-t-e-n-t---d-i-s-p-o-s-i-t-i-o-n-:- (Content-Disposition)
+ | | | | | ┌─d-i-s-p-o-s-i-t-i-o-n-:- (content-disposition)
- | | | | ┌─e-n-c-o-d-i-n-g-:- (Content-Encoding)
+ | | | | | ┌─e-n-c-o-d-i-n-g-:- (content-encoding)
- | | | | ┌─l-e-n-g-t-h-:-(Content-Length)- -0-\r-\n- Content-Length: 0
+ | | | | └─t-e-n-t---l-e-n-g-t-h-:-(Content-Length)- -0-\r-\n- Content-Length: 0
- | | | └─r-a-n-g-e-:- (Content-Range)
+ | | | | | ┌─r-a-n-g-e-:- (content-range)
- | | | └─t-y-p-e-:- (Content-Type)
+ | | | | └─t-y-p-e-:- (content-type)
- | | └─o-k-i-e-:- (Cookie)
+ |-c-o-o-k-i-e-:- (cookie)
- |-d-a-t-e-:- (Date)
+ | | ┌─d-a-t-e-:- (date)
- | | ┌─t-a-g-:- (ETag)
+ | | | ┌─t-a-g-:- (etag)
- | | ┌─e-x-p-e-c-t-:-(Expect)- -1-0-0---c-o-n-t-i-n-u-e-\r-\n- Expect: 100-continue
+ | | ┌─e-x-p-e-c-t-:-(expect)- -1-0-0---c-o-n-t-i-n-u-e-\r-\n- Expect: 100-continue
- | | ┌─h-o-s-t-:- (Host)
+ | | | └─h-o-s-t-:- (host)
- | | | ┌─a-t-c-h-:- (If-Match)
+ | | | ┌─a-t-c-h-:- (if-match)
- | | | ┌─m-o-d-i-f-i-e-d---s-i-n-c-e-:- (If-Modified-Since)
+ | | ┌─i-f---m-o-d-i-f-i-e-d---s-i-n-c-e-:- (if-modified-since)
- | | ┌─i-f---n-o-n-e---m-a-t-c-h-:- (If-None-Match)
+ | | | | ┌─n-o-n-e---m-a-t-c-h-:- (if-none-match)
- | | | | | ┌─r-a-n-g-e-:- (If-Range)
+ | | | | ┌─r-a-n-g-e-:- (if-range)
- | | | | └─u-n-m-o-d-i-f-i-e-d---s-i-n-c-e-:- (If-Unmodified-Since)
+ | | | └─u-n-m-o-d-i-f-i-e-d---s-i-n-c-e-:- (if-unmodified-since)
- | | | └─l-a-s-t---m-o-d-i-f-i-e-d-:- (Last-Modified)
+ | | | ┌─a-s-t---m-o-d-i-f-i-e-d-:- (last-modified)
- | | | | ┌─i-n-k-:- (Link)
+ | | | ┌─i-n-k-:- (link)
- | | | └─o-c-a-t-i-o-n-:- (Location)
+ | └─l-o-c-a-t-i-o-n-:- (location)
- | └─o-r-i-g-i-n-:- (Origin)
+ | | ┌─o-r-i-g-i-n-:- (origin)
- | | ┌─e-n-t-i-c-a-t-e-:- (Proxy-Authenticate)
+ | | | ┌─e-n-t-i-c-a-t-e-:- (proxy-authenticate)
- | | ┌─p-r-o-x-y---a-u-t-h-o-r-i-z-a-t-i-o-n-:- (Proxy-Authorization)
+ | | ┌─p-r-o-x-y---a-u-t-h-o-r-i-z-a-t-i-o-n-:- (proxy-authorization)
- | | ┌─r-a-n-g-e-:- (Range)
+ | | | └─r-a-n-g-e-:- (range)
- | | | └─e-m-o-t-e---a-d-d-r-e-s-s-:- (Remote-Address)
+ | └─s-e-r-v-e-r-:- (server)
- | | | ┌─r-v-e-r-:- (Server)
+ | | └─t---c-o-o-k-i-e-:- (set-cookie)
- | └─s-e-t---c-o-o-k-i-e-:- (Set-Cookie)
+ | | ┌─t-r-a-n-s-f-e-r---e-n-c-o-d-i-n-g-:- (transfer-encoding)
- | | ┌─t-r-a-n-s-f-e-r---e-n-c-o-d-i-n-g-:- (Transfer-Encoding)
+ | └─u-s-e-r---a-g-e-n-t-:- (user-agent)
- | | ┌─u-s-e-r---a-g-e-n-t-:- (User-Agent)
+ | | ┌─w-w-w---a-u-t-h-e-n-t-i-c-a-t-e-:- (www-authenticate)
- | └─w-w-w---a-u-t-h-e-n-t-i-c-a-t-e-:- (WWW-Authenticate)
+ | └─x---f-o-r-w-a-r-d-e-d---f-o-r-:- (x-forwarded-for)
- | └─x---f-o-r-w-a-r-d-e-d---f-o-r-:- (X-Forwarded-For)
  |""" -> parser.formatTrie
  }
- parser.formatSizes === "607 nodes, 41 branchData rows, 56 values"
+ parser.formatSizes shouldEqual "592 nodes, 40 branchData rows, 55 values"
- parser.contentHistogram ===
+ parser.contentHistogram shouldEqual
- Map("Connection" -> 3, "Content-Length" -> 1, "Accept" -> 2, "Cache-Control" -> 2, "Expect" -> 1)
+ Map("connection" -> 3, "Content-Length" -> 1, "accept" -> 2, "cache-control" -> 2, "expect" -> 1)
  }

  "retrieve the EmptyHeader" in new TestSetup() {
- parseAndCache("\r\n")() === HttpHeaderParser.EmptyHeader
+ parseAndCache("\r\n")() shouldEqual HttpHeaderParser.EmptyHeader
  }

  "retrieve a cached header with an exact header name match" in new TestSetup() {
- parseAndCache("Connection: close\r\nx")() === Connection("close")
+ parseAndCache("Connection: close\r\nx")() shouldEqual Connection("close")
  }

  "retrieve a cached header with a case-insensitive header-name match" in new TestSetup() {
- parseAndCache("Connection: close\r\nx")("coNNection: close\r\nx") === Connection("close")
+ parseAndCache("Connection: close\r\nx")("coNNection: close\r\nx") shouldEqual Connection("close")
  }

  "parse and cache a modelled header" in new TestSetup() {
- parseAndCache("Host: spray.io:123\r\nx")("HOST: spray.io:123\r\nx") === Host("spray.io", 123)
+ parseAndCache("Host: spray.io:123\r\nx")("HOST: spray.io:123\r\nx") shouldEqual Host("spray.io", 123)
  }

  "parse and cache an invalid modelled header as RawHeader" in new TestSetup() {
- parseAndCache("Content-Type: abc:123\r\nx")() === RawHeader("Content-Type", "abc:123")
+ parseAndCache("Content-Type: abc:123\r\nx")() shouldEqual RawHeader("content-type", "abc:123")
- parseAndCache("Origin: localhost:8080\r\nx")() === RawHeader("Origin", "localhost:8080")
+ parseAndCache("Origin: localhost:8080\r\nx")() shouldEqual RawHeader("origin", "localhost:8080")
  }

  "parse and cache a raw header" in new TestSetup(primed = false) {
@@ -196,17 +195,17 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  |-h-e-l-l-o-:- -b-o-b- 'Hello
  |""" -> parser.formatTrie
  }
- ixA === ixB
+ ixA shouldEqual ixB
- headerA === RawHeader("Fancy-Pants", "foo")
+ headerA shouldEqual RawHeader("Fancy-Pants", "foo")
  headerA should be theSameInstanceAs headerB
  }

  "parse and cache a modelled header with line-folding" in new TestSetup() {
- parseAndCache("Connection: foo,\r\n bar\r\nx")("Connection: foo,\r\n bar\r\nx") === Connection("foo", "bar")
+ parseAndCache("Connection: foo,\r\n bar\r\nx")("Connection: foo,\r\n bar\r\nx") shouldEqual Connection("foo", "bar")
  }

  "parse and cache a header with a tab char in the value" in new TestSetup() {
- parseAndCache("Fancy: foo\tbar\r\nx")() === RawHeader("Fancy", "foo bar")
+ parseAndCache("Fancy: foo\tbar\r\nx")() shouldEqual RawHeader("Fancy", "foo bar")
  }

  "produce an error message for lines with an illegal header name" in new TestSetup() {
@@ -233,8 +232,8 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  }
  randomHeaders.take(300).foldLeft(0) {
  case (acc, rawHeader) ⇒ acc + parseAndCache(rawHeader.toString + "\r\nx", rawHeader)
- } === 99 // number of cached headers
+ } shouldEqual 100 // number of cache hits
- parser.formatSizes === "3040 nodes, 114 branchData rows, 255 values"
+ parser.formatSizes shouldEqual "3050 nodes, 114 branchData rows, 255 values"
  }

  "continue parsing modelled headers even if the overall cache capacity is reached" in new TestSetup() {
@@ -245,8 +244,8 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  }
  randomHostHeaders.take(300).foldLeft(0) {
  case (acc, header) ⇒ acc + parseAndCache(header.toString + "\r\nx", header)
- } === 199 // number of cached headers
+ } shouldEqual 12 // number of cache hits
- parser.formatSizes === "3173 nodes, 186 branchData rows, 255 values"
+ parser.formatSizes shouldEqual "756 nodes, 49 branchData rows, 67 values"
  }

  "continue parsing raw headers even if the header-specific cache capacity is reached" in new TestSetup() {
@@ -256,7 +255,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  }
  randomHeaders.take(20).foldLeft(0) {
  case (acc, rawHeader) ⇒ acc + parseAndCache(rawHeader.toString + "\r\nx", rawHeader)
- } === 12
+ } shouldEqual 12
  }

  "continue parsing modelled headers even if the header-specific cache capacity is reached" in new TestSetup() {
@@ -265,7 +264,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  }
  randomHeaders.take(40).foldLeft(0) {
  case (acc, header) ⇒ acc + parseAndCache(header.toString + "\r\nx", header)
- } === 32
+ } shouldEqual 12
  }
  }

@@ -273,7 +272,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll

  def check(pair: (String, String)) = {
  val (expected, actual) = pair
- actual === expected.stripMarginWithNewline("\n")
+ actual shouldEqual expected.stripMarginWithNewline("\n")
  }

  abstract class TestSetup(primed: Boolean = true) {
@@ -292,7 +291,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  def parseAndCache(lineA: String)(lineB: String = lineA): HttpHeader = {
  val (ixA, headerA) = parseLine(lineA)
  val (ixB, headerB) = parseLine(lineB)
- ixA === ixB
+ ixA shouldEqual ixB
  headerA should be theSameInstanceAs headerB
  headerA
  }
@@ -300,9 +299,9 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
  def parseAndCache(line: String, header: HttpHeader): Int = {
  val (ixA, headerA) = parseLine(line)
  val (ixB, headerB) = parseLine(line)
- headerA === header
+ headerA shouldEqual header
- headerB === header
+ headerB shouldEqual header
- ixA === ixB
+ ixA shouldEqual ixB
  if (headerA eq headerB) 1 else 0
  }

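For context on the `// number of cache hits` comments above: `parseAndCache(line, header)` parses the same line twice and returns 1 only when the second parse comes back as the very same instance (`eq`), i.e. it was served from the parser's cache, and the `foldLeft` sums those ones and zeros. A toy, self-contained sketch of that counting idiom (the bounded map and the names below are stand-ins, not the parser's real cache):

```scala
object CacheHitCountDemo extends App {
  val capacity = 2
  val cache = scala.collection.mutable.Map.empty[String, String]

  // "Parse" (here: just copy) a line; cache the result only while there is room.
  def parseAndCache(line: String): String =
    cache.getOrElse(line, {
      val parsed = new String(line)
      if (cache.size < capacity) cache.update(line, parsed)
      parsed
    })

  // 1 if the second parse of the same line returns the identical (cached) instance.
  def cacheHit(line: String): Int = {
    val a = parseAndCache(line)
    val b = parseAndCache(line)
    if (a eq b) 1 else 0
  }

  val hits = List("Host: a", "Host: b", "Host: c", "Host: d")
    .foldLeft(0)((acc, line) => acc + cacheHit(line))
  println(hits) // 2: only the first two lines fit into the bounded cache
}
```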
@@ -303,16 +303,16 @@ class UriSpec extends WordSpec with Matchers {
  Query("k" -> "v") shouldEqual ("k" -> "v") +: Empty
  }
  "encode special separators in query parameter names" in {
- Query("a=b" -> "c").toString() === "a%3Db=c"
+ Query("a=b" -> "c").toString() shouldEqual "a%3Db=c"
- Query("a&b" -> "c").toString() === "a%26b=c"
+ Query("a&b" -> "c").toString() shouldEqual "a%26b=c"
- Query("a+b" -> "c").toString() === "a%2Bb=c"
+ Query("a+b" -> "c").toString() shouldEqual "a%2Bb=c"
- Query("a;b" -> "c").toString() === "a%3Bb=c"
+ Query("a;b" -> "c").toString() shouldEqual "a%3Bb=c"
  }
  "encode special separators in query parameter values" in {
- Query("a" -> "b=c").toString() === "a=b%3Dc"
+ Query("a" -> "b=c").toString() shouldEqual "a=b%3Dc"
- Query("a" -> "b&c").toString() === "a=b%26c"
+ Query("a" -> "b&c").toString() shouldEqual "a=b%26c"
- Query("a" -> "b+c").toString() === "a=b%2Bc"
+ Query("a" -> "b+c").toString() shouldEqual "a=b%2Bc"
- Query("a" -> "b;c").toString() === "a=b%3Bc"
+ Query("a" -> "b;c").toString() shouldEqual "a=b%3Bc"
  }
  }

@@ -219,7 +219,7 @@ class HttpHeaderSpec extends FreeSpec with Matchers {
  path = Some("/hello"),
  httpOnly = true,
  extension = Some("fancyPants"),
- secure = true)).toString === "Cookie: SID=31d4d96e407aad42"
+ secure = true)).toString shouldEqual "Cookie: SID=31d4d96e407aad42"
  }

  "Date" in {