dotty phase 3: Some required syntax changes (#29139)
Co-authored-by: Bùi Việt Thành <thanhbv@sandinh.net>
This commit is contained in:
parent
6b402f4bf0
commit
01501148d7
22 changed files with 90 additions and 91 deletions
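Most hunks below make the same mechanical change: Scala 3 (Dotty) requires parentheses around a lambda parameter that carries a type annotation, so `{ x: T => ... }` becomes `{ (x: T) => ... }`. A minimal sketch of the rule, independent of the Akka code touched here (the values are illustrative only):

  object LambdaParens {
    // Scala 2 also accepted the bare annotated parameter inside braces:
    //   Seq(1, 2, 3).map { i: Int => i + 1 }
    // Scala 3 requires parentheses once the parameter is annotated:
    val incremented: Seq[Int] = Seq(1, 2, 3).map { (i: Int) => i + 1 }

    // Without an annotation nothing changes; this form compiles under both:
    val doubled: Seq[Int] = Seq(1, 2, 3).map { i => i * 2 }
  }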
@@ -253,7 +253,7 @@ final class ActorTestKit private[akka] (
    */
   def stop[T](ref: ActorRef[T], max: FiniteDuration = timeout.duration): Unit =
     try {
-      Await.result(internalTestKitGuardian.ask { x: ActorRef[ActorTestKitGuardian.Ack.type] =>
+      Await.result(internalTestKitGuardian.ask { (x: ActorRef[ActorTestKitGuardian.Ack.type]) =>
         ActorTestKitGuardian.StopActor(ref, x)
       }, max)
     } catch {
@@ -70,12 +70,12 @@ object BehaviorTestKitSpec {
           context.stop(child)
           Behaviors.same
         case SpawnAdapter =>
-          context.spawnMessageAdapter { r: Reproduce =>
+          context.spawnMessageAdapter { (r: Reproduce) =>
             SpawnAnonymous(r.times)
           }
           Behaviors.same
         case SpawnAdapterWithName(name) =>
-          context.spawnMessageAdapter({ r: Reproduce =>
+          context.spawnMessageAdapter({ (r: Reproduce) =>
             SpawnAnonymous(r.times)
           }, name)
           Behaviors.same
@@ -442,7 +442,7 @@ object SupervisorHierarchySpec {
             Ping(ref)
           case x =>
             // fail one child
-            val pick = ((if (x >= 0.25) x - 0.25 else x) * 4 * activeChildren.size).toInt
+            val pick = ((if (x >= 0.25f) x - 0.25f else x) * 4 * activeChildren.size).toInt
             Fail(activeChildren(pick), if (x > 0.25) Restart else Resume)
         })
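The `f` suffixes above are needed because `x` is a `Float` here: Scala 2's weak conformance silently harmonized the `Double` literal `0.25` with the `Float` branch, while Scala 3 only harmonizes literal constants, so the mixed `if` expression would no longer have a usable numeric type. A rough illustration of the difference (the values are made up, not the spec's own data):

  object FloatLiterals {
    val x: Float = 0.7f

    // In Scala 2 the whole `if` was widened to Double; in Scala 3 the Double and
    // Float branches only meet at AnyVal, so `* 4` stops resolving:
    //   val pick = ((if (x >= 0.25) x - 0.25 else x) * 4).toInt   // fails under Scala 3

    // Keeping the literals as Float keeps the whole expression a Float:
    val pick: Int = ((if (x >= 0.25f) x - 0.25f else x) * 4).toInt
  }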
@@ -411,7 +411,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
           "Teammates left: " + team.size + " stopLatch: " + stopLatch.getCount + " inhab:" + dispatcher.inhabitants)

         import akka.util.ccompat.JavaConverters._
-        team.asScala.toList.sortBy(_.self.path).foreach { cell: ActorCell =>
+        team.asScala.toList.sortBy(_.self.path).foreach { (cell: ActorCell) =>
           System.err.println(
             " - " + cell.self.path + " " + cell.isTerminated + " " + cell.mailbox.currentStatus + " "
             + cell.mailbox.numberOfMessages + " " + cell.mailbox.systemDrain(SystemMessageList.LNil).size)
@@ -944,7 +944,7 @@ class TcpConnectionSpec extends AkkaSpec("""
     })
   }

-  trait SmallRcvBuffer { _: LocalServerTest =>
+  trait SmallRcvBuffer { this: LocalServerTest =>
     override def setServerSocketOptions(): Unit = localServerChannel.socket.setReceiveBufferSize(1024)
   }
@@ -16,7 +16,7 @@ import akka.io.Inet.SocketOption
 import akka.testkit.{ AkkaSpec, TestProbe }
 import akka.testkit.SocketUtil.temporaryServerAddress

-trait TcpIntegrationSpecSupport { _: AkkaSpec =>
+trait TcpIntegrationSpecSupport { this: AkkaSpec =>

   class TestSetup(shouldBindServer: Boolean = true, runClientInExtraSystem: Boolean = true) {
     val clientSystem =
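Both TCP test helpers switch their self-type from the Scala 2 wildcard form to `this:`. Scala 3 no longer accepts `_` as the name in a self-type annotation; naming the self reference, or simply using `this`, expresses the same constraint. A minimal standalone sketch with hypothetical trait names:

  trait Logging {
    def log(msg: String): Unit = println(msg)
  }

  // Scala 2 also allowed `_: Logging =>` here; Scala 3 wants a name or `this`.
  trait Component { this: Logging =>
    def run(): Unit = log("running")
  }

  object ComponentApp extends Component with Logging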
@@ -749,7 +749,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

     "be equal to the original" when {
       "compacting" in {
-        check { xs: ByteString =>
+        check { (xs: ByteString) =>
           val ys = xs.compact; (xs == ys) && ys.isCompact
         }
       }
@@ -785,19 +785,19 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

     "behave as expected" when {
       "created from and decoding to String" in {
-        check { s: String =>
+        check { (s: String) =>
           ByteString(s, "UTF-8").decodeString("UTF-8") == s
         }
       }

       "taking its own length" in {
-        check { b: ByteString =>
+        check { (b: ByteString) =>
           b.take(b.length) eq b
         }
       }

       "created from and decoding to Base64" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           val encoded = a.encodeBase64
           encoded == ByteString(java.util.Base64.getEncoder.encode(a.toArray)) &&
           encoded.decodeBase64 == a
@@ -805,7 +805,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "compacting" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           val wasCompact = a.isCompact
           val b = a.compact
           ((!wasCompact) || (b eq a)) &&
@@ -848,7 +848,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "calling apply" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, i1, i2) =>
               likeVector(xs) { seq =>
@@ -859,27 +859,27 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling head" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVector(a) { _.head }
         }
       }
       "calling tail" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVector(a) { _.tail }
         }
       }
       "calling last" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVector(a) { _.last }
         }
       }
       "calling init" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVector(a) { _.init }
         }
       }
       "calling length" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVector(a) { _.length }
         }
       }
@@ -919,25 +919,25 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "calling foreach" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVector(a) { it =>
             var acc = 0; it.foreach { acc += _ }; acc
           }
         }
       }
       "calling foldLeft" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVector(a) { _.foldLeft(0) { _ + _ } }
         }
       }
       "calling toArray" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVector(a) { _.toArray.toSeq }
         }
       }

       "calling slice" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVector(xs)({
@@ -948,7 +948,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling take and drop" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVector(xs)({
@@ -959,7 +959,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling grouped" in {
-        check { grouped: ByteStringGrouped =>
+        check { (grouped: ByteStringGrouped) =>
           likeVector(grouped.bs) {
             _.grouped(grouped.size).toIndexedSeq
           }
@@ -967,7 +967,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling copyToArray" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVector(xs)({ it =>
@@ -995,7 +995,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "given all types of ByteString" in {
-        check { bs: ByteString =>
+        check { (bs: ByteString) =>
           testSer(bs)
         }
       }
@@ -1020,27 +1020,27 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "calling head" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVecIt(a) { _.head }
         }
       }
       "calling next" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           a.isEmpty || likeVecIt(a) { _.next() }
         }
       }
       "calling hasNext" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a) { _.hasNext }
         }
       }
       "calling length" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a)(_.length, strict = false)
         }
       }
       "calling duplicate" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a)({ _.duplicate match { case (a, b) => (a.toSeq, b.toSeq) } }, strict = false)
         }
       }
@@ -1075,30 +1075,30 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }
       "calling toSeq" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a) { _.toSeq }
         }
       }
       "calling foreach" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a) { it =>
             var acc = 0; it.foreach { acc += _ }; acc
           }
         }
       }
       "calling foldLeft" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a) { _.foldLeft(0) { _ + _ } }
         }
       }
       "calling toArray" in {
-        check { a: ByteString =>
+        check { (a: ByteString) =>
           likeVecIt(a) { _.toArray.toSeq }
         }
       }

       "calling slice" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVecIt(xs)({
@@ -1109,7 +1109,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling take and drop" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVecIt(xs)({
@@ -1120,7 +1120,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
         }
       }

       "calling copyToArray" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           slice match {
             case (xs, from, until) =>
               likeVecIt(xs)({ it =>
@@ -1136,7 +1136,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
     "function as expected" when {
       "getting Bytes, using getByte and getBytes" in {
         // mixing getByte and getBytes here for more rigorous testing
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           val (bytes, from, to) = slice
           val input = bytes.iterator
           val output = new Array[Byte](bytes.length)
@@ -1148,7 +1148,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "getting Bytes with a given length" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           val (bytes, _, _) = slice
           val input = bytes.iterator
           (input.getBytes(bytes.length).toSeq == bytes) && input.isEmpty
@@ -1156,7 +1156,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "getting ByteString with a given length" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           val (bytes, _, _) = slice
           val input = bytes.iterator
           (input.getByteString(bytes.length) == bytes) && input.isEmpty
@@ -1165,7 +1165,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

       "getting Bytes, using the InputStream wrapper" in {
         // combining skip and both read methods here for more rigorous testing
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           val (bytes, from, to) = slice
           val a = (0 max from) min bytes.length
           val b = (a max to) min bytes.length
@@ -1192,7 +1192,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
       }

       "calling copyToBuffer" in {
-        check { bytes: ByteString =>
+        check { (bytes: ByteString) =>
           import java.nio.ByteBuffer
           val buffer = ByteBuffer.allocate(bytes.size)
           bytes.copyToBuffer(buffer)
@@ -1223,52 +1223,52 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

     "decode data correctly" when {
       "decoding Short in big-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testShortDecoding(slice, BIG_ENDIAN)
         }
       }
       "decoding Short in little-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testShortDecoding(slice, LITTLE_ENDIAN)
         }
       }
       "decoding Int in big-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testIntDecoding(slice, BIG_ENDIAN)
         }
       }
       "decoding Int in little-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testIntDecoding(slice, LITTLE_ENDIAN)
         }
       }
       "decoding Long in big-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testLongDecoding(slice, BIG_ENDIAN)
         }
       }
       "decoding Long in little-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testLongDecoding(slice, LITTLE_ENDIAN)
         }
       }
       "decoding Float in big-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testFloatDecoding(slice, BIG_ENDIAN)
         }
       }
       "decoding Float in little-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testFloatDecoding(slice, LITTLE_ENDIAN)
         }
       }
       "decoding Double in big-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testDoubleDecoding(slice, BIG_ENDIAN)
         }
       }
       "decoding Double in little-endian" in {
-        check { slice: ByteStringSlice =>
+        check { (slice: ByteStringSlice) =>
           testDoubleDecoding(slice, LITTLE_ENDIAN)
         }
       }
@@ -1296,7 +1296,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
     "function as expected" when {
       "putting Bytes, using putByte and putBytes" in {
         // mixing putByte and putBytes here for more rigorous testing
-        check { slice: ArraySlice[Byte] =>
+        check { (slice: ArraySlice[Byte]) =>
           val (data, from, to) = slice
           val builder = ByteString.newBuilder
           for (i <- 0 until from) builder.putByte(data(i))
@@ -1308,7 +1308,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

       "putting Bytes, using the OutputStream wrapper" in {
         // mixing the write methods here for more rigorous testing
-        check { slice: ArraySlice[Byte] =>
+        check { (slice: ArraySlice[Byte]) =>
           val (data, from, to) = slice
           val builder = ByteString.newBuilder
           for (i <- 0 until from) builder.asOutputStream.write(data(i).toInt)
@@ -1321,62 +1321,62 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

     "encode data correctly" when {
       "encoding Short in big-endian" in {
-        check { slice: ArraySlice[Short] =>
+        check { (slice: ArraySlice[Short]) =>
           testShortEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding Short in little-endian" in {
-        check { slice: ArraySlice[Short] =>
+        check { (slice: ArraySlice[Short]) =>
           testShortEncoding(slice, LITTLE_ENDIAN)
         }
       }
       "encoding Int in big-endian" in {
-        check { slice: ArraySlice[Int] =>
+        check { (slice: ArraySlice[Int]) =>
           testIntEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding Int in little-endian" in {
-        check { slice: ArraySlice[Int] =>
+        check { (slice: ArraySlice[Int]) =>
           testIntEncoding(slice, LITTLE_ENDIAN)
         }
       }
       "encoding Long in big-endian" in {
-        check { slice: ArraySlice[Long] =>
+        check { (slice: ArraySlice[Long]) =>
           testLongEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding Long in little-endian" in {
-        check { slice: ArraySlice[Long] =>
+        check { (slice: ArraySlice[Long]) =>
           testLongEncoding(slice, LITTLE_ENDIAN)
         }
       }
       "encoding LongPart in big-endian" in {
-        check { slice: ArrayNumBytes[Long] =>
+        check { (slice: ArrayNumBytes[Long]) =>
           testLongPartEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding LongPart in little-endian" in {
-        check { slice: ArrayNumBytes[Long] =>
+        check { (slice: ArrayNumBytes[Long]) =>
           testLongPartEncoding(slice, LITTLE_ENDIAN)
         }
       }
       "encoding Float in big-endian" in {
-        check { slice: ArraySlice[Float] =>
+        check { (slice: ArraySlice[Float]) =>
           testFloatEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding Float in little-endian" in {
-        check { slice: ArraySlice[Float] =>
+        check { (slice: ArraySlice[Float]) =>
           testFloatEncoding(slice, LITTLE_ENDIAN)
         }
       }
       "encoding Double in big-endian" in {
-        check { slice: ArraySlice[Double] =>
+        check { (slice: ArraySlice[Double]) =>
           testDoubleEncoding(slice, BIG_ENDIAN)
         }
       }
       "encoding Double in little-endian" in {
-        check { slice: ArraySlice[Double] =>
+        check { (slice: ArraySlice[Double]) =>
           testDoubleEncoding(slice, LITTLE_ENDIAN)
         }
       }
@@ -1384,12 +1384,12 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {

     "have correct empty info" when {
       "is empty" in {
-        check { a: ByteStringBuilder =>
+        check { (a: ByteStringBuilder) =>
           a.isEmpty
         }
       }
       "is nonEmpty" in {
-        check { a: ByteStringBuilder =>
+        check { (a: ByteStringBuilder) =>
           a.putByte(1.toByte)
           a.nonEmpty
         }
@@ -84,7 +84,7 @@ class ActorRefIgnoreSpec extends ScalaTestWithActorTestKit() with AnyWordSpecLik

       val failedAsk =
         askMeRef
-          .ask { _: ActorRef[Request] =>
+          .ask { (_: ActorRef[Request]) =>
             Request(testKit.system.ignoreRef) // <- pass the ignoreRef instead, so Future never completes
           }
           .failed
@@ -75,7 +75,7 @@ class ActorSystemSpec
         Behaviors.receiveMessage[Done] { _ =>
           Behaviors.stopped
         }
-      withSystem("shutdown", stoppable, doTerminate = false) { sys: ActorSystem[Done] =>
+      withSystem("shutdown", stoppable, doTerminate = false) { (sys: ActorSystem[Done]) =>
         sys ! Done
         sys.whenTerminated.futureValue
       }
@@ -59,7 +59,7 @@ object ReceptionistApiSpec {

     // another more "normal" is subscribe using an adapter
     // FIXME inference doesn't work with partial function
-    val adapter = context.spawnMessageAdapter { listing: Receptionist.Listing =>
+    val adapter = context.spawnMessageAdapter { (listing: Receptionist.Listing) =>
       listing.serviceInstances(key) // Set[ActorRef[String]] !!
     }
     context.system.receptionist ! Receptionist.Subscribe(key, adapter)
@@ -43,7 +43,7 @@ object FSMDocSpec {
     // initial state
     def apply(): Behavior[Event] = idle(Uninitialized)

-    private def idle(data: Data): Behavior[Event] = Behaviors.receiveMessage[Event] { message: Event =>
+    private def idle(data: Data): Behavior[Event] = Behaviors.receiveMessage[Event] { message =>
       (message, data) match {
         case (SetTarget(ref), Uninitialized) =>
           idle(Todo(ref, Vector.empty))
@@ -20,7 +20,7 @@ import akka.util.ccompat.JavaConverters._
  * Actor system extensions registry
  */
 @InternalApi
-private[akka] trait ExtensionsImpl extends Extensions { self: ActorSystem[_] =>
+private[akka] trait ExtensionsImpl extends Extensions { self: ActorSystem[_] with InternalRecipientRef[_] =>

   private val extensions = new ConcurrentHashMap[ExtensionId[_], AnyRef]
@@ -55,9 +55,8 @@ private[akka] trait ExtensionsImpl extends Extensions { self: ActorSystem[_] =>

     def idFromJavaSingletonAccessor(extensionIdFQCN: String): Try[ExtensionId[Extension]] =
       dynamicAccess.getClassFor[ExtensionId[Extension]](extensionIdFQCN).flatMap[ExtensionId[Extension]] {
-        clazz: Class[_] =>
+        (clazz: Class[_]) =>
           Try {
             val singletonAccessor = clazz.getDeclaredMethod("getInstance")
             singletonAccessor.invoke(null).asInstanceOf[ExtensionId[Extension]]
           }
@@ -103,7 +103,7 @@ object ActorRef {
  * the unique id of the actor is not taken into account when comparing actor paths.
  */
 abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable {
-  scalaRef: InternalActorRef =>
+  scalaRef: InternalActorRef with ActorRefScope =>

   /**
    * Returns the path for this actor (from this actor up to the root actor).
@@ -166,7 +166,7 @@ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable
  * There are implicit conversions in package.scala
  * from ActorRef -> ScalaActorRef and back
  */
-trait ScalaActorRef { ref: ActorRef =>
+trait ScalaActorRef { ref: ActorRef with InternalActorRef with ActorRefScope =>

   /**
    * Sends a one-way asynchronous message. E.g. fire-and-forget semantics.
@@ -8,6 +8,7 @@ import java.util
 import java.util.concurrent.{ ConcurrentHashMap, TimeUnit }

 import scala.util.{ Failure, Success, Try }
+import java.lang.Enum

 import com.typesafe.config._
@@ -258,7 +258,7 @@ object Util {
     if (i.hasNext) {
       val builder = new immutable.VectorBuilder[T]

-      do { builder += i.next() } while (i.hasNext)
+      while ({ builder += i.next(); i.hasNext }) ()

       builder.result()
     } else EmptyImmutableSeq
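Scala 3 drops the `do ... while` statement; the `while ({ body; cond }) ()` form used above is the standard rewrite (run the body once, then keep looping while the condition holds). A self-contained sketch of the same loop, with Akka's internal `EmptyImmutableSeq` replaced by `Vector.empty` so it compiles on its own:

  import scala.collection.immutable

  object DoWhileRewrite {
    def toVector[T](i: Iterator[T]): immutable.Seq[T] =
      if (i.hasNext) {
        val builder = new immutable.VectorBuilder[T]
        // Scala 2: do { builder += i.next() } while (i.hasNext)
        while ({ builder += i.next(); i.hasNext }) ()
        builder.result()
      } else immutable.Vector.empty
  }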
@@ -22,9 +22,7 @@ import akka.util.ccompat.JavaConverters._
 class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {

   def this(mapSize: Int, cmp: (V, V) => Int) =
-    this(mapSize, new Comparator[V] {
-      def compare(a: V, b: V): Int = cmp(a, b)
-    })
+    this(mapSize, ((a: V, b: V) => cmp(a, b)): Comparator[V])

   private val container = new ConcurrentHashMap[K, ConcurrentSkipListSet[V]](mapSize)
   private val emptySet = new ConcurrentSkipListSet[V]
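The auxiliary `Index` constructor can drop the anonymous `Comparator` subclass because a function literal converts to a Java functional (SAM) interface when the expected type is that interface; the `: Comparator[V]` ascription supplies the expected type. A standalone sketch of both spellings:

  import java.util.Comparator

  object SamComparator {
    // Scala 2 style: explicit anonymous class implementing compare.
    def byAnonymousClass[V](cmp: (V, V) => Int): Comparator[V] =
      new Comparator[V] {
        def compare(a: V, b: V): Int = cmp(a, b)
      }

    // SAM conversion: a lambda ascribed to the single-abstract-method type.
    def byLambda[V](cmp: (V, V) => Int): Comparator[V] =
      ((a: V, b: V) => cmp(a, b)): Comparator[V]
  }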
@@ -91,7 +91,7 @@ class LogRoleReplace {

   private val RoleStarted =
     """\[([\w\-]+)\].*Role \[([\w]+)\] started with address \[[\w\-\+\.]+://.*@([\w\-\.]+):([0-9]+)\]""".r
-  private val ColorCode = """\u001B?\[[0-9]+m"""
+  private val ColorCode = "\u001B?\\[[0-9]+m"

   private var replacements: Map[String, String] = Map.empty
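The `ColorCode` change is about unicode escapes rather than regex syntax: Scala 2 processed `\u001B` even inside triple-quoted strings, so the pattern contained a real ESC character, whereas Scala 3 treats triple-quoted strings as raw and would leave a literal backslash-u sequence. Switching to an ordinary string literal keeps the ESC character, at the cost of double-escaping the backslash for `[`. A small sketch of the distinction (my reading of the Scala 3 behaviour):

  object EscapeStrings {
    // Ordinary string literals process \u escapes in both Scala 2 and Scala 3:
    val esc: String = "\u001B" // the single ESC character (0x1B)

    // Triple-quoted strings are raw in Scala 3, so this is the six characters
    // \, u, 0, 0, 1, B (Scala 2 turned it into the ESC character):
    val raw: String = """\u001B"""

    // Hence the regex source moves to a normal string, escaping the backslash:
    val ColorCode: String = "\u001B?\\[[0-9]+m"
  }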
@@ -204,7 +204,7 @@ class PhiAccrualFailureDetector(
       -math.log10(1.0 - 1.0 / (1.0 + e))
   }

-  private val minStdDeviationMillis = minStdDeviation.toMillis
+  private val minStdDeviationMillis = minStdDeviation.toMillis.toDouble

   private def ensureValidStdDeviation(stdDeviation: Double): Double = math.max(stdDeviation, minStdDeviationMillis)
@@ -53,14 +53,15 @@ class TlsTcpWithRotatingKeysSSLEngineSpec extends TlsTcpSpec(ConfigFactory.parse
     akka.remote.artery.ssl {
       ssl-engine-provider = akka.remote.artery.tcp.ssl.RotatingKeysSSLEngineProvider
       rotating-keys-engine {
-        key-file = ${getClass.getClassLoader.getResource("ssl/node.example.com.pem").getPath}
-        cert-file = ${getClass.getClassLoader.getResource("ssl/node.example.com.crt").getPath}
-        ca-cert-file = ${getClass.getClassLoader.getResource("ssl/exampleca.crt").getPath}
+        key-file = ${TlsTcpSpec.resourcePath("ssl/node.example.com.pem")}
+        cert-file = ${TlsTcpSpec.resourcePath("ssl/node.example.com.crt")}
+        ca-cert-file = ${TlsTcpSpec.resourcePath("ssl/exampleca.crt")}
       }
     }
   """))

 object TlsTcpSpec {
+  def resourcePath(name: String): String = getClass.getClassLoader.getResource(name).getPath

   lazy val config: Config = {
     ConfigFactory.parseString(s"""
@@ -63,7 +63,7 @@ object Slf4jLoggerSpec {
   val output = new ByteArrayOutputStream
   def outputString: String = output.toString("UTF-8")

-  class TestAppender extends OutputStreamAppender {
+  class TestAppender[E] extends OutputStreamAppender[E] {

     override def start(): Unit = {
       setOutputStream(output)
@@ -56,7 +56,7 @@ trait WithLogCapturing extends SuiteMixin { this: TestSuite =>
       override def write(b: Int): Unit = oldOut.write(b)
     }) {
       override def println(x: Any): Unit =
-        oldOut.println(prefix + String.valueOf(x).replaceAllLiterally("\n", s"\n$prefix"))
+        oldOut.println(prefix + String.valueOf(x).replace("\n", s"\n$prefix"))
     }

     Console.withOut(prefixingOut) {
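`replaceAllLiterally` is a `StringOps` extension method that is deprecated in Scala 2.13; plain `java.lang.String.replace` already performs literal (non-regex) replacement, so it is the drop-in substitute used above. A tiny sketch:

  object LiteralReplace {
    val prefix = "[JVM-1] "
    val line = "first\nsecond"

    // Deprecated extension method:  line.replaceAllLiterally("\n", s"\n$prefix")
    // java.lang.String.replace does literal replacement, no regex involved:
    val prefixed: String = prefix + line.replace("\n", s"\n$prefix")
  }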
@@ -98,8 +98,8 @@ class AkkaConsoleReporter(registry: AkkaMetricRegistry, verbose: Boolean, output
     output.print(" 1-minute rate = %2.2f calls/%s%n".format(convertRate(timer.getOneMinuteRate), getRateUnit))
     output.print(" 5-minute rate = %2.2f calls/%s%n".format(convertRate(timer.getFiveMinuteRate), getRateUnit))
     output.print(" 15-minute rate = %2.2f calls/%s%n".format(convertRate(timer.getFifteenMinuteRate), getRateUnit))
-    output.print(" min = %2.2f %s%n".format(convertDuration(snapshot.getMin), getDurationUnit))
-    output.print(" max = %2.2f %s%n".format(convertDuration(snapshot.getMax), getDurationUnit))
+    output.print(" min = %2.2f %s%n".format(convertDuration(snapshot.getMin.toDouble), getDurationUnit))
+    output.print(" max = %2.2f %s%n".format(convertDuration(snapshot.getMax.toDouble), getDurationUnit))
     output.print(" mean = %2.2f %s%n".format(convertDuration(snapshot.getMean), getDurationUnit))
     output.print(" stddev = %2.2f %s%n".format(convertDuration(snapshot.getStdDev), getDurationUnit))
     output.print(" median = %2.2f %s%n".format(convertDuration(snapshot.getMedian), getDurationUnit))