Replace scalactic deprecations (#25886)
parent 847a7c1afd
commit 90bf989630
20 changed files with 82 additions and 85 deletions
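The hunks below all follow one pattern. Once the deprecated org.scalactic.ConversionCheckedTripleEquals is replaced by TypeCheckedTripleEquals, ScalaTest's === no longer accepts implicit numeric conversions, so every expected value has to match the static type of the left-hand side (0L instead of 0 for a Long, 2D for a Double, 1.toByte for a Byte), and org.scalactic.Constraint is replaced by its new name CanEqual. A minimal before/after sketch, assuming the ScalaTest 3.0-era WordSpec and Matchers used by these specs (the spec and value names are illustrative, not taken from the commit):

    import org.scalatest.{ Matchers, WordSpec }
    import org.scalactic.TypeCheckedTripleEquals // replaces the deprecated ConversionCheckedTripleEquals

    class MigrationSketchSpec extends WordSpec with Matchers with TypeCheckedTripleEquals {
      "type-checked ===" must {
        "require the expected literal to match the static type" in {
          val remaining: Long = 3L
          // remaining should ===(3)  // no longer compiles: Int is not a subtype of Long
          remaining should ===(3L)    // compare a Long against a Long literal instead
        }
      }
    }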
@@ -105,12 +105,12 @@ trait SchedulerSpec extends BeforeAndAfterEach with DefaultTimeout with Implicit
 // should not be run immediately
 assert(countDownLatch.await(100, TimeUnit.MILLISECONDS) == false)
-countDownLatch.getCount should ===(3)
+countDownLatch.getCount should ===(3L)

 // after 1 second the wait should fail
 assert(countDownLatch.await(2, TimeUnit.SECONDS) == false)
 // should still be 1 left
-countDownLatch.getCount should ===(1)
+countDownLatch.getCount should ===(1L)
 }

 /**
@@ -98,7 +98,7 @@ class DispatcherActorSpec extends AkkaSpec(DispatcherActorSpec.config) with Defa
 latch.await(10, TimeUnit.SECONDS)
 system.stop(fastOne)
 system.stop(slowOne)
-assert(latch.getCount() === 0)
+assert(latch.getCount() === 0L)
 }

 "respect throughput deadline" in {
@@ -51,7 +51,7 @@ class DispatcherActorsSpec extends AkkaSpec {
 fFinished.await
 assert(sFinished.getCount > 0)
 sFinished.await
-assert(sFinished.getCount === 0)
+assert(sFinished.getCount === 0L)
 system.stop(f)
 system.stop(s)
 }
@@ -37,7 +37,7 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference(ActorSystem.fin
 settings.SerializeAllMessages should ===(true)

 getInt("akka.scheduler.ticks-per-wheel") should ===(512)
-getDuration("akka.scheduler.tick-duration", TimeUnit.MILLISECONDS) should ===(10)
+getDuration("akka.scheduler.tick-duration", TimeUnit.MILLISECONDS) should ===(10L)
 getString("akka.scheduler.implementation") should ===("akka.actor.LightArrayRevolverScheduler")

 getBoolean("akka.daemonic") should ===(false)
@@ -77,9 +77,9 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference(ActorSystem.fin
 {
 c.getString("type") should ===("Dispatcher")
 c.getString("executor") should ===("default-executor")
-c.getDuration("shutdown-timeout", TimeUnit.MILLISECONDS) should ===(1 * 1000)
+c.getDuration("shutdown-timeout", TimeUnit.MILLISECONDS) should ===(1 * 1000L)
 c.getInt("throughput") should ===(5)
-c.getDuration("throughput-deadline-time", TimeUnit.MILLISECONDS) should ===(0)
+c.getDuration("throughput-deadline-time", TimeUnit.MILLISECONDS) should ===(0L)
 c.getBoolean("attempt-teamwork") should ===(true)
 }

@@ -104,7 +104,7 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference(ActorSystem.fin
 {
 val pool = c.getConfig("thread-pool-executor")
 import pool._
-getDuration("keep-alive-time", TimeUnit.MILLISECONDS) should ===(60 * 1000)
+getDuration("keep-alive-time", TimeUnit.MILLISECONDS) should ===(60 * 1000L)
 getDouble("core-pool-size-factor") should ===(3.0)
 getDouble("max-pool-size-factor") should ===(3.0)
 getInt("task-queue-size") should ===(-1)
@@ -148,7 +148,7 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference(ActorSystem.fin

 {
 c.getInt("mailbox-capacity") should ===(1000)
-c.getDuration("mailbox-push-timeout-time", TimeUnit.MILLISECONDS) should ===(10 * 1000)
+c.getDuration("mailbox-push-timeout-time", TimeUnit.MILLISECONDS) should ===(10 * 1000L)
 c.getString("mailbox-type") should ===("akka.dispatch.UnboundedMailbox")
 }
 }
@@ -23,7 +23,7 @@ class DurationSpec extends AkkaSpec {
 (2 * one) should ===(two)
 (three - two) should ===(one)
 (three / 3) should ===(one)
-(two / one) should ===(2)
+(two / one) should ===(2D)
 (one + zero) should ===(one)
 (one / 1000000) should ===(1.micro)
 }
@@ -20,54 +20,54 @@ class TokenBucketSpec extends AkkaSpec {
 val bucket = new TestBucket(10, 1)
 bucket.init()

-bucket.offer(1) should ===(0)
-bucket.offer(1) should ===(0)
-bucket.offer(1) should ===(0)
-bucket.offer(7) should ===(0)
+bucket.offer(1) should ===(0L)
+bucket.offer(1) should ===(0L)
+bucket.offer(1) should ===(0L)
+bucket.offer(7) should ===(0L)

-bucket.offer(3) should ===(3)
+bucket.offer(3) should ===(3L)
 }

 "calculate correctly with different rates and capacities" in {
 val bucketRate2 = new TestBucket(10, 2)
 bucketRate2.init()

-bucketRate2.offer(5) should ===(0)
-bucketRate2.offer(5) should ===(0)
-bucketRate2.offer(5) should ===(10)
+bucketRate2.offer(5) should ===(0L)
+bucketRate2.offer(5) should ===(0L)
+bucketRate2.offer(5) should ===(10L)

 val bucketRate3 = new TestBucket(8, 3)
 bucketRate3.init()
-bucketRate3.offer(5) should ===(0)
-bucketRate3.offer(5) should ===(6)
+bucketRate3.offer(5) should ===(0L)
+bucketRate3.offer(5) should ===(6L)

 bucketRate3.currentTime = 6
-bucketRate3.offer(3) should ===(9)
+bucketRate3.offer(3) should ===(9L)
 }

 "allow sending elements larger than capacity" in {
 val bucket = new TestBucket(10, 2)
 bucket.init()

-bucket.offer(5) should ===(0)
-bucket.offer(20) should ===(30)
+bucket.offer(5) should ===(0L)
+bucket.offer(20) should ===(30L)

 bucket.currentTime = 30
-bucket.offer(1) should ===(2)
+bucket.offer(1) should ===(2L)

 bucket.currentTime = 34
-bucket.offer(1) should ===(0)
-bucket.offer(1) should ===(2)
+bucket.offer(1) should ===(0L)
+bucket.offer(1) should ===(2L)
 }

 "work with zero capacity" in {
 val bucket = new TestBucket(0, 2)
 bucket.init()

-bucket.offer(10) should ===(20)
+bucket.offer(10) should ===(20L)

 bucket.currentTime = 40
-bucket.offer(10) should ===(20)
+bucket.offer(10) should ===(20L)
 }

 "not delay if rate is higher than production" in {
@@ -76,7 +76,7 @@ class TokenBucketSpec extends AkkaSpec {

 for (time ← 0 to 100 by 10) {
 bucket.currentTime = time
-bucket.offer(1) should ===(0)
+bucket.offer(1) should ===(0L)
 }

 }
@@ -84,10 +84,10 @@ class TokenBucketSpec extends AkkaSpec {
 "maintain maximum capacity" in {
 val bucket = new TestBucket(10, 1)
 bucket.init()
-bucket.offer(10) should ===(0)
+bucket.offer(10) should ===(0L)

 bucket.currentTime = 100000
-bucket.offer(20) should ===(10)
+bucket.offer(20) should ===(10L)
 }

 "work if currentTime is negative" in {
@@ -95,12 +95,12 @@ class TokenBucketSpec extends AkkaSpec {
 bucket.currentTime = -100 // Must be set before init()!
 bucket.init()

-bucket.offer(5) should ===(0)
-bucket.offer(10) should ===(5)
+bucket.offer(5) should ===(0L)
+bucket.offer(10) should ===(5L)

 bucket.currentTime += 10

-bucket.offer(5) should ===(0)
+bucket.offer(5) should ===(0L)
 }

 "work if currentTime wraps over" in {
@@ -108,19 +108,19 @@ class TokenBucketSpec extends AkkaSpec {
 bucket.currentTime = Long.MaxValue - 5 // Must be set before init()!
 bucket.init()

-bucket.offer(5) should ===(0)
-bucket.offer(10) should ===(5)
+bucket.offer(5) should ===(0L)
+bucket.offer(10) should ===(5L)

 bucket.currentTime += 10

-bucket.offer(5) should ===(0)
+bucket.offer(5) should ===(0L)
 }

 "(attempt to) maintain equal time between token renewal intervals" in {
 val bucket = new TestBucket(5, 3)
 bucket.init()

-bucket.offer(10) should ===(15)
+bucket.offer(10) should ===(15L)

 bucket.currentTime = 16
 // At this point there is no token in the bucket (we consumed it at T15) but the next token will arrive at T18!
@@ -133,7 +133,7 @@ class TokenBucketSpec extends AkkaSpec {
 // emitted here --+ +---- currently here (T16)
 //

-bucket.offer(1) should ===(2)
+bucket.offer(1) should ===(2L)

 bucket.currentTime = 19
 // At 18 bucket is empty, and so is at 19. For a cost of 2 we need to wait until T24 which is 5 units.
@@ -143,19 +143,19 @@ class TokenBucketSpec extends AkkaSpec {
 // ^ ^
 // emptied here --+ +---- currently here (T19)
 //
-bucket.offer(2) should ===(5)
+bucket.offer(2) should ===(5L)

 // Another case
 val bucket2 = new TestBucket(10, 3)
 bucket2.init()

 bucket2.currentTime = 4
-bucket2.offer(6) should ===(0)
+bucket2.offer(6) should ===(0L)

 // 4 tokens remain and new tokens arrive at T6 and T9 so here we have 6 tokens remaining.
 // We need 1 more, which will arrive at T12
 bucket2.currentTime = 10
-bucket2.offer(7) should ===(2)
+bucket2.offer(7) should ===(2L)
 }

 "work with cost of zero" in {
@@ -167,7 +167,7 @@ class TokenBucketSpec extends AkkaSpec {
 bucket.offer(0)
 bucket.offer(0)

-bucket.offer(10) should ===(0)
+bucket.offer(10) should ===(0L)

 // Bucket is empty now
 // Still can be called any number of times
@@ -187,7 +187,7 @@ class TokenBucketSpec extends AkkaSpec {
 // Collect 5 tokens
 bucket.currentTime += 5 * T

-bucket.offer(4) should ===(0)
+bucket.offer(4) should ===(0L)
 bucket.offer(2) should ===(T)
 }

@@ -222,7 +222,7 @@ class TokenBucketSpec extends AkkaSpec {

 if (delaying && idealBucket == 0) {
 // Actual emit time should equal to what the optimized token bucket calculates
-time should ===(nextEmit)
+time.toLong should ===(nextEmit)
 untilNextElement = time + Random.nextInt(arrivalPeriod)
 if (Debug) println(s" EMITTING")
 delaying = false
@@ -6,7 +6,7 @@ package akka.util

 import org.scalatest.WordSpec
 import org.scalatest.Matchers
-import org.scalactic.ConversionCheckedTripleEquals
+import org.scalactic.TypeCheckedTripleEquals

 object TypedMultiMapSpec {
 trait AbstractKey { type Type }
@@ -16,7 +16,7 @@ object TypedMultiMapSpec {
 type KV[K <: AbstractKey] = MyValue[K#Type]
 }

-class TypedMultiMapSpec extends WordSpec with Matchers with ConversionCheckedTripleEquals {
+class TypedMultiMapSpec extends WordSpec with Matchers with TypeCheckedTripleEquals {
 import TypedMultiMapSpec._

 "A TypedMultiMap" must {
@@ -10,10 +10,8 @@ import akka.stream._
 import akka.util.ByteString
 import java.nio.ByteOrder
 import akka.stream.stage._
 import scala.annotation.tailrec
 import scala.concurrent.duration._
 import scala.concurrent.Await
-import org.scalactic.ConversionCheckedTripleEquals

 object BidiFlowDocSpec {
 //#codec
@@ -19,7 +19,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 42), probe.ref)
 val response = probe.expectMsgType[Device.RespondTemperature]
-response.requestId should ===(42)
+response.requestId should ===(42L)
 response.value should ===(None)
 }
 //#device-read-test
@@ -34,7 +34,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 2), probe.ref)
 val response1 = probe.expectMsgType[Device.RespondTemperature]
-response1.requestId should ===(2)
+response1.requestId should ===(2L)
 response1.value should ===(Some(24.0))

 deviceActor.tell(Device.RecordTemperature(requestId = 3, 55.0), probe.ref)
@@ -42,7 +42,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 4), probe.ref)
 val response2 = probe.expectMsgType[Device.RespondTemperature]
-response2.requestId should ===(4)
+response2.requestId should ===(4L)
 response2.value should ===(Some(55.0))
 }
 //#device-write-read-test
@@ -40,7 +40,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 42), probe.ref)
 val response = probe.expectMsgType[Device.RespondTemperature]
-response.requestId should ===(42)
+response.requestId should ===(42L)
 response.value should ===(None)
 }

@@ -53,7 +53,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 2), probe.ref)
 val response1 = probe.expectMsgType[Device.RespondTemperature]
-response1.requestId should ===(2)
+response1.requestId should ===(2L)
 response1.value should ===(Some(24.0))

 deviceActor.tell(Device.RecordTemperature(requestId = 3, 55.0), probe.ref)
@@ -61,7 +61,7 @@ class DeviceSpec extends AkkaSpec {

 deviceActor.tell(Device.ReadTemperature(requestId = 4), probe.ref)
 val response2 = probe.expectMsgType[Device.RespondTemperature]
-response2.requestId should ===(4)
+response2.requestId should ===(4L)
 response2.value should ===(Some(55.0))
 }

@@ -82,7 +82,7 @@ class EnvelopeBufferSpec extends AkkaSpec {
 envelope.parseHeader(headerOut)

 headerOut.version should ===(version)
-headerOut.uid should ===(42)
+headerOut.uid should ===(42L)
 headerOut.inboundActorRefCompressionTableVersion should ===(28.toByte)
 headerOut.inboundClassManifestCompressionTableVersion should ===(35.toByte)
 headerOut.serializer should ===(4)
@@ -117,7 +117,7 @@ class EnvelopeBufferSpec extends AkkaSpec {
 envelope.parseHeader(headerOut)

 headerOut.version should ===(version)
-headerOut.uid should ===(42)
+headerOut.uid should ===(42L)
 headerOut.serializer should ===(4)
 headerOut.senderActorRefPath should ===(OptionVal.Some("akka://EnvelopeBufferSpec/uncompressable0"))
 headerOut.senderActorRef(originUid) should ===(OptionVal.None)
@@ -145,7 +145,7 @@ class EnvelopeBufferSpec extends AkkaSpec {
 envelope.parseHeader(headerOut)

 headerOut.version should ===(version)
-headerOut.uid should ===(42)
+headerOut.uid should ===(42L)
 headerOut.serializer should ===(4)
 headerOut.senderActorRef(originUid).get.path.toSerializationFormat should ===("akka://EnvelopeBufferSpec/reallylongcompressablestring")
 headerOut.senderActorRefPath should ===(OptionVal.None)
@@ -198,7 +198,7 @@ class EnvelopeBufferSpec extends AkkaSpec {
 envelope.parseHeader(headerOut)

 headerOut.version should ===(version)
-headerOut.uid should ===(42)
+headerOut.uid should ===(42L)
 headerOut.serializer should ===(4)
 headerOut.senderActorRef(originUid).get.path.toSerializationFormat should ===("akka://EnvelopeBufferSpec/reallylongcompressablestring")
 headerOut.senderActorRefPath should ===(OptionVal.None)
@@ -37,19 +37,19 @@ class FlightRecorderSpec extends AkkaSpec {

 def checkLogInitialized(log: reader.RollingLog): Unit = {
 log.logs(0).state should ===(Live)
-log.logs(0).head should ===(0)
+log.logs(0).head should ===(0L)
 log.logs(0).richEntries.toSeq should ===(Nil)

 log.logs(1).state should ===(Empty)
-log.logs(1).head should ===(0)
+log.logs(1).head should ===(0L)
 log.logs(1).richEntries.toSeq should ===(Nil)

 log.logs(2).state should ===(Empty)
-log.logs(2).head should ===(0)
+log.logs(2).head should ===(0L)
 log.logs(2).richEntries.toSeq should ===(Nil)

 log.logs(3).state should ===(Empty)
-log.logs(3).head should ===(0)
+log.logs(3).head should ===(0L)
 log.logs(3).richEntries.toSeq should ===(Nil)
 }

@@ -13,7 +13,7 @@ class CompressionTableSpec extends AkkaSpec {
 val decomp = CompressionTable(17L, 1, Map("0" → 0, "1" → 1, "2" → 2, "3" → 3)).invert
 decomp.table should ===(Array("0", "1", "2", "3"))
 decomp.originUid should ===(17L)
-decomp.version should ===(1)
+decomp.version should ===(1.toByte)
 }

 "enforce to start allocating from 0th index" in {
@@ -31,7 +31,7 @@ class TestPublisherSubscriberSpec extends AkkaSpec {

 upstreamSubscription.sendNext(1)
 downstreamSubscription.request(1)
-upstream.expectEventPF { case RequestMore(_, e) ⇒ e } should ===(1)
+upstream.expectEventPF { case RequestMore(_, e) ⇒ e } should ===(1L)
 downstream.expectEventPF { case OnNext(e) ⇒ e } should ===(1)

 upstreamSubscription.sendNext(1)
@@ -70,7 +70,7 @@ class TestPublisherSubscriberSpec extends AkkaSpec {
 .expectSubscription()
 .request(10)

-upstream.expectRequest() should ===(10)
+upstream.expectRequest() should ===(10L)
 upstream.sendNext(1)
 downstream.expectNext(1)
 }
@@ -82,7 +82,7 @@ class FileSinkSpec extends StreamSpec(UnboundedMailboxConfig) {
 val completion2 = write(lastWrite)
 val result = Await.result(completion2, 3.seconds)

-result.count should ===(lastWrite.flatten.length)
+result.count should ===(lastWrite.flatten.length.toLong)
 checkFileContents(f, lastWrite.mkString("") + TestLines.mkString("").drop(100))
 }
 }
@@ -101,7 +101,7 @@ class FileSinkSpec extends StreamSpec(UnboundedMailboxConfig) {
 val completion2 = write(lastWrite)
 val result = Await.result(completion2, 3.seconds)

-result.count should ===(lastWrite.flatten.length)
+result.count should ===(lastWrite.flatten.length.toLong)
 checkFileContents(f, lastWrite.mkString(""))
 }
 }
@@ -50,7 +50,7 @@ class BidiFlowSpec extends StreamSpec {
 ClosedShape
 }).run()

-Await.result(top, 1.second) should ===(3)
+Await.result(top, 1.second) should ===(3L)
 Await.result(bottom, 1.second) should ===(str)
 }

@@ -24,7 +24,7 @@ class SinkAsJavaStreamSpec extends StreamSpec(UnboundedMailboxConfig) {

 "work in happy case" in {
 val javaSource = Source(1 to 100).runWith(StreamConverters.asJavaStream())
-javaSource.count() should ===(100)
+javaSource.count() should ===(100L)
 }

 "fail if parent stream is failed" in {
@@ -41,12 +41,12 @@ class SinkAsJavaStreamSpec extends StreamSpec(UnboundedMailboxConfig) {

 "work with empty stream" in {
 val javaSource = Source.empty.runWith(StreamConverters.asJavaStream())
-javaSource.count() should ===(0)
+javaSource.count() should ===(0L)
 }

 "work with endless stream" in assertAllStagesStopped {
 val javaSource = Source.repeat(1).runWith(StreamConverters.asJavaStream())
-javaSource.limit(10).count() should ===(10)
+javaSource.limit(10).count() should ===(10L)
 javaSource.close()
 }

@@ -228,22 +228,22 @@ class SinkSpec extends StreamSpec with DefaultTimeout with ScalaFutures {
 Source(1 to 100).runWith(StreamConverters
 .javaCollectorParallelUnordered(4)(
 () ⇒ Collectors.summingInt[Int](intIdentity)))
-.futureValue should ===(5050)
+.futureValue.toInt should ===(5050)
 }

 "be reusable" in {
 val sink = StreamConverters.javaCollector[Int, Integer](() ⇒ Collectors.summingInt[Int](intIdentity))
-Source(1 to 4).runWith(sink).futureValue should ===(10)
+Source(1 to 4).runWith(sink).futureValue.toInt should ===(10)

 // Collector has state so it preserves all previous elements that went though
-Source(4 to 6).runWith(sink).futureValue should ===(15)
+Source(4 to 6).runWith(sink).futureValue.toInt should ===(15)
 }

 "be reusable with parallel version" in {
 val sink = StreamConverters.javaCollectorParallelUnordered(4)(() ⇒ Collectors.summingInt[Int](intIdentity))

-Source(1 to 4).runWith(sink).futureValue should ===(10)
-Source(4 to 6).runWith(sink).futureValue should ===(15)
+Source(1 to 4).runWith(sink).futureValue.toInt should ===(10)
+Source(4 to 6).runWith(sink).futureValue.toInt should ===(15)
 }

 "fail if getting the supplier fails" in {
@@ -4,7 +4,7 @@

 package akka.testkit

-import org.scalactic.Constraint
+import org.scalactic.{ CanEqual, TypeCheckedTripleEquals }

 import language.postfixOps
 import org.scalatest.{ BeforeAndAfterAll, WordSpecLike }
@@ -17,7 +17,6 @@ import scala.concurrent.Future
 import com.typesafe.config.{ Config, ConfigFactory }
 import akka.dispatch.Dispatchers
 import akka.testkit.TestEvent._
-import org.scalactic.ConversionCheckedTripleEquals
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.time.Span

@@ -59,7 +58,7 @@ object AkkaSpec {

 abstract class AkkaSpec(_system: ActorSystem)
 extends TestKit(_system) with WordSpecLike with Matchers with BeforeAndAfterAll with WatchedByCoroner
-with ConversionCheckedTripleEquals with ScalaFutures {
+with TypeCheckedTripleEquals with ScalaFutures {

 implicit val patience = PatienceConfig(testKitSettings.DefaultTimeout.duration, Span(100, org.scalatest.time.Millis))

@@ -109,13 +108,13 @@ abstract class AkkaSpec(_system: ActorSystem)
 }

 // for ScalaTest === compare of Class objects
-implicit def classEqualityConstraint[A, B]: Constraint[Class[A], Class[B]] =
-new Constraint[Class[A], Class[B]] {
+implicit def classEqualityConstraint[A, B]: CanEqual[Class[A], Class[B]] =
+new CanEqual[Class[A], Class[B]] {
 def areEqual(a: Class[A], b: Class[B]) = a == b
 }

-implicit def setEqualityConstraint[A, T <: Set[_ <: A]]: Constraint[Set[A], T] =
-new Constraint[Set[A], T] {
+implicit def setEqualityConstraint[A, T <: Set[_ <: A]]: CanEqual[Set[A], T] =
+new CanEqual[Set[A], T] {
 def areEqual(a: Set[A], b: T) = a == b
 }
 }
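The Constraint-to-CanEqual change above is a rename: CanEqual still exposes a single areEqual method and is still resolved implicitly by ===. A short usage sketch under that assumption, mirroring AkkaSpec's setEqualityConstraint rather than quoting the commit (the spec name and the Set comparison are illustrative):

    import org.scalactic.{ CanEqual, TypeCheckedTripleEquals }
    import org.scalatest.{ Matchers, WordSpec }

    class CanEqualSketchSpec extends WordSpec with Matchers with TypeCheckedTripleEquals {
      // Same shape as setEqualityConstraint above, with Constraint renamed to CanEqual.
      implicit def setEq[A, T <: Set[_ <: A]]: CanEqual[Set[A], T] =
        new CanEqual[Set[A], T] {
          def areEqual(a: Set[A], b: T) = a == b
        }

      "a custom CanEqual" must {
        "let === compare a Set[Any] with a Set[Int]" in {
          val anySet: Set[Any] = Set(1, 2, 3)
          anySet should ===(Set(1, 2, 3)) // compiles only because of the implicit above
        }
      }
    }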
@@ -12,7 +12,7 @@ class TestTimeSpec extends AkkaSpec(Map("akka.test.timefactor" → 2.0)) {
 "A TestKit" must {

 "correctly dilate times" taggedAs TimingTest in {
-1.second.dilated.toNanos should ===(1000000000L * testKitSettings.TestTimeFactor)
+1.second.dilated.toNanos should ===(1000000000L * testKitSettings.TestTimeFactor.toLong)

 val probe = TestProbe()
 val now = System.nanoTime