From 7a0e5b31f8105aaff75e71f130048545c9edffe1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johan=20Andr=C3=A9n?=
Date: Mon, 13 Mar 2017 17:49:45 +0100
Subject: [PATCH] Avoid Array.ofDim where possible #22516

---
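Notes: Array.ofDim[T](n) and new Array[T](n) produce the same one-dimensional
array, but ofDim is a generic factory that resolves the element type through
an implicit ClassTag at each call site, while new Array[T](n) with a
statically known element type compiles directly to the JVM's array-allocation
instruction. A minimal standalone sketch of the distinction (the object and
method names here are illustrative only, not taken from the patch):

    import scala.reflect.ClassTag

    object ArrayAllocNote {
      // Concrete element type: compiles straight to the JVM's
      // newarray/anewarray instruction, no ClassTag lookup involved.
      val direct: Array[Byte] = new Array[Byte](1024)

      // Array.ofDim is the generic multi-dimensional factory; for one
      // dimension it yields an equivalent array, but goes through the
      // implicit ClassTag machinery.
      val viaOfDim: Array[Byte] = Array.ofDim[Byte](1024)

      // With a type parameter both forms still need a ClassTag in scope,
      // as the generic call sites in this patch do.
      def alloc[A: ClassTag](n: Int): Array[A] = new Array[A](n)

      // ofDim remains the right tool for nested dimensions:
      val matrix: Array[Array[Int]] = Array.ofDim[Int](3, 4)
    }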
 .../test/scala/akka/util/ByteStringSpec.scala | 42 +++++++++----------
 .../main/scala/akka/actor/TypedActor.scala    |  4 +-
 .../akka/serialization/Serialization.scala    |  4 +-
 .../scala/akka/serialization/Serializer.scala |  4 +-
 .../main/scala/akka/util/ByteIterator.scala   |  7 ++--
 .../akka/remote/artery/BenchTestSource.scala  |  2 +-
 .../metrics/ClusterMetricsRouting.scala       |  2 +-
 .../akka/cluster/ddata/DurableStore.scala     |  4 +-
 .../ReplicatorMessageSerializer.scala         |  2 +-
 .../actor/ByteBufferSerializerDocSpec.scala   |  2 +-
 .../persistence/journal/JournalPerfSpec.scala |  2 +-
 .../serialization/SnapshotSerializer.scala    |  2 +-
 .../persistence/serialization/package.scala   |  2 +-
 .../remote/artery/MaxThroughputSpec.scala     |  2 +-
 .../akka/remote/artery/Association.scala      |  2 +-
 .../scala/akka/remote/artery/BufferPool.scala |  8 ++--
 .../remote/artery/FlightRecorderReader.scala  |  2 +-
 .../akka/remote/artery/ImmutableLongMap.scala | 10 ++---
 .../akka/remote/artery/LruBoundedCache.scala  |  2 +-
 .../artery/compress/CompressionTable.scala    |  4 +-
 .../artery/compress/TopHeavyHitters.scala     |  4 +-
 .../serialization/PrimitiveSerializers.scala  |  8 ++--
 .../akka/remote/artery/AeronSinkSpec.scala    |  2 +-
 .../remote/artery/FlightRecorderSpec.scala    |  4 +-
 .../artery/LargeMessagesStreamSpec.scala      |  2 +-
 .../RollingEventLogSimulationSpec.scala       |  2 +-
 .../PrimitivesSerializationSpec.scala         |  2 +-
 .../akka/stream/impl/TraversalTestUtils.scala |  4 +-
 .../impl/fusing/GraphInterpreterSpecKit.scala |  4 +-
 .../akka/stream/scaladsl/FramingSpec.scala    |  2 +-
 .../scala/akka/stream/scaladsl/SinkSpec.scala |  2 +-
 .../UnfoldResourceAsyncSourceSpec.scala       |  2 +-
 .../scaladsl/UnfoldResourceSourceSpec.scala   |  2 +-
 33 files changed, 74 insertions(+), 75 deletions(-)

diff --git a/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala b/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala
index 72c1de3249..d6ff811670 100644
--- a/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/util/ByteStringSpec.scala
@@ -151,10 +151,10 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 2
     val (bytes, from, until) = slice
     val (n, a, b) = (bytes.length / elemSize, from / elemSize, until / elemSize)
-    val reference = Array.ofDim[Short](n)
+    val reference = new Array[Short](n)
     bytes.asByteBuffer.order(byteOrder).asShortBuffer.get(reference, 0, n)
     val input = bytes.iterator
-    val decoded = Array.ofDim[Short](n)
+    val decoded = new Array[Short](n)
     for (i ← 0 until a) decoded(i) = input.getShort(byteOrder)
     input.getShorts(decoded, a, b - a)(byteOrder)
     for (i ← b until n) decoded(i) = input.getShort(byteOrder)
@@ -165,10 +165,10 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 4
     val (bytes, from, until) = slice
     val (n, a, b) = (bytes.length / elemSize, from / elemSize, until / elemSize)
-    val reference = Array.ofDim[Int](n)
+    val reference = new Array[Int](n)
     bytes.asByteBuffer.order(byteOrder).asIntBuffer.get(reference, 0, n)
     val input = bytes.iterator
-    val decoded = Array.ofDim[Int](n)
+    val decoded = new Array[Int](n)
     for (i ← 0 until a) decoded(i) = input.getInt(byteOrder)
     input.getInts(decoded, a, b - a)(byteOrder)
     for (i ← b until n) decoded(i) = input.getInt(byteOrder)
@@ -179,10 +179,10 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 8
     val (bytes, from, until) = slice
     val (n, a, b) = (bytes.length / elemSize, from / elemSize, until / elemSize)
-    val reference = Array.ofDim[Long](n)
+    val reference = new Array[Long](n)
     bytes.asByteBuffer.order(byteOrder).asLongBuffer.get(reference, 0, n)
     val input = bytes.iterator
-    val decoded = Array.ofDim[Long](n)
+    val decoded = new Array[Long](n)
     for (i ← 0 until a) decoded(i) = input.getLong(byteOrder)
     input.getLongs(decoded, a, b - a)(byteOrder)
     for (i ← b until n) decoded(i) = input.getLong(byteOrder)
@@ -193,10 +193,10 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 4
     val (bytes, from, until) = slice
     val (n, a, b) = (bytes.length / elemSize, from / elemSize, until / elemSize)
-    val reference = Array.ofDim[Float](n)
+    val reference = new Array[Float](n)
     bytes.asByteBuffer.order(byteOrder).asFloatBuffer.get(reference, 0, n)
     val input = bytes.iterator
-    val decoded = Array.ofDim[Float](n)
+    val decoded = new Array[Float](n)
     for (i ← 0 until a) decoded(i) = input.getFloat(byteOrder)
     input.getFloats(decoded, a, b - a)(byteOrder)
     for (i ← b until n) decoded(i) = input.getFloat(byteOrder)
@@ -208,10 +208,10 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 8
     val (bytes, from, until) = slice
     val (n, a, b) = (bytes.length / elemSize, from / elemSize, until / elemSize)
-    val reference = Array.ofDim[Double](n)
+    val reference = new Array[Double](n)
    bytes.asByteBuffer.order(byteOrder).asDoubleBuffer.get(reference, 0, n)
     val input = bytes.iterator
-    val decoded = Array.ofDim[Double](n)
+    val decoded = new Array[Double](n)
     for (i ← 0 until a) decoded(i) = input.getDouble(byteOrder)
     input.getDoubles(decoded, a, b - a)(byteOrder)
     for (i ← b until n) decoded(i) = input.getDouble(byteOrder)
@@ -222,7 +222,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
   def testShortEncoding(slice: ArraySlice[Short], byteOrder: ByteOrder): Boolean = {
     val elemSize = 2
     val (data, from, to) = slice
-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asShortBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until from) builder.putShort(data(i))(byteOrder)
@@ -234,7 +234,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
   def testIntEncoding(slice: ArraySlice[Int], byteOrder: ByteOrder): Boolean = {
     val elemSize = 4
     val (data, from, to) = slice
-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asIntBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until from) builder.putInt(data(i))(byteOrder)
@@ -246,7 +246,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
   def testLongEncoding(slice: ArraySlice[Long], byteOrder: ByteOrder): Boolean = {
     val elemSize = 8
     val (data, from, to) = slice
-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asLongBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until from) builder.putLong(data(i))(byteOrder)
@@ -259,7 +259,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
     val elemSize = 8
     val (data, nBytes) = anb

-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asLongBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until data.length) builder.putLongPart(data(i), nBytes)(byteOrder)
@@ -273,7 +273,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
   def testFloatEncoding(slice: ArraySlice[Float], byteOrder: ByteOrder): Boolean = {
     val elemSize = 4
     val (data, from, to) = slice
-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asFloatBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until from) builder.putFloat(data(i))(byteOrder)
@@ -285,7 +285,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
   def testDoubleEncoding(slice: ArraySlice[Double], byteOrder: ByteOrder): Boolean = {
     val elemSize = 8
     val (data, from, to) = slice
-    val reference = Array.ofDim[Byte](data.length * elemSize)
+    val reference = new Array[Byte](data.length * elemSize)
     ByteBuffer.wrap(reference).order(byteOrder).asDoubleBuffer.put(data)
     val builder = ByteString.newBuilder
     for (i ← 0 until from) builder.putDouble(data(i))(byteOrder)
@@ -734,7 +734,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
       check { slice: ByteStringSlice ⇒
         slice match {
           case (xs, from, until) ⇒ likeVector(xs)({ it ⇒
-            val array = Array.ofDim[Byte](xs.length)
+            val array = new Array[Byte](xs.length)
             it.slice(from, until).copyToArray(array, from, until)
             array.toSeq
           })
@@ -815,7 +815,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
       check { slice: ByteStringSlice ⇒
         slice match {
           case (xs, from, until) ⇒ likeVecIt(xs)({ it ⇒
-            val array = Array.ofDim[Byte](xs.length)
+            val array = new Array[Byte](xs.length)
             it.slice(from, until).copyToArray(array, from, until)
             array.toSeq
           }, strict = false)
@@ -830,7 +830,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
       check { slice: ByteStringSlice ⇒
         val (bytes, from, to) = slice
         val input = bytes.iterator
-        val output = Array.ofDim[Byte](bytes.length)
+        val output = new Array[Byte](bytes.length)
         for (i ← 0 until from) output(i) = input.getByte
         input.getBytes(output, from, to - from)
         for (i ← to until bytes.length) output(i) = input.getByte
@@ -863,7 +863,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
        val a = (0 max from) min bytes.length
        val b = (a max to) min bytes.length
        val input = bytes.iterator
-        val output = Array.ofDim[Byte](bytes.length)
+        val output = new Array[Byte](bytes.length)

        input.asInputStream.skip(a)
@@ -890,7 +890,7 @@ class ByteStringSpec extends WordSpec with Matchers with Checkers {
        val buffer = ByteBuffer.allocate(bytes.size)
        bytes.copyToBuffer(buffer)
        buffer.flip()
-        val array = Array.ofDim[Byte](bytes.size)
+        val array = new Array[Byte](bytes.size)
        buffer.get(array)
        bytes == array.toSeq
      }
diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala
index 2b606ced40..5c9e462e46 100644
--- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala
+++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala
@@ -151,7 +151,7 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi
      case ps if ps.length == 0 ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, Array())
      case ps ⇒
        val serialization = SerializationExtension(akka.serialization.JavaSerializer.currentSystem.value)
-        val serializedParameters = Array.ofDim[(Int, Class[_], Array[Byte])](ps.length)
+        val serializedParameters = new Array[(Int, Class[_], Array[Byte])](ps.length)
        for (i ← 0 until ps.length) {
          val p = ps(i)
          val s = serialization.findSerializerFor(p)
@@ -182,7 +182,7 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi
      case null ⇒ null
      case a if a.length == 0 ⇒ Array[AnyRef]()
      case a ⇒
-        val deserializedParameters: Array[AnyRef] = Array.ofDim[AnyRef](a.length) //Mutable for the sake of sanity
+        val deserializedParameters: Array[AnyRef] = new Array[AnyRef](a.length) //Mutable for the sake of sanity
        for (i ← 0 until a.length) {
          val (sId, manifest, bytes) = a(i)
          deserializedParameters(i) =
diff --git a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala
index 763e3e6911..9de3da6315 100644
--- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala
+++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala
@@ -188,7 +188,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
      case ser: ByteBufferSerializer ⇒
        ser.fromBinary(buf, manifest)
      case _ ⇒
-        val bytes = Array.ofDim[Byte](buf.remaining())
+        val bytes = new Array[Byte](buf.remaining())
        buf.get(bytes)
        deserializeByteArray(bytes, serializer, manifest)
    }
@@ -362,7 +362,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
   */
  private val quickSerializerByIdentity: Array[Serializer] = {
    val size = 1024
-    val table = Array.ofDim[Serializer](size)
+    val table = new Array[Serializer](size)
    serializerByIdentity.foreach {
      case (id, ser) ⇒ if (0 <= id && id < size) table(id) = ser
    }
diff --git a/akka-actor/src/main/scala/akka/serialization/Serializer.scala b/akka-actor/src/main/scala/akka/serialization/Serializer.scala
index 5032ec9c29..3b0d0092da 100644
--- a/akka-actor/src/main/scala/akka/serialization/Serializer.scala
+++ b/akka-actor/src/main/scala/akka/serialization/Serializer.scala
@@ -168,7 +168,7 @@ abstract class SerializerWithStringManifest extends Serializer {
 *   try {
 *     toBinary(o, buf)
 *     buf.flip()
- *     val bytes = Array.ofDim[Byte](buf.remaining)
+ *     val bytes = new Array[Byte](buf.remaining)
 *     buf.get(bytes)
 *     bytes
 *   } finally {
@@ -402,7 +402,7 @@ class ByteArraySerializer(val system: ExtendedActorSystem) extends BaseSerialize
  }

  override def fromBinary(buf: ByteBuffer, manifest: String): AnyRef = {
-    val bytes = Array.ofDim[Byte](buf.remaining())
+    val bytes = new Array[Byte](buf.remaining())
    buf.get(bytes)
    bytes
  }
diff --git a/akka-actor/src/main/scala/akka/util/ByteIterator.scala b/akka-actor/src/main/scala/akka/util/ByteIterator.scala
index 2ae1155051..14ac05a408 100644
--- a/akka-actor/src/main/scala/akka/util/ByteIterator.scala
+++ b/akka-actor/src/main/scala/akka/util/ByteIterator.scala
@@ -13,7 +13,6 @@ import scala.reflect.ClassTag

 object ByteIterator {
   object ByteArrayIterator {
-    private val emptyArray: Array[Byte] = Array.ofDim[Byte](0)

     protected[akka] def apply(array: Array[Byte]): ByteArrayIterator =
       new ByteArrayIterator(array, 0, array.length)
@@ -21,7 +20,7 @@ object ByteIterator {
     protected[akka] def apply(array: Array[Byte], from: Int, until: Int): ByteArrayIterator =
       new ByteArrayIterator(array, from, until)

-    val empty: ByteArrayIterator = apply(emptyArray)
+    val empty: ByteArrayIterator = apply(Array.emptyByteArray)
   }

   class ByteArrayIterator private (private var array: Array[Byte], private var from: Int, private var until: Int) extends ByteIterator {
@@ -38,7 +37,7 @@ object ByteIterator {
       else { val i = from; from = from + 1; array(i) }
     }

-    def clear(): Unit = { this.array = ByteArrayIterator.emptyArray; from = 0; until = from }
+    def clear(): Unit = { this.array = Array.emptyByteArray; from = 0; until = from }

     final override def length: Int = { val l = len; clear(); l }
@@ -460,7 +459,7 @@ abstract class ByteIterator extends BufferedIterator[Byte] {
  }

  override def toArray[B >: Byte](implicit arg0: ClassTag[B]): Array[B] = {
-    val target = Array.ofDim[B](len)
+    val target = new Array[B](len)
    copyToArray(target)
    target
  }
diff --git a/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala b/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala
index 4a8bbd671a..2ffe12f823 100644
--- a/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala
+++ b/akka-bench-jmh/src/main/scala/akka/remote/artery/BenchTestSource.scala
@@ -17,7 +17,7 @@ import akka.stream.stage.OutHandler
 */
class BenchTestSource(elementCount: Int) extends GraphStage[SourceShape[java.lang.Integer]] {

-  private val elements = Array.ofDim[java.lang.Integer](elementCount)
+  private val elements = new Array[java.lang.Integer](elementCount)
  (1 to elementCount).map(n => elements(n - 1) = n)

  val out: Outlet[java.lang.Integer] = Outlet("BenchTestSource")
diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
index 7e59948277..82b0159125 100644
--- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
+++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
@@ -468,7 +468,7 @@ private[metrics] class WeightedRoutees(routees: immutable.IndexedSeq[Routee], se
      case a ⇒ a
    }
  }
-  val buckets = Array.ofDim[Int](routees.size)
+  val buckets = new Array[Int](routees.size)
  val meanWeight = if (weights.isEmpty) 1 else weights.values.sum / weights.size
  val w = weights.withDefaultValue(meanWeight) // we don’t necessarily have metrics for all addresses
  var i = 0
diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala
index edccca3e86..ddb9acac78 100644
--- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala
+++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala
@@ -173,10 +173,10 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging {
        var n = 0
        val loadData = LoadData(iter.asScala.map { entry ⇒
          n += 1
-          val keyArray = Array.ofDim[Byte](entry.key.remaining)
+          val keyArray = new Array[Byte](entry.key.remaining)
          entry.key.get(keyArray)
          val key = new String(keyArray, ByteString.UTF_8)
-          val valArray = Array.ofDim[Byte](entry.`val`.remaining)
+          val valArray = new Array[Byte](entry.`val`.remaining)
          entry.`val`.get(valArray)
          val envelope = serializer.fromBinary(valArray, manifest).asInstanceOf[DurableDataEnvelope]
          key → envelope
diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala
index 93a4f68cde..b4c0a56269 100644
--- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala
+++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/protobuf/ReplicatorMessageSerializer.scala
@@ -52,7 +52,7 @@ import akka.cluster.ddata.PruningState.PruningPerformed

  private val n = new AtomicInteger(0)
  private val mask = size - 1
-  private val elements = Array.ofDim[(A, B)](size)
+  private val elements = new Array[(A, B)](size)
  private val ttlNanos = timeToLive.toNanos

  // in theory this should be volatile, but since the cache has low
diff --git a/akka-docs/rst/scala/code/docs/actor/ByteBufferSerializerDocSpec.scala b/akka-docs/rst/scala/code/docs/actor/ByteBufferSerializerDocSpec.scala
index 0aa7057ee8..750c986a59 100644
--- a/akka-docs/rst/scala/code/docs/actor/ByteBufferSerializerDocSpec.scala
+++ b/akka-docs/rst/scala/code/docs/actor/ByteBufferSerializerDocSpec.scala
@@ -24,7 +24,7 @@ class ByteBufferSerializerDocSpec {

      toBinary(o, buf)
      buf.flip()
-      val bytes = Array.ofDim[Byte](buf.remaining)
+      val bytes = new Array[Byte](buf.remaining)
      buf.get(bytes)
      bytes
    }
diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala
index c16a45868a..ebb737bbf7 100644
--- a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala
+++ b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala
@@ -89,7 +89,7 @@ abstract class JournalPerfSpec(config: Config) extends JournalSpec(config) {

  /** Executes a block of code multiple times (no warm-up) */
  def measure(msg: Duration ⇒ String)(block: ⇒ Unit): Unit = {
-    val measurements = Array.ofDim[Duration](measurementIterations)
+    val measurements = new Array[Duration](measurementIterations)
    var i = 0
    while (i < measurementIterations) {
      val start = System.nanoTime()
diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala
index c7a6316006..503d404c33 100644
--- a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala
+++ b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala
@@ -78,7 +78,7 @@ class SnapshotSerializer(val system: ExtendedActorSystem) extends BaseSerializer
    val manifest =
      if (remaining == 0) ""
      else {
-        val manifestBytes = Array.ofDim[Byte](remaining)
+        val manifestBytes = new Array[Byte](remaining)
        in.read(manifestBytes)
        new String(manifestBytes, UTF_8)
      }
diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala
index 4f079db224..6d058cb2ea 100644
--- a/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala
+++ b/akka-persistence/src/main/scala/akka/persistence/serialization/package.scala
@@ -12,7 +12,7 @@ package object serialization {
   */
  def streamToBytes(inputStream: InputStream): Array[Byte] = {
    val len = 16384
-    val buf = Array.ofDim[Byte](len)
+    val buf = new Array[Byte](len)
    val out = new ByteArrayOutputStream

    @scala.annotation.tailrec
diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala
index de628d7a31..ca27215139 100644
--- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala
+++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala
@@ -310,7 +310,7 @@ object MaxThroughputSpec extends MultiNodeConfig {
      val buf = ByteBuffer.allocate(8)
      toBinary(o, buf)
      buf.flip()
-      val bytes = Array.ofDim[Byte](buf.remaining)
+      val bytes = new Array[Byte](buf.remaining)
      buf.get(bytes)
      bytes
    }
diff --git a/akka-remote/src/main/scala/akka/remote/artery/Association.scala b/akka-remote/src/main/scala/akka/remote/artery/Association.scala
index 1ca72a0241..6d30a841bc 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/Association.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/Association.scala
@@ -136,7 +136,7 @@ private[remote] class Association(
  private val queueSize = advancedSettings.OutboundMessageQueueSize
  private val largeQueueSize = advancedSettings.OutboundLargeMessageQueueSize

-  private[this] val queues: Array[SendQueue.ProducerApi[OutboundEnvelope]] = Array.ofDim(2 + outboundLanes)
+  private[this] val queues: Array[SendQueue.ProducerApi[OutboundEnvelope]] = new Array(2 + outboundLanes)
  queues(ControlQueueIndex) = QueueWrapperImpl(createQueue(controlQueueSize)) // control stream
  queues(LargeQueueIndex) =
    if (transport.largeMessageChannelEnabled) // large messages stream
diff --git a/akka-remote/src/main/scala/akka/remote/artery/BufferPool.scala b/akka-remote/src/main/scala/akka/remote/artery/BufferPool.scala
index 649a9ef8f7..e00bfc456d 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/BufferPool.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/BufferPool.scala
@@ -365,8 +365,8 @@ private[remote] final class EnvelopeBuffer(val byteBuffer: ByteBuffer) {
  import EnvelopeBuffer._

  val aeronBuffer = new UnsafeBuffer(byteBuffer)

-  private var literalChars = Array.ofDim[Char](64)
-  private var literalBytes = Array.ofDim[Byte](64)
+  private var literalChars = new Array[Char](64)
+  private var literalBytes = new Array[Byte](64)

  def writeHeader(h: HeaderBuilder): Unit = writeHeader(h, null)
@@ -514,8 +514,8 @@ private[remote] final class EnvelopeBuffer(val byteBuffer: ByteBuffer) {

  private def ensureLiteralCharsLength(length: Int): Unit = {
    if (length > literalChars.length) {
-      literalChars = Array.ofDim[Char](length)
-      literalBytes = Array.ofDim[Byte](length)
+      literalChars = new Array[Char](length)
+      literalBytes = new Array[Byte](length)
    }
  }
diff --git a/akka-remote/src/main/scala/akka/remote/artery/FlightRecorderReader.scala b/akka-remote/src/main/scala/akka/remote/artery/FlightRecorderReader.scala
index 722755f39f..d3efcdeb53 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/FlightRecorderReader.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/FlightRecorderReader.scala
@@ -147,7 +147,7 @@ private[akka] final class FlightRecorderReader(fileChannel: FileChannel) {
          val recordStartOffset = recordOffset + RollingEventLogSection.CommitEntrySize

          // FIXME: extract magic numbers
-          val metadata = Array.ofDim[Byte](fileBuffer.getByte(recordStartOffset + 20))
+          val metadata = new Array[Byte](fileBuffer.getByte(recordStartOffset + 20))
          fileBuffer.getBytes(recordStartOffset + 21, metadata)

          val entry = RichEntry(
diff --git a/akka-remote/src/main/scala/akka/remote/artery/ImmutableLongMap.scala b/akka-remote/src/main/scala/akka/remote/artery/ImmutableLongMap.scala
index 4208cb4a1f..870d694aa1 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/ImmutableLongMap.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/ImmutableLongMap.scala
@@ -59,19 +59,19 @@ private[akka] class ImmutableLongMap[A >: Null] private (
    val i = Arrays.binarySearch(keys, key)
    if (i >= 0) {
      // existing key, replace value
-      val newValues = Array.ofDim[A](values.length)
+      val newValues = new Array[A](values.length)
      System.arraycopy(values, 0, newValues, 0, values.length)
      newValues(i) = value
      new ImmutableLongMap(keys, newValues)
    } else {
      // insert the entry at the right position, and keep the arrays sorted
      val j = -(i + 1)
-      val newKeys = Array.ofDim[Long](size + 1)
+      val newKeys = new Array[Long](size + 1)
      System.arraycopy(keys, 0, newKeys, 0, j)
      newKeys(j) = key
      System.arraycopy(keys, j, newKeys, j + 1, keys.length - j)

-      val newValues = Array.ofDim[A](size + 1)
+      val newValues = new Array[A](size + 1)
      System.arraycopy(values, 0, newValues, 0, j)
      newValues(j) = value
      System.arraycopy(values, j, newValues, j + 1, values.length - j)
@@ -87,11 +87,11 @@ private[akka] class ImmutableLongMap[A >: Null] private (
      if (size == 1)
        ImmutableLongMap.empty
      else {
-        val newKeys = Array.ofDim[Long](size - 1)
+        val newKeys = new Array[Long](size - 1)
        System.arraycopy(keys, 0, newKeys, 0, i)
        System.arraycopy(keys, i + 1, newKeys, i, keys.length - i - 1)

-        val newValues = Array.ofDim[A](size - 1)
+        val newValues = new Array[A](size - 1)
        System.arraycopy(values, 0, newValues, 0, i)
        System.arraycopy(values, i + 1, newValues, i, values.length - i - 1)
diff --git a/akka-remote/src/main/scala/akka/remote/artery/LruBoundedCache.scala b/akka-remote/src/main/scala/akka/remote/artery/LruBoundedCache.scala
index 9fbb20adb3..bd64ee69cf 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/LruBoundedCache.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/LruBoundedCache.scala
@@ -67,7 +67,7 @@ private[akka] abstract class LruBoundedCache[K: ClassTag, V <: AnyRef: ClassTag]

  private[this] val keys = Array.ofDim[K](capacity)
  private[this] val values = Array.ofDim[V](capacity)
-  private[this] val hashes = Array.ofDim[Int](capacity)
+  private[this] val hashes = new Array[Int](capacity)
  private[this] val epochs = Array.fill[Int](capacity)(epoch - evictAgeThreshold) // Guarantee existing "values" are stale

  final def get(k: K): Option[V] = {
diff --git a/akka-remote/src/main/scala/akka/remote/artery/compress/CompressionTable.scala b/akka-remote/src/main/scala/akka/remote/artery/compress/CompressionTable.scala
index 0b537822e8..d6c2cf3d02 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/compress/CompressionTable.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/compress/CompressionTable.scala
@@ -31,8 +31,8 @@ private[remote] final case class CompressionTable[T](originUid: Long, version: B
    require(dictionary.values.sum + dictionary.size == expectedGaplessSum,
      "Given compression map does not seem to be gap-less and starting from zero, " +
        "which makes compressing it into an Array difficult, bailing out! Map was: " + dictionary)
-    val tups = Array.ofDim[(Object, Int)](dictionary.size).asInstanceOf[Array[(T, Int)]]
-    val ts = Array.ofDim[Object](dictionary.size).asInstanceOf[Array[T]]
+    val tups = new Array[(Object, Int)](dictionary.size).asInstanceOf[Array[(T, Int)]]
+    val ts = new Array[Object](dictionary.size).asInstanceOf[Array[T]]

    var i = 0
    val mit = dictionary.iterator
diff --git a/akka-remote/src/main/scala/akka/remote/artery/compress/TopHeavyHitters.scala b/akka-remote/src/main/scala/akka/remote/artery/compress/TopHeavyHitters.scala
index 3ab2e90976..729975f57e 100644
--- a/akka-remote/src/main/scala/akka/remote/artery/compress/TopHeavyHitters.scala
+++ b/akka-remote/src/main/scala/akka/remote/artery/compress/TopHeavyHitters.scala
@@ -34,14 +34,14 @@ private[remote] final class TopHeavyHitters[T >: Null](val max: Int)(implicit cl

  // Contains the hash value for each entry in the hashmap. Used for quicker lookups (equality check can be avoided
  // if hashes don't match)
-  private[this] val hashes: Array[Int] = Array.ofDim(capacity)
+  private[this] val hashes: Array[Int] = new Array(capacity)
  // Actual stored elements in the hashmap
  private[this] val items: Array[T] = Array.ofDim[T](capacity)
  // Index of stored element in the associated heap
  private[this] val heapIndex: Array[Int] = Array.fill(capacity)(-1)
  // Weights associated with an entry in the hashmap. Used to maintain the heap property and give easy access to low
  // weight entries
-  private[this] val weights: Array[Long] = Array.ofDim(capacity)
+  private[this] val weights: Array[Long] = new Array(capacity)

  // Heap structure containing indices to slots in the hashmap
  private[this] val heap: Array[Int] = Array.fill(max)(-1)
diff --git a/akka-remote/src/main/scala/akka/remote/serialization/PrimitiveSerializers.scala b/akka-remote/src/main/scala/akka/remote/serialization/PrimitiveSerializers.scala
index 4b71fd4d12..d53fefd39b 100644
--- a/akka-remote/src/main/scala/akka/remote/serialization/PrimitiveSerializers.scala
+++ b/akka-remote/src/main/scala/akka/remote/serialization/PrimitiveSerializers.scala
@@ -19,7 +19,7 @@ class LongSerializer(val system: ExtendedActorSystem) extends BaseSerializer wit
  }

  override def toBinary(o: AnyRef): Array[Byte] = {
-    val result = Array.ofDim[Byte](8)
+    val result = new Array[Byte](8)
    var long = Long.unbox(o)
    var i = 0
    while (long != 0) {
@@ -50,7 +50,7 @@ class IntSerializer(val system: ExtendedActorSystem) extends BaseSerializer with
  override def fromBinary(buf: ByteBuffer, manifest: String): AnyRef = Int.box(buf.getInt)

  override def toBinary(o: AnyRef): Array[Byte] = {
-    val result = Array.ofDim[Byte](4)
+    val result = new Array[Byte](4)
    var int = Int.unbox(o)
    var i = 0
    while (int != 0) {
@@ -79,7 +79,7 @@ class StringSerializer(val system: ExtendedActorSystem) extends BaseSerializer w
  override def toBinary(o: AnyRef, buf: ByteBuffer): Unit = buf.put(toBinary(o))

  override def fromBinary(buf: ByteBuffer, manifest: String): AnyRef = {
-    val bytes = Array.ofDim[Byte](buf.remaining())
+    val bytes = new Array[Byte](buf.remaining())
    buf.get(bytes)
    new String(bytes, "UTF-8")
  }
@@ -106,7 +106,7 @@ class ByteStringSerializer(val system: ExtendedActorSystem) extends BaseSerializ

  override def toBinary(o: AnyRef): Array[Byte] = {
    val bs = o.asInstanceOf[ByteString]
-    val result = Array.ofDim[Byte](bs.length)
+    val result = new Array[Byte](bs.length)
    bs.copyToArray(result, 0, bs.length)
    result
  }
diff --git a/akka-remote/src/test/scala/akka/remote/artery/AeronSinkSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/AeronSinkSpec.scala
index 3fc9302a7e..20197cfe58 100644
--- a/akka-remote/src/test/scala/akka/remote/artery/AeronSinkSpec.scala
+++ b/akka-remote/src/test/scala/akka/remote/artery/AeronSinkSpec.scala
@@ -64,7 +64,7 @@ class AeronSinkSpec extends AkkaSpec with ImplicitSender {
        .runWith(Sink.ignore)

      // use large enough messages to fill up buffers
-      val payload = Array.ofDim[Byte](100000)
+      val payload = new Array[Byte](100000)
      val done = Source(1 to 1000).map(_ ⇒ payload)
        .map { n ⇒
          val envelope = pool.acquire()
diff --git a/akka-remote/src/test/scala/akka/remote/artery/FlightRecorderSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/FlightRecorderSpec.scala
index 4efc079132..78ba8ec423 100644
--- a/akka-remote/src/test/scala/akka/remote/artery/FlightRecorderSpec.scala
+++ b/akka-remote/src/test/scala/akka/remote/artery/FlightRecorderSpec.scala
@@ -151,7 +151,7 @@ class FlightRecorderSpec extends AkkaSpec {
    "properly truncate low frequency event metadata if necessary" in withFlightRecorder { (recorder, reader, channel) ⇒
      val sink = recorder.createEventSink()

-      val longMetadata = Array.ofDim[Byte](1024)
+      val longMetadata = new Array[Byte](1024)
      sink.loFreq(0, longMetadata)

      channel.force(false)
@@ -160,7 +160,7 @@ class FlightRecorderSpec extends AkkaSpec {

      val entries = reader.structure.loFreqLog.logs(0).richEntries.toSeq
      entries.size should ===(1)
-      entries.head.metadata should ===(Array.ofDim[Byte](FlightRecorder.LoFreqRecordSize - 32))
+      entries.head.metadata should ===(new Array[Byte](FlightRecorder.LoFreqRecordSize - 32))
    }

diff --git a/akka-remote/src/test/scala/akka/remote/artery/LargeMessagesStreamSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/LargeMessagesStreamSpec.scala
index 7ed66b0cd6..a932079aed 100644
--- a/akka-remote/src/test/scala/akka/remote/artery/LargeMessagesStreamSpec.scala
+++ b/akka-remote/src/test/scala/akka/remote/artery/LargeMessagesStreamSpec.scala
@@ -106,7 +106,7 @@ class LargeMessagesStreamSpec extends ArteryMultiNodeSpec(
      val remoteProbe = TestProbe()(systemA)

      val largeBytes = 2000000
-      largeRemote.tell(Ping(ByteString.fromArray(Array.ofDim[Byte](largeBytes))), remoteProbe.ref)
+      largeRemote.tell(Ping(ByteString.fromArray(new Array[Byte](largeBytes))), remoteProbe.ref)
      regularRemote.tell(Ping(), remoteProbe.ref)

      // should be no problems sending regular small messages while large messages are being sent
diff --git a/akka-remote/src/test/scala/akka/remote/artery/RollingEventLogSimulationSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/RollingEventLogSimulationSpec.scala
index fe5e4e55dc..be7fa34349 100644
--- a/akka-remote/src/test/scala/akka/remote/artery/RollingEventLogSimulationSpec.scala
+++ b/akka-remote/src/test/scala/akka/remote/artery/RollingEventLogSimulationSpec.scala
@@ -119,7 +119,7 @@ class RollingEventLogSimulationSpec extends AkkaSpec {

  class Simulator(writerCount: Int, entryCount: Int, totalWrites: Int) {
    var headPointer = 0
-    val simulatedBuffer = Array.ofDim[Byte](4 * entryCount)
+    val simulatedBuffer = new Array[Byte](4 * entryCount)
    val writers = Array.tabulate(writerCount)(new Writer(_, entryCount, totalWrites))
    var activeWriters = writerCount
    var log: List[String] = Nil
diff --git a/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala
index 44c3bdf538..d4ed6d609e 100644
--- a/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala
+++ b/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala
@@ -47,7 +47,7 @@ class PrimitivesSerializationSpec extends AkkaSpec(PrimitivesSerializationSpec.t
      buffer.flip()

      // also make sure that the Array and ByteBuffer formats are equal, given LITTLE_ENDIAN
-      val array1 = Array.ofDim[Byte](buffer.remaining())
+      val array1 = new Array[Byte](buffer.remaining())
      buffer.get(array1)
      val array2 = serializer.toBinary(msg)
      ByteString(array1) should ===(ByteString(array2))
diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/TraversalTestUtils.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/TraversalTestUtils.scala
index ba95b3e56e..ae41c4307b 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/impl/TraversalTestUtils.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/impl/TraversalTestUtils.scala
@@ -100,8 +100,8 @@ object TraversalTestUtils {
    var islandStack: List[(IslandTag, Attributes)] = (TestDefaultIsland, Attributes.none) :: Nil

    val connections = b.inSlots
-    val inlets = Array.ofDim[InPort](connections)
-    val outlets = Array.ofDim[OutPort](connections)
+    val inlets = new Array[InPort](connections)
+    val outlets = new Array[OutPort](connections)

    // Track next assignable number for input ports
    var inOffs = 0
diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala
index 0db17fc20a..c83fdb725f 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/GraphInterpreterSpecKit.scala
@@ -38,7 +38,7 @@ object GraphInterpreterSpecKit {
    var inOwners = SMap.empty[Inlet[_], GraphStageLogic]
    var outOwners = SMap.empty[Outlet[_], GraphStageLogic]

-    val logics = Array.ofDim[GraphStageLogic](upstreams.length + stages.length + downstreams.length)
+    val logics = new Array[GraphStageLogic](upstreams.length + stages.length + downstreams.length)

    var idx = 0
    while (idx < upstreams.length) {
@@ -132,7 +132,7 @@ object GraphInterpreterSpecKit {
    inOwners: SMap[Inlet[_], GraphStageLogic],
    outOwners: SMap[Outlet[_], GraphStageLogic]): Array[Connection] = {

-    val connections = Array.ofDim[Connection](connectedPorts.size)
+    val connections = new Array[Connection](connectedPorts.size)

    connectedPorts.zipWithIndex.foreach {
      case ((outlet, inlet), idx) ⇒
diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala
index ffff359ae6..72a773ad38 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala
@@ -161,7 +161,7 @@ class FramingSpec extends StreamSpec {
        }
      }

-      ByteString(Array.ofDim[Byte](fieldOffset)) ++ header ++ payload
+      ByteString(new Array[Byte](fieldOffset)) ++ header ++ payload
    }

    "work with various byte orders, frame lengths and offsets" taggedAs LongRunningTest in {
diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala
index 51490cfbe4..cf261c70ef 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/SinkSpec.scala
@@ -198,7 +198,7 @@ class SinkSpec extends StreamSpec with DefaultTimeout with ScalaFutures {
      }

      def supplier(): Supplier[Array[Int]] = new Supplier[Array[Int]] {
-        override def get(): Array[Int] = Array.ofDim(1)
+        override def get(): Array[Int] = new Array(1)
      }
      def accumulator(): BiConsumer[Array[Int], Int] = new BiConsumer[Array[Int], Int] {
        override def accept(a: Array[Int], b: Int): Unit = a(0) = intIdentity.applyAsInt(b)
diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala
index 8f62d6ebd6..fa9456ae8f 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala
@@ -176,7 +176,7 @@ class UnfoldResourceAsyncSourceSpec extends StreamSpec(UnboundedMailboxConfig) {
    "work with ByteString as well" in assertAllStagesStopped {
      val chunkSize = 50
-      val buffer = Array.ofDim[Char](chunkSize)
+      val buffer = new Array[Char](chunkSize)

      val p = Source.unfoldResourceAsync[ByteString, Reader](
        open,
        reader ⇒ {
diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceSourceSpec.scala
index e396394092..187ff76720 100644
--- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceSourceSpec.scala
+++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceSourceSpec.scala
@@ -112,7 +112,7 @@ class UnfoldResourceSourceSpec extends StreamSpec(UnboundedMailboxConfig) {
    "work with ByteString as well" in assertAllStagesStopped {
      val chunkSize = 50
-      val buffer = Array.ofDim[Char](chunkSize)
+      val buffer = new Array[Char](chunkSize)

      val p = Source.unfoldResource[ByteString, Reader](
        () ⇒ new BufferedReader(new FileReader(manyLinesFile)),
        reader ⇒ {