remove scala 2.12 support (#1986)
* remove scala 2.12 support
* move around some source because we don't need as many source dirs
* move more files
* remove annotations
* remove annotations
* Create remove-scala-2.12-support.excludes
* remove some scala-2.12 refs
* remove compat.PartialFunction
* Update remove-scala-2.12-support.excludes
Parent: 0f6def662f
Commit: f2b677dfa0

120 changed files with 31 additions and 3597 deletions
.github/workflows/dependency-graph.yml (2 changes)

@@ -39,4 +39,4 @@ jobs:
       - uses: scalacenter/sbt-dependency-submission@64084844d2b0a9b6c3765f33acde2fbe3f5ae7d3 # v3.1.0
         with:
           configs-ignore: provided optional test TestJdk9 compile-internal runtime-internal pr-validation multi-jvm scala-tool scala-doc-tool
-          modules-ignore: pekko-bench-jmh_2.12 pekko-docs_2.12 pekko-bench-jmh_2.13 pekko-docs_2.13 pekko-bench-jmh_3 pekko-docs_3
+          modules-ignore: pekko-bench-jmh_2.13 pekko-docs_2.13 pekko-bench-jmh_3 pekko-docs_3
.github/workflows/nightly-builds.yml (2 changes)

@@ -114,7 +114,7 @@ jobs:
           # No need to specify the full Scala version. Only the Scala
           # binary version is required and Pekko build will set the right
           # full version from it.
-          scalaVersion: ["2.12.x", "2.13.x", "3.3.x"]
+          scalaVersion: ["2.13.x", "3.3.x"]
          javaVersion: [17, 21]
        env:
          DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
@@ -236,7 +236,7 @@ Pekko uses [MiMa](https://github.com/lightbend/mima) to validate the binary compatibility
PR fails due to binary compatibility issues, you may see an error like this:

```
-[info] stream: found 1 potential binary incompatibilities while checking against org.apache.pekko:pekko-stream_2.12:2.4.2 (filtered 222)
+[info] stream: found 1 potential binary incompatibilities while checking against org.apache.pekko:pekko-stream_2.13:2.4.2 (filtered 222)
[error]  * method foldAsync(java.lang.Object,scala.Function2)org.apache.pekko.stream.scaladsl.FlowOps in trait org.apache.pekko.stream.scaladsl.FlowOps is present only in current version
[error]    filter with: ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.pekko.stream.scaladsl.FlowOps.foldAsync")
```
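When such an incompatibility is intentional (as the removals in this commit are), the filter line MiMa suggests is copied into an excludes file, which is exactly what the new `remove-scala-2.12-support.excludes` file later in this diff does. A minimal sketch, reusing the hypothetical `foldAsync` error above:

```
# hypothetical entry, taken verbatim from the MiMa suggestion above
ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.pekko.stream.scaladsl.FlowOps.foldAsync")
```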
LICENSE (3 changes)

@@ -212,7 +212,7 @@ Copyright (c) 2003-2011, LAMP/EPFL

pekko-actor contains code from scala-collection-compat in the `org.apache.pekko.util.ccompat` package
which has released under an Apache 2.0 license.
-  - actor/src/main/scala-2.12/org/apache/pekko/util/ccompat/package.scala
+  - actor/src/main/scala/org/apache/pekko/util/ccompat/package.scala

Scala (https://www.scala-lang.org)

@@ -222,7 +222,6 @@ Copyright EPFL and Lightbend, Inc.

pekko-actor contains code from scala-library in the `org.apache.pekko.util.ccompat` package
and in `org.apache.pekko.util.Helpers.scala` which was released under an Apache 2.0 license.
-  - actor/src/main/scala-2.12/org/apache/pekko/util/ccompat/package.scala
  - actor/src/main/scala/org/apache/pekko/util/Helpers.scala

Scala (https://www.scala-lang.org)
@@ -29,12 +29,8 @@ class SameThreadExecutionContextSpec extends PekkoSpec with Matchers {

    "return a Scala specific version" in {
      val ec = SameThreadExecutionContext()
-      if (util.Properties.versionNumberString.startsWith("2.12")) {
-        ec.getClass.getName should startWith("org.apache.pekko.dispatch.internal.SameThreadExecutionContext")
-      } else {
-        // in 2.13 and higher parasitic is available
-        ec.getClass.getName should ===("scala.concurrent.ExecutionContext$parasitic$")
-      }
+      // in Scala 2.13 and higher parasitic is available
+      ec.getClass.getName should ===("scala.concurrent.ExecutionContext$parasitic$")
    }

    "should run follow up future operations in the same dispatcher" in {
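With 2.12 support gone, the factory under test always resolves to the Scala 2.13+ stdlib's `scala.concurrent.ExecutionContext.parasitic`, which runs each task on the thread that submits (or completes) it. A minimal stdlib-only sketch of the behaviour the spec now assumes:

```scala
import scala.concurrent.{ ExecutionContext, Future }

object ParasiticDemo extends App {
  // parasitic runs callbacks on the calling/completing thread,
  // avoiding a thread hop for cheap, non-blocking continuations
  implicit val ec: ExecutionContext = ExecutionContext.parasitic

  val f = Future.successful(21).map(_ * 2)
  println(f.value) // Some(Success(42)) -- already completed, no other thread involved
}
```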
@@ -18,7 +18,6 @@
package org.apache.pekko.util

import org.apache.pekko
-import pekko.util.ccompat._
import pekko.util.OptionConverters._

import java.util._

@@ -32,7 +31,6 @@ import scala.annotation.nowarn
 * Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
 */

-@ccompatUsedUntil213
@nowarn("msg=deprecated")
object Scala212CompatTest {

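The `OptionConverters` under test mirror what the Scala 2.13 stdlib already ships as `scala.jdk.OptionConverters`, which is why the `ccompat` import and the `@ccompatUsedUntil213` marker can go. A minimal stdlib-only sketch:

```scala
import java.util.Optional
import scala.jdk.OptionConverters._ // Scala 2.13+ stdlib

object OptionConvertersDemo extends App {
  val javaOpt: Optional[String] = Some("pekko").toJava // scala.Option -> java.util.Optional
  val scalaOpt: Option[String]  = javaOpt.toScala      // and back again
  println((javaOpt, scalaOpt)) // (Optional[pekko],Some(pekko))
}
```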
@@ -1,26 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.actor.typed.internal.receptionist

import org.apache.pekko
import pekko.actor.typed.ActorRef
import pekko.annotation.InternalApi

/**
 * INTERNAL API
 */
@InternalApi private[receptionist] object Platform {
  type Service[K <: AbstractServiceKey] = ActorRef[K#Protocol]
  type Subscriber[K <: AbstractServiceKey] = ActorRef[ReceptionistMessages.Listing[K#Protocol]]
}
@@ -0,0 +1,21 @@ (new file: remove-scala-2.12-support.excludes)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Remove deprecated method in Scheduler
ProblemFilters.exclude[MissingClassProblem]("org.apache.pekko.compat.PartialFunction")
ProblemFilters.exclude[MissingClassProblem]("org.apache.pekko.compat.PartialFunction$")
ProblemFilters.exclude[MissingClassProblem]("org.apache.pekko.util.ccompat.ccompatUsedUntil213")
@@ -1,57 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.compat

import scala.collection.immutable
import scala.concurrent.{ ExecutionContext, Future => SFuture }

import scala.annotation.nowarn

import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.util.ccompat._

/**
 * INTERNAL API
 *
 * Compatibility wrapper for `scala.concurrent.Future` to be able to compile the same code
 * against Scala 2.12, 2.13
 *
 * Remove these classes as soon as support for Scala 2.12 is dropped!
 */
@nowarn @InternalApi private[pekko] object Future {
  def fold[T, R](futures: IterableOnce[SFuture[T]])(zero: R)(op: (R, T) => R)(
      implicit executor: ExecutionContext): SFuture[R] =
    SFuture.fold[T, R](futures)(zero)(op)(executor)

  def fold[T, R](futures: immutable.Iterable[SFuture[T]])(zero: R)(op: (R, T) => R)(
      implicit executor: ExecutionContext): SFuture[R] =
    SFuture.foldLeft[T, R](futures)(zero)(op)(executor)

  def reduce[T, R >: T](futures: IterableOnce[SFuture[T]])(op: (R, T) => R)(
      implicit executor: ExecutionContext): SFuture[R] =
    SFuture.reduce[T, R](futures)(op)(executor)

  def reduce[T, R >: T](futures: immutable.Iterable[SFuture[T]])(op: (R, T) => R)(
      implicit executor: ExecutionContext): SFuture[R] =
    SFuture.reduceLeft[T, R](futures)(op)(executor)

  def find[T](futures: IterableOnce[SFuture[T]])(p: T => Boolean)(
      implicit executor: ExecutionContext): SFuture[Option[T]] =
    SFuture.find[T](futures)(p)(executor)

  def find[T](futures: immutable.Iterable[SFuture[T]])(p: T => Boolean)(
      implicit executor: ExecutionContext): SFuture[Option[T]] =
    SFuture.find[T](futures)(p)(executor)
}
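Since Scala 2.13 the stdlib's `scala.concurrent.Future` companion provides `foldLeft`, `reduceLeft` and `find` directly, so callers no longer need this shim. A minimal sketch of the stdlib route:

```scala
import scala.concurrent.{ Await, ExecutionContext, Future }
import scala.concurrent.duration._

object FoldDemo extends App {
  implicit val ec: ExecutionContext = ExecutionContext.global

  val futures = List(Future(1), Future(2), Future(3))
  // stdlib replacement for the deleted pekko.compat.Future.fold wrapper
  val sum: Future[Int] = Future.foldLeft(futures)(0)(_ + _)
  println(Await.result(sum, 3.seconds)) // 6
}
```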
@@ -1,35 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.compat

import scala.annotation.nowarn

import org.apache.pekko.annotation.InternalApi

/**
 * INTERNAL API
 *
 * Compatibility wrapper for `scala.PartialFunction` to be able to compile the same code
 * against Scala 2.12, 2.13, 3.0
 *
 * Remove these classes as soon as support for Scala 2.12 is dropped!
 */
@InternalApi private[pekko] object PartialFunction {

  def fromFunction[A, B](f: (A) => B): scala.PartialFunction[A, B] = {
    @nowarn val pf = scala.PartialFunction(f)
    pf
  }

}
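Scala 2.13 added `PartialFunction.fromFunction` to the standard library, which is what made this wrapper (and its `@nowarn` around the deprecated `scala.PartialFunction.apply`) redundant. A minimal sketch:

```scala
object PartialFunctionDemo extends App {
  // Scala 2.13+ stdlib replacement for the deleted compat wrapper
  val pf: PartialFunction[Int, String] = PartialFunction.fromFunction(n => s"value: $n")
  println(pf(42))            // value: 42
  println(pf.isDefinedAt(1)) // true: a total function lifted to a PartialFunction
}
```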
@@ -1,39 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.dispatch.internal

import scala.concurrent.ExecutionContext

import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.dispatch.BatchingExecutor

/**
 * Factory to create same thread ec. Not intended to be called from any other site than to create [[pekko.dispatch.ExecutionContexts#parasitic]]
 *
 * INTERNAL API
 */
@InternalApi
private[dispatch] object SameThreadExecutionContext {

  private val sameThread = new ExecutionContext with BatchingExecutor {
    override protected def unbatchedExecute(runnable: Runnable): Unit = runnable.run()
    override protected def resubmitOnBlock: Boolean = false // No point since we execute on same thread
    override def reportFailure(t: Throwable): Unit =
      throw new IllegalStateException("exception in sameThreadExecutionContext", t)
  }

  def apply(): ExecutionContext = sameThread

}
@@ -1,32 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.dispatch.internal

import org.apache.pekko
import pekko.annotation.InternalApi

/**
 * INTERNAL API
 */
@InternalApi
private[pekko] object ScalaBatchable {

  // see Scala 2.13 source tree for explanation
  def isBatchable(runnable: Runnable): Boolean = runnable match {
    case b: pekko.dispatch.Batchable            => b.isBatchable
    case _: scala.concurrent.OnCompleteRunnable => true
    case _                                      => false
  }

}
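For context, `isBatchable` is how the dispatcher decides whether a task may be grouped with others on the current thread; `scala.concurrent.OnCompleteRunnable` is the stdlib marker trait it checks (superseded in 2.13 by `scala.concurrent.Batchable`, which it extends). A stand-alone sketch of the same dispatch test, using the 2.13 trait:

```scala
import scala.concurrent.Batchable

object BatchableDemo extends App {
  // a task that opts in to batching via the 2.13 stdlib marker trait
  val batched: Runnable = new Runnable with Batchable {
    def run(): Unit = println("ran batched")
  }
  val plain: Runnable = () => println("ran plain")

  def isBatchable(r: Runnable): Boolean = r match {
    case _: Batchable => true
    case _            => false
  }

  println(isBatchable(batched)) // true
  println(isBatchable(plain))   // false
}
```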
@@ -1,696 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.util

import java.nio.{ ByteBuffer, ByteOrder }

import scala.annotation.tailrec
import scala.collection.LinearSeq
import scala.collection.mutable.ListBuffer
import scala.reflect.ClassTag

import org.apache.pekko.util.Collections.EmptyImmutableSeq

object ByteIterator {
  object ByteArrayIterator {

    protected[pekko] def apply(array: Array[Byte]): ByteArrayIterator =
      new ByteArrayIterator(array, 0, array.length)

    protected[pekko] def apply(array: Array[Byte], from: Int, until: Int): ByteArrayIterator =
      new ByteArrayIterator(array, from, until)

    val empty: ByteArrayIterator = apply(Array.emptyByteArray)
  }

  class ByteArrayIterator private (private var array: Array[Byte], private var from: Int, private var until: Int)
      extends ByteIterator {
    iterator =>

    final def len: Int = until - from

    final def hasNext: Boolean = from < until

    final def head: Byte = array(from)

    final def next(): Byte = {
      if (!hasNext) EmptyImmutableSeq.iterator.next()
      else {
        val i = from; from = from + 1; array(i)
      }
    }

    def clear(): Unit = { this.array = Array.emptyByteArray; from = 0; until = from }

    final override def length: Int = { val l = len; clear(); l }

    final override def ++(that: TraversableOnce[Byte]): ByteIterator = that match {
      case that: ByteIterator =>
        if (that.isEmpty) this
        else if (this.isEmpty) that
        else
          that match {
            case that: ByteArrayIterator =>
              if ((this.array eq that.array) && (this.until == that.from)) {
                this.until = that.until
                that.clear()
                this
              } else {
                val result = MultiByteArrayIterator(List(this, that))
                this.clear()
                result
              }
            case that: MultiByteArrayIterator => this ++: that
          }
      case _ => super.++(that)
    }

    final override def clone: ByteArrayIterator = new ByteArrayIterator(array, from, until)

    final override def take(n: Int): this.type = {
      if (n < len) until = { if (n > 0) from + n else from }
      this
    }

    final override def drop(n: Int): this.type = {
      if (n > 0) from = { if (n < len) from + n else until }
      this
    }

    final override def takeWhile(p: Byte => Boolean): this.type = {
      val prev = from
      dropWhile(p)
      until = from; from = prev
      this
    }

    final override def dropWhile(p: Byte => Boolean): this.type = {
      var stop = false
      while (!stop && hasNext) {
        if (p(array(from))) {
          from = from + 1
        } else {
          stop = true
        }
      }
      this
    }

    final override def copyToArray[B >: Byte](xs: Array[B], start: Int, len: Int): Unit = {
      val n = 0 max ((xs.length - start) min this.len min len)
      Array.copy(this.array, from, xs, start, n)
      this.drop(n)
    }

    final override def toByteString: ByteString = {
      val result =
        if ((from == 0) && (until == array.length)) ByteString.ByteString1C(array)
        else ByteString.ByteString1(array, from, len)
      clear()
      result
    }

    def getBytes(xs: Array[Byte], offset: Int, n: Int): this.type = {
      if (n <= this.len) {
        System.arraycopy(this.array, this.from, xs, offset, n)
        this.drop(n)
      } else EmptyImmutableSeq.iterator.next()
    }

    private def wrappedByteBuffer: ByteBuffer = ByteBuffer.wrap(array, from, len).asReadOnlyBuffer

    def getShorts(xs: Array[Short], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type = {
      wrappedByteBuffer.order(byteOrder).asShortBuffer.get(xs, offset, n); drop(2 * n)
    }

    def getInts(xs: Array[Int], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type = {
      wrappedByteBuffer.order(byteOrder).asIntBuffer.get(xs, offset, n); drop(4 * n)
    }

    def getLongs(xs: Array[Long], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type = {
      wrappedByteBuffer.order(byteOrder).asLongBuffer.get(xs, offset, n); drop(8 * n)
    }

    def getFloats(xs: Array[Float], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type = {
      wrappedByteBuffer.order(byteOrder).asFloatBuffer.get(xs, offset, n); drop(4 * n)
    }

    def getDoubles(xs: Array[Double], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type = {
      wrappedByteBuffer.order(byteOrder).asDoubleBuffer.get(xs, offset, n); drop(8 * n)
    }

    def copyToBuffer(buffer: ByteBuffer): Int = {
      val copyLength = math.min(buffer.remaining, len)
      if (copyLength > 0) {
        buffer.put(array, from, copyLength)
        drop(copyLength)
      }
      copyLength
    }

    def asInputStream: java.io.InputStream = new java.io.InputStream {
      override def available: Int = iterator.len

      def read: Int = if (hasNext) next().toInt & 0xFF else -1

      override def read(b: Array[Byte], off: Int, len: Int): Int = {
        if ((off < 0) || (len < 0) || (off + len > b.length)) throw new IndexOutOfBoundsException
        if (len == 0) 0
        else if (!isEmpty) {
          val nRead = math.min(available, len)
          copyToArray(b, off, nRead)
          nRead
        } else -1
      }

      override def skip(n: Long): Long = {
        val nSkip = math.min(iterator.len, n.toInt)
        iterator.drop(nSkip)
        nSkip
      }
    }
  }

  object MultiByteArrayIterator {
    protected val clearedList: List[ByteArrayIterator] = List(ByteArrayIterator.empty)

    val empty: MultiByteArrayIterator = new MultiByteArrayIterator(Nil)

    protected[pekko] def apply(iterators: LinearSeq[ByteArrayIterator]): MultiByteArrayIterator =
      new MultiByteArrayIterator(iterators)
  }

  class MultiByteArrayIterator private (private var iterators: LinearSeq[ByteArrayIterator]) extends ByteIterator {
    // After normalization:
    // * iterators.isEmpty == false
    // * (!iterator.head.isEmpty || iterators.tail.isEmpty) == true
    private def normalize(): this.type = {
      @tailrec def norm(xs: LinearSeq[ByteArrayIterator]): LinearSeq[ByteArrayIterator] = {
        if (xs.isEmpty) MultiByteArrayIterator.clearedList
        else if (xs.head.isEmpty) norm(xs.tail)
        else xs
      }
      iterators = norm(iterators)
      this
    }
    normalize()

    private def current: ByteArrayIterator = iterators.head
    private def dropCurrent(): Unit = { iterators = iterators.tail }
    final def clear(): Unit = { iterators = MultiByteArrayIterator.empty.iterators }

    final def hasNext: Boolean = current.hasNext

    final def head: Byte = current.head

    final def next(): Byte = {
      val result = current.next()
      normalize()
      result
    }

    final override def len: Int = iterators.foldLeft(0) { _ + _.len }

    final override def length: Int = {
      val result = len
      clear()
      result
    }

    private[pekko] def ++:(that: ByteArrayIterator): this.type = {
      iterators = that +: iterators
      this
    }

    final override def ++(that: TraversableOnce[Byte]): ByteIterator = that match {
      case that: ByteIterator =>
        if (that.isEmpty) this
        else if (this.isEmpty) that
        else {
          that match {
            case that: ByteArrayIterator =>
              iterators = this.iterators :+ that
              that.clear()
              this
            case that: MultiByteArrayIterator =>
              iterators = this.iterators ++ that.iterators
              that.clear()
              this
          }
        }
      case _ => super.++(that)
    }

    final override def clone: MultiByteArrayIterator = {
      val clonedIterators: List[ByteArrayIterator] = iterators.map(_.clone)(collection.breakOut)
      new MultiByteArrayIterator(clonedIterators)
    }

    /** For performance sensitive code, call take() directly on ByteString (it's optimised there) */
    final override def take(n: Int): this.type = {
      var rest = n
      val builder = new ListBuffer[ByteArrayIterator]
      while ((rest > 0) && !iterators.isEmpty) {
        current.take(rest)
        if (current.hasNext) {
          rest -= current.len
          builder += current
        }
        iterators = iterators.tail
      }
      iterators = builder.result
      normalize()
    }

    /** For performance sensitive code, call drop() directly on ByteString (it's optimised there) */
    final override def drop(n: Int): this.type =
      if ((n > 0) && !isEmpty) {
        val nCurrent = math.min(n, current.len)
        current.drop(n)
        val rest = n - nCurrent
        assert(current.isEmpty || (rest == 0))
        normalize()
        drop(rest)
      } else this

    final override def takeWhile(p: Byte => Boolean): this.type = {
      var stop = false
      val builder = new ListBuffer[ByteArrayIterator]
      while (!stop && iterators.nonEmpty) {
        val lastLen = current.len
        current.takeWhile(p)
        if (current.hasNext) builder += current
        if (current.len < lastLen) stop = true
        dropCurrent()
      }
      iterators = builder.result
      normalize()
    }

    @tailrec final override def dropWhile(p: Byte => Boolean): this.type =
      if (!isEmpty) {
        current.dropWhile(p)
        val dropMore = current.isEmpty
        normalize()
        if (dropMore) dropWhile(p) else this
      } else this

    final override def copyToArray[B >: Byte](xs: Array[B], start: Int, len: Int): Unit = {
      var pos = start
      var rest = len
      while ((rest > 0) && !iterators.isEmpty && pos < xs.length) {
        val n = 0 max ((xs.length - pos) min current.len min rest)
        current.copyToArray(xs, pos, n)
        pos += n
        rest -= n
        if (current.isEmpty) {
          dropCurrent()
        }
      }
      normalize()
    }

    override def foreach[@specialized U](f: Byte => U): Unit = {
      iterators.foreach { _.foreach(f) }
      clear()
    }

    final override def toByteString: ByteString = {
      if (iterators.tail.isEmpty) iterators.head.toByteString
      else {
        val result = iterators.foldLeft(ByteString.empty) { _ ++ _.toByteString }
        clear()
        result
      }
    }

    @tailrec protected final def getToArray[A](xs: Array[A], offset: Int, n: Int, elemSize: Int)(getSingle: => A)(
        getMult: (Array[A], Int, Int) => Unit): this.type =
      if (n <= 0) this
      else {
        if (isEmpty) EmptyImmutableSeq.iterator.next()
        val nDone = if (current.len >= elemSize) {
          val nCurrent = math.min(n, current.len / elemSize)
          getMult(xs, offset, nCurrent)
          nCurrent
        } else {
          xs(offset) = getSingle
          1
        }
        normalize()
        getToArray(xs, offset + nDone, n - nDone, elemSize)(getSingle)(getMult)
      }

    def getBytes(xs: Array[Byte], offset: Int, n: Int): this.type =
      getToArray(xs, offset, n, 1) { getByte } { current.getBytes(_, _, _) }

    def getShorts(xs: Array[Short], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type =
      getToArray(xs, offset, n, 2) { getShort(byteOrder) } { current.getShorts(_, _, _)(byteOrder) }

    def getInts(xs: Array[Int], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type =
      getToArray(xs, offset, n, 4) { getInt(byteOrder) } { current.getInts(_, _, _)(byteOrder) }

    def getLongs(xs: Array[Long], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type =
      getToArray(xs, offset, n, 8) { getLong(byteOrder) } { current.getLongs(_, _, _)(byteOrder) }

    def getFloats(xs: Array[Float], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type =
      getToArray(xs, offset, n, 8) { getFloat(byteOrder) } { current.getFloats(_, _, _)(byteOrder) }

    def getDoubles(xs: Array[Double], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type =
      getToArray(xs, offset, n, 8) { getDouble(byteOrder) } { current.getDoubles(_, _, _)(byteOrder) }

    /** For performance sensitive code, call copyToBuffer() directly on ByteString (it's optimised there) */
    override def copyToBuffer(buffer: ByteBuffer): Int = {
      // the fold here is better than indexing into the LinearSeq
      val n = iterators.foldLeft(0) { _ + _.copyToBuffer(buffer) }
      normalize()
      n
    }

    def asInputStream: java.io.InputStream = new java.io.InputStream {
      override def available: Int = current.len

      def read: Int = if (hasNext) next().toInt & 0xFF else -1

      override def read(b: Array[Byte], off: Int, len: Int): Int = {
        val nRead = current.asInputStream.read(b, off, len)
        normalize()
        nRead
      }

      override def skip(n: Long): Long = {
        @tailrec def skipImpl(n: Long, skipped: Long): Long =
          if (n > 0) {
            if (!isEmpty) {
              val m = current.asInputStream.skip(n)
              normalize()
              val newN = n - m
              val newSkipped = skipped + m
              if (newN > 0) skipImpl(newN, newSkipped)
              else newSkipped
            } else 0
          } else 0

        skipImpl(n, 0)
      }
    }
  }
}

/**
 * An iterator over a ByteString.
 */
abstract class ByteIterator extends BufferedIterator[Byte] {
  def len: Int

  def head: Byte

  def next(): Byte

  protected def clear(): Unit

  def ++(that: TraversableOnce[Byte]): ByteIterator =
    if (that.isEmpty) this else ByteIterator.ByteArrayIterator(that.toArray)

  // *must* be overridden by derived classes. This construction is necessary
  // to specialize the return type, as the method is already implemented in
  // the parent class.
  override def clone: ByteIterator =
    throw new UnsupportedOperationException("Method clone is not implemented in ByteIterator")

  override def duplicate: (ByteIterator, ByteIterator) = (this, clone)

  // *must* be overridden by derived classes. This construction is necessary
  // to specialize the return type, as the method is already implemented in
  // the parent class.
  override def take(n: Int): this.type =
    throw new UnsupportedOperationException("Method take is not implemented in ByteIterator")

  // *must* be overridden by derived classes. This construction is necessary
  // to specialize the return type, as the method is already implemented in
  // the parent class.
  override def drop(n: Int): this.type =
    throw new UnsupportedOperationException("Method drop is not implemented in ByteIterator")

  override def slice(from: Int, until: Int): this.type = {
    if (from > 0) drop(from).take(until - from)
    else take(until)
  }

  // *must* be overridden by derived classes. This construction is necessary
  // to specialize the return type, as the method is already implemented in
  // the parent class.
  override def takeWhile(p: Byte => Boolean): this.type =
    throw new UnsupportedOperationException("Method takeWhile is not implemented in ByteIterator")

  // *must* be overridden by derived classes. This construction is necessary
  // to specialize the return type, as the method is already implemented in
  // the parent class.
  override def dropWhile(p: Byte => Boolean): this.type =
    throw new UnsupportedOperationException("Method dropWhile is not implemented in ByteIterator")

  override def span(p: Byte => Boolean): (ByteIterator, ByteIterator) = {
    val that = clone
    this.takeWhile(p)
    that.drop(this.len)
    (this, that)
  }

  override def indexWhere(p: Byte => Boolean): Int = indexWhere(p, 0)

  override def indexWhere(p: Byte => Boolean, from: Int): Int = {
    var index = 0
    while (index < from && hasNext) {
      next()
      index += 1
    }
    var found = false
    while (!found && hasNext) if (p(next())) {
      found = true
    } else {
      index += 1
    }
    if (found) index else -1
  }

  def indexOf(elem: Byte): Int = indexOf(elem, 0)
  def indexOf(elem: Byte, from: Int): Int = indexWhere(_ == elem, from)

  override def indexOf[B >: Byte](elem: B): Int = indexOf[B](elem, 0)
  override def indexOf[B >: Byte](elem: B, from: Int): Int = indexWhere(_ == elem, from)

  def toByteString: ByteString

  override def toSeq: ByteString = toByteString

  override def foreach[@specialized U](f: Byte => U): Unit =
    while (hasNext) f(next())

  override def foldLeft[@specialized B](z: B)(op: (B, Byte) => B): B = {
    var acc = z
    foreach { byte =>
      acc = op(acc, byte)
    }
    acc
  }

  override def toArray[B >: Byte](implicit arg0: ClassTag[B]): Array[B] = {
    val target = new Array[B](len)
    copyToArray(target)
    target
  }

  /**
   * Get a single Byte from this iterator. Identical to next().
   */
  def getByte: Byte = next()

  /**
   * Get a single Short from this iterator.
   */
  def getShort(implicit byteOrder: ByteOrder): Short = {
    if (byteOrder == ByteOrder.BIG_ENDIAN)
      ((next() & 0xFF) << 8 | (next() & 0xFF) << 0).toShort
    else if (byteOrder == ByteOrder.LITTLE_ENDIAN)
      ((next() & 0xFF) << 0 | (next() & 0xFF) << 8).toShort
    else throw new IllegalArgumentException("Unknown byte order " + byteOrder)
  }

  /**
   * Get a single Int from this iterator.
   */
  def getInt(implicit byteOrder: ByteOrder): Int = {
    if (byteOrder == ByteOrder.BIG_ENDIAN)
      ((next() & 0xFF) << 24
      | (next() & 0xFF) << 16
      | (next() & 0xFF) << 8
      | (next() & 0xFF) << 0)
    else if (byteOrder == ByteOrder.LITTLE_ENDIAN)
      ((next() & 0xFF) << 0
      | (next() & 0xFF) << 8
      | (next() & 0xFF) << 16
      | (next() & 0xFF) << 24)
    else throw new IllegalArgumentException("Unknown byte order " + byteOrder)
  }

  /**
   * Get a single Long from this iterator.
   */
  def getLong(implicit byteOrder: ByteOrder): Long = {
    if (byteOrder == ByteOrder.BIG_ENDIAN)
      ((next().toLong & 0xFF) << 56
      | (next().toLong & 0xFF) << 48
      | (next().toLong & 0xFF) << 40
      | (next().toLong & 0xFF) << 32
      | (next().toLong & 0xFF) << 24
      | (next().toLong & 0xFF) << 16
      | (next().toLong & 0xFF) << 8
      | (next().toLong & 0xFF) << 0)
    else if (byteOrder == ByteOrder.LITTLE_ENDIAN)
      ((next().toLong & 0xFF) << 0
      | (next().toLong & 0xFF) << 8
      | (next().toLong & 0xFF) << 16
      | (next().toLong & 0xFF) << 24
      | (next().toLong & 0xFF) << 32
      | (next().toLong & 0xFF) << 40
      | (next().toLong & 0xFF) << 48
      | (next().toLong & 0xFF) << 56)
    else throw new IllegalArgumentException("Unknown byte order " + byteOrder)
  }

  /**
   * Get a Long from this iterator where only the least significant `n`
   * bytes were encoded.
   */
  def getLongPart(n: Int)(implicit byteOrder: ByteOrder): Long = {
    if (byteOrder == ByteOrder.BIG_ENDIAN) {
      var x = 0L
      (1 to n).foreach(_ => x = (x << 8) | (next() & 0xFF))
      x
    } else if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
      var x = 0L
      (0 until n).foreach(i => x |= (next() & 0xFF) << 8 * i)
      x
    } else throw new IllegalArgumentException("Unknown byte order " + byteOrder)
  }

  def getFloat(implicit byteOrder: ByteOrder): Float =
    java.lang.Float.intBitsToFloat(getInt(byteOrder))

  def getDouble(implicit byteOrder: ByteOrder): Double =
    java.lang.Double.longBitsToDouble(getLong(byteOrder))

  /**
   * Get a specific number of Bytes from this iterator. In contrast to
   * copyToArray, this method will fail if this.len < xs.length.
   */
  def getBytes(xs: Array[Byte]): this.type = getBytes(xs, 0, xs.length)

  /**
   * Get a specific number of Bytes from this iterator. In contrast to
   * copyToArray, this method will fail if length < n or if (xs.length - offset) < n.
   */
  def getBytes(xs: Array[Byte], offset: Int, n: Int): this.type

  /**
   * Get a specific number of Bytes from this iterator. In contrast to
   * copyToArray, this method will fail if this.len < n.
   */
  def getBytes(n: Int): Array[Byte] = {
    val bytes = new Array[Byte](n)
    getBytes(bytes, 0, n)
    bytes
  }

  /**
   * Get a ByteString with specific number of Bytes from this iterator. In contrast to
   * copyToArray, this method will fail if this.len < n.
   */
  def getByteString(n: Int): ByteString = {
    val bs = clone.take(n).toByteString
    drop(n)
    bs
  }

  /**
   * Get a number of Shorts from this iterator.
   */
  def getShorts(xs: Array[Short])(implicit byteOrder: ByteOrder): this.type =
    getShorts(xs, 0, xs.length)(byteOrder)

  /**
   * Get a number of Shorts from this iterator.
   */
  def getShorts(xs: Array[Short], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type

  /**
   * Get a number of Ints from this iterator.
   */
  def getInts(xs: Array[Int])(implicit byteOrder: ByteOrder): this.type =
    getInts(xs, 0, xs.length)(byteOrder)

  /**
   * Get a number of Ints from this iterator.
   */
  def getInts(xs: Array[Int], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type

  /**
   * Get a number of Longs from this iterator.
   */
  def getLongs(xs: Array[Long])(implicit byteOrder: ByteOrder): this.type =
    getLongs(xs, 0, xs.length)(byteOrder)

  /**
   * Get a number of Longs from this iterator.
   */
  def getLongs(xs: Array[Long], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type

  /**
   * Get a number of Floats from this iterator.
   */
  def getFloats(xs: Array[Float])(implicit byteOrder: ByteOrder): this.type =
    getFloats(xs, 0, xs.length)(byteOrder)

  /**
   * Get a number of Floats from this iterator.
   */
  def getFloats(xs: Array[Float], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type

  /**
   * Get a number of Doubles from this iterator.
   */
  def getDoubles(xs: Array[Double])(implicit byteOrder: ByteOrder): this.type =
    getDoubles(xs, 0, xs.length)(byteOrder)

  /**
   * Get a number of Doubles from this iterator.
   */
  def getDoubles(xs: Array[Double], offset: Int, n: Int)(implicit byteOrder: ByteOrder): this.type

  /**
   * Copy as many bytes as possible to a ByteBuffer, starting from it's
   * current position. This method will not overflow the buffer.
   *
   * @param buffer a ByteBuffer to copy bytes to
   * @return the number of bytes actually copied
   */
  /** For performance sensitive code, call take() directly on ByteString (it's optimised there) */
  def copyToBuffer(buffer: ByteBuffer): Int

  /**
   * Directly wraps this ByteIterator in an InputStream without copying.
   * Read and skip operations on the stream will advance the iterator
   * accordingly.
   */
  def asInputStream: java.io.InputStream
}
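The multi-byte accessors above assemble values by shifting successive bytes according to the requested `ByteOrder`. A quick stdlib cross-check of the big-endian `getInt` arithmetic:

```scala
import java.nio.{ ByteBuffer, ByteOrder }

object GetIntDemo extends App {
  val bytes = Array[Byte](0x12, 0x34, 0x56, 0x78)

  // manual big-endian assembly, as in the deleted ByteIterator.getInt
  val manual = bytes.foldLeft(0)((acc, b) => (acc << 8) | (b & 0xFF))

  // the same decode via java.nio for comparison
  val viaNio = ByteBuffer.wrap(bytes).order(ByteOrder.BIG_ENDIAN).getInt

  println(f"0x$manual%08X == 0x$viaNio%08X") // 0x12345678 == 0x12345678
}
```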
(File diff suppressed because it is too large.)
@@ -1,562 +0,0 @@ (file deleted)
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.pekko.util

import org.apache.pekko.annotation.InternalStableApi

import scala.compat.java8

import scala.language.implicitConversions
import scala.annotation.nowarn

/**
 * INTERNAL API
 *
 * Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib.
 *
 * DO NOT edit this file manually, its copied over from scala-java8-compat. More specifically
 * scala-java8-compat generates this source from a template, so you have to do +compile in
 * scala-java8-compat and check the src_managed folder in target and then add
 * `@nowarn("msg=never used")` to functions as neccessary.
 */
@InternalStableApi
private[pekko] object FunctionConverters extends java8.Priority1FunctionConverters {
  import java8.functionConverterImpls._

  @inline def asScalaFromBiConsumer[T, U](jf: java.util.function.BiConsumer[T, U]): scala.Function2[T, U, Unit] =
    new FromJavaBiConsumer[T, U](jf)

  @inline def asJavaBiConsumer[T, U](sf: scala.Function2[T, U, Unit]): java.util.function.BiConsumer[T, U] =
    new AsJavaBiConsumer[T, U](sf)

  @inline def asScalaFromBiFunction[T, U, R](jf: java.util.function.BiFunction[T, U, R]): scala.Function2[T, U, R] =
    new FromJavaBiFunction[T, U, R](jf)

  @inline def asJavaBiFunction[T, U, R](sf: scala.Function2[T, U, R]): java.util.function.BiFunction[T, U, R] =
    new AsJavaBiFunction[T, U, R](sf)

  @inline def asScalaFromBiPredicate[T, U](jf: java.util.function.BiPredicate[T, U]): scala.Function2[T, U, Boolean] =
    new FromJavaBiPredicate[T, U](jf)

  @inline def asJavaBiPredicate[T, U](sf: scala.Function2[T, U, Boolean]): java.util.function.BiPredicate[T, U] =
    new AsJavaBiPredicate[T, U](sf)

  @inline def asScalaFromBinaryOperator[T](jf: java.util.function.BinaryOperator[T]): scala.Function2[T, T, T] =
    new FromJavaBinaryOperator[T](jf)

  @inline def asJavaBinaryOperator[T](sf: scala.Function2[T, T, T]): java.util.function.BinaryOperator[T] =
    new AsJavaBinaryOperator[T](sf)

  @inline def asScalaFromBooleanSupplier(jf: java.util.function.BooleanSupplier): scala.Function0[Boolean] =
    new FromJavaBooleanSupplier(jf)

  @inline def asJavaBooleanSupplier(sf: scala.Function0[Boolean]): java.util.function.BooleanSupplier =
    new AsJavaBooleanSupplier(sf)

  @inline def asScalaFromConsumer[T](jf: java.util.function.Consumer[T]): scala.Function1[T, Unit] =
    new FromJavaConsumer[T](jf)

  @inline def asJavaConsumer[T](sf: scala.Function1[T, Unit]): java.util.function.Consumer[T] =
    new AsJavaConsumer[T](sf)

  @inline def asScalaFromDoubleBinaryOperator(
      jf: java.util.function.DoubleBinaryOperator): scala.Function2[Double, Double, Double] =
    new FromJavaDoubleBinaryOperator(jf)

  @inline def asJavaDoubleBinaryOperator(
      sf: scala.Function2[Double, Double, Double]): java.util.function.DoubleBinaryOperator =
    new AsJavaDoubleBinaryOperator(sf)

  @inline def asScalaFromDoubleConsumer(jf: java.util.function.DoubleConsumer): scala.Function1[Double, Unit] =
    new FromJavaDoubleConsumer(jf)

  @inline def asJavaDoubleConsumer(sf: scala.Function1[Double, Unit]): java.util.function.DoubleConsumer =
    new AsJavaDoubleConsumer(sf)

  @inline def asScalaFromDoubleFunction[R](jf: java.util.function.DoubleFunction[R]): scala.Function1[Double, R] =
    new FromJavaDoubleFunction[R](jf)

  @inline def asJavaDoubleFunction[R](sf: scala.Function1[Double, R]): java.util.function.DoubleFunction[R] =
    new AsJavaDoubleFunction[R](sf)

  @inline def asScalaFromDoublePredicate(jf: java.util.function.DoublePredicate): scala.Function1[Double, Boolean] =
    new FromJavaDoublePredicate(jf)

  @inline def asJavaDoublePredicate(sf: scala.Function1[Double, Boolean]): java.util.function.DoublePredicate =
    new AsJavaDoublePredicate(sf)

  @inline def asScalaFromDoubleSupplier(jf: java.util.function.DoubleSupplier): scala.Function0[Double] =
    new FromJavaDoubleSupplier(jf)

  @inline def asJavaDoubleSupplier(sf: scala.Function0[Double]): java.util.function.DoubleSupplier =
    new AsJavaDoubleSupplier(sf)

  @inline def asScalaFromDoubleToIntFunction(jf: java.util.function.DoubleToIntFunction): scala.Function1[Double, Int] =
    new FromJavaDoubleToIntFunction(jf)

  @inline def asJavaDoubleToIntFunction(sf: scala.Function1[Double, Int]): java.util.function.DoubleToIntFunction =
    new AsJavaDoubleToIntFunction(sf)

  @inline def asScalaFromDoubleToLongFunction(
      jf: java.util.function.DoubleToLongFunction): scala.Function1[Double, Long] = new FromJavaDoubleToLongFunction(jf)

  @inline def asJavaDoubleToLongFunction(sf: scala.Function1[Double, Long]): java.util.function.DoubleToLongFunction =
    new AsJavaDoubleToLongFunction(sf)

  @inline def asScalaFromDoubleUnaryOperator(
      jf: java.util.function.DoubleUnaryOperator): scala.Function1[Double, Double] = new FromJavaDoubleUnaryOperator(jf)

  @inline def asJavaDoubleUnaryOperator(sf: scala.Function1[Double, Double]): java.util.function.DoubleUnaryOperator =
    new AsJavaDoubleUnaryOperator(sf)

  @inline def asScalaFromFunction[T, R](jf: java.util.function.Function[T, R]): scala.Function1[T, R] =
    new FromJavaFunction[T, R](jf)

  @inline def asJavaFunction[T, R](sf: scala.Function1[T, R]): java.util.function.Function[T, R] =
    new AsJavaFunction[T, R](sf)

  @inline def asScalaFromIntBinaryOperator(jf: java.util.function.IntBinaryOperator): scala.Function2[Int, Int, Int] =
    new FromJavaIntBinaryOperator(jf)

  @inline def asJavaIntBinaryOperator(sf: scala.Function2[Int, Int, Int]): java.util.function.IntBinaryOperator =
    new AsJavaIntBinaryOperator(sf)

  @inline def asScalaFromIntConsumer(jf: java.util.function.IntConsumer): scala.Function1[Int, Unit] =
    new FromJavaIntConsumer(jf)

  @inline def asJavaIntConsumer(sf: scala.Function1[Int, Unit]): java.util.function.IntConsumer =
    new AsJavaIntConsumer(sf)

  @inline def asScalaFromIntFunction[R](jf: java.util.function.IntFunction[R]): scala.Function1[Int, R] =
    new FromJavaIntFunction[R](jf)

  @inline def asJavaIntFunction[R](sf: scala.Function1[Int, R]): java.util.function.IntFunction[R] =
    new AsJavaIntFunction[R](sf)

  @inline def asScalaFromIntPredicate(jf: java.util.function.IntPredicate): scala.Function1[Int, Boolean] =
    new FromJavaIntPredicate(jf)

  @inline def asJavaIntPredicate(sf: scala.Function1[Int, Boolean]): java.util.function.IntPredicate =
    new AsJavaIntPredicate(sf)

  @inline def asScalaFromIntSupplier(jf: java.util.function.IntSupplier): scala.Function0[Int] =
    new FromJavaIntSupplier(jf)

  @inline def asJavaIntSupplier(sf: scala.Function0[Int]): java.util.function.IntSupplier = new AsJavaIntSupplier(sf)

  @inline def asScalaFromIntToDoubleFunction(jf: java.util.function.IntToDoubleFunction): scala.Function1[Int, Double] =
    new FromJavaIntToDoubleFunction(jf)

  @inline def asJavaIntToDoubleFunction(sf: scala.Function1[Int, Double]): java.util.function.IntToDoubleFunction =
    new AsJavaIntToDoubleFunction(sf)

  @inline def asScalaFromIntToLongFunction(jf: java.util.function.IntToLongFunction): scala.Function1[Int, Long] =
    new FromJavaIntToLongFunction(jf)

  @inline def asJavaIntToLongFunction(sf: scala.Function1[Int, Long]): java.util.function.IntToLongFunction =
    new AsJavaIntToLongFunction(sf)

  @inline def asScalaFromIntUnaryOperator(jf: java.util.function.IntUnaryOperator): scala.Function1[Int, Int] =
    new FromJavaIntUnaryOperator(jf)

  @inline def asJavaIntUnaryOperator(sf: scala.Function1[Int, Int]): java.util.function.IntUnaryOperator =
    new AsJavaIntUnaryOperator(sf)

  @inline def asScalaFromLongBinaryOperator(
      jf: java.util.function.LongBinaryOperator): scala.Function2[Long, Long, Long] = new FromJavaLongBinaryOperator(jf)

  @inline def asJavaLongBinaryOperator(sf: scala.Function2[Long, Long, Long]): java.util.function.LongBinaryOperator =
    new AsJavaLongBinaryOperator(sf)

  @inline def asScalaFromLongConsumer(jf: java.util.function.LongConsumer): scala.Function1[Long, Unit] =
    new FromJavaLongConsumer(jf)

  @inline def asJavaLongConsumer(sf: scala.Function1[Long, Unit]): java.util.function.LongConsumer =
    new AsJavaLongConsumer(sf)

  @inline def asScalaFromLongFunction[R](jf: java.util.function.LongFunction[R]): scala.Function1[Long, R] =
    new FromJavaLongFunction[R](jf)

  @inline def asJavaLongFunction[R](sf: scala.Function1[Long, R]): java.util.function.LongFunction[R] =
    new AsJavaLongFunction[R](sf)

  @inline def asScalaFromLongPredicate(jf: java.util.function.LongPredicate): scala.Function1[Long, Boolean] =
    new FromJavaLongPredicate(jf)

  @inline def asJavaLongPredicate(sf: scala.Function1[Long, Boolean]): java.util.function.LongPredicate =
    new AsJavaLongPredicate(sf)

  @inline def asScalaFromLongSupplier(jf: java.util.function.LongSupplier): scala.Function0[Long] =
    new FromJavaLongSupplier(jf)

  @inline def asJavaLongSupplier(sf: scala.Function0[Long]): java.util.function.LongSupplier =
    new AsJavaLongSupplier(sf)

  @inline def asScalaFromLongToDoubleFunction(
      jf: java.util.function.LongToDoubleFunction): scala.Function1[Long, Double] = new FromJavaLongToDoubleFunction(jf)

  @inline def asJavaLongToDoubleFunction(sf: scala.Function1[Long, Double]): java.util.function.LongToDoubleFunction =
    new AsJavaLongToDoubleFunction(sf)

  @inline def asScalaFromLongToIntFunction(jf: java.util.function.LongToIntFunction): scala.Function1[Long, Int] =
    new FromJavaLongToIntFunction(jf)

  @inline def asJavaLongToIntFunction(sf: scala.Function1[Long, Int]): java.util.function.LongToIntFunction =
    new AsJavaLongToIntFunction(sf)

  @inline def asScalaFromLongUnaryOperator(jf: java.util.function.LongUnaryOperator): scala.Function1[Long, Long] =
    new FromJavaLongUnaryOperator(jf)

  @inline def asJavaLongUnaryOperator(sf: scala.Function1[Long, Long]): java.util.function.LongUnaryOperator =
    new AsJavaLongUnaryOperator(sf)

  @inline def asScalaFromObjDoubleConsumer[T](
      jf: java.util.function.ObjDoubleConsumer[T]): scala.Function2[T, Double, Unit] =
    new FromJavaObjDoubleConsumer[T](jf)

  @inline def asJavaObjDoubleConsumer[T](
      sf: scala.Function2[T, Double, Unit]): java.util.function.ObjDoubleConsumer[T] =
    new AsJavaObjDoubleConsumer[T](sf)

  @inline def asScalaFromObjIntConsumer[T](jf: java.util.function.ObjIntConsumer[T]): scala.Function2[T, Int, Unit] =
    new FromJavaObjIntConsumer[T](jf)

  @inline def asJavaObjIntConsumer[T](sf: scala.Function2[T, Int, Unit]): java.util.function.ObjIntConsumer[T] =
    new AsJavaObjIntConsumer[T](sf)

  @inline def asScalaFromObjLongConsumer[T](jf: java.util.function.ObjLongConsumer[T]): scala.Function2[T, Long, Unit] =
    new FromJavaObjLongConsumer[T](jf)

  @inline def asJavaObjLongConsumer[T](sf: scala.Function2[T, Long, Unit]): java.util.function.ObjLongConsumer[T] =
    new AsJavaObjLongConsumer[T](sf)

  @inline def asScalaFromPredicate[T](jf: java.util.function.Predicate[T]): scala.Function1[T, Boolean] =
    new FromJavaPredicate[T](jf)

  @inline def asJavaPredicate[T](sf: scala.Function1[T, Boolean]): java.util.function.Predicate[T] =
    new AsJavaPredicate[T](sf)

  @inline def asScalaFromSupplier[T](jf: java.util.function.Supplier[T]): scala.Function0[T] =
    new FromJavaSupplier[T](jf)

  @inline def asJavaSupplier[T](sf: scala.Function0[T]): java.util.function.Supplier[T] = new AsJavaSupplier[T](sf)

  @inline def asScalaFromToDoubleBiFunction[T, U](
      jf: java.util.function.ToDoubleBiFunction[T, U]): scala.Function2[T, U, Double] =
    new FromJavaToDoubleBiFunction[T, U](jf)

  @inline def asJavaToDoubleBiFunction[T, U](
      sf: scala.Function2[T, U, Double]): java.util.function.ToDoubleBiFunction[T, U] =
    new AsJavaToDoubleBiFunction[T, U](sf)

  @inline def asScalaFromToDoubleFunction[T](jf: java.util.function.ToDoubleFunction[T]): scala.Function1[T, Double] =
    new FromJavaToDoubleFunction[T](jf)

  @inline def asJavaToDoubleFunction[T](sf: scala.Function1[T, Double]): java.util.function.ToDoubleFunction[T] =
    new AsJavaToDoubleFunction[T](sf)

  @inline def asScalaFromToIntBiFunction[T, U](
      jf: java.util.function.ToIntBiFunction[T, U]): scala.Function2[T, U, Int] = new FromJavaToIntBiFunction[T, U](jf)

  @inline def asJavaToIntBiFunction[T, U](sf: scala.Function2[T, U, Int]): java.util.function.ToIntBiFunction[T, U] =
    new AsJavaToIntBiFunction[T, U](sf)

  @inline def asScalaFromToIntFunction[T](jf: java.util.function.ToIntFunction[T]): scala.Function1[T, Int] =
    new FromJavaToIntFunction[T](jf)

  @inline def asJavaToIntFunction[T](sf: scala.Function1[T, Int]): java.util.function.ToIntFunction[T] =
    new AsJavaToIntFunction[T](sf)

  @inline def asScalaFromToLongBiFunction[T, U](
      jf: java.util.function.ToLongBiFunction[T, U]): scala.Function2[T, U, Long] =
    new FromJavaToLongBiFunction[T, U](jf)

  @inline def asJavaToLongBiFunction[T, U](sf: scala.Function2[T, U, Long]): java.util.function.ToLongBiFunction[T, U] =
    new AsJavaToLongBiFunction[T, U](sf)

  @inline def asScalaFromToLongFunction[T](jf: java.util.function.ToLongFunction[T]): scala.Function1[T, Long] =
    new FromJavaToLongFunction[T](jf)

  @inline def asJavaToLongFunction[T](sf: scala.Function1[T, Long]): java.util.function.ToLongFunction[T] =
    new AsJavaToLongFunction[T](sf)

  @inline def asScalaFromUnaryOperator[T](jf: java.util.function.UnaryOperator[T]): scala.Function1[T, T] =
    new FromJavaUnaryOperator[T](jf)

  @inline def asJavaUnaryOperator[T](sf: scala.Function1[T, T]): java.util.function.UnaryOperator[T] =
    new AsJavaUnaryOperator[T](sf)

  @inline implicit def enrichAsJavaBooleanSupplier(sf: scala.Function0[Boolean]): RichFunction0AsBooleanSupplier =
    new RichFunction0AsBooleanSupplier(sf)

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoubleBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Double])(
      implicit evA0: =:=[A0, Double], evA1: =:=[A1, Double]): RichFunction2AsDoubleBinaryOperator =
    new RichFunction2AsDoubleBinaryOperator(sf.asInstanceOf[scala.Function2[Double, Double, Double]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoubleConsumer[A0](sf: scala.Function1[A0, Unit])(
      implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleConsumer =
    new RichFunction1AsDoubleConsumer(sf.asInstanceOf[scala.Function1[Double, Unit]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoublePredicate[A0](sf: scala.Function1[A0, Boolean])(
      implicit evA0: =:=[A0, Double]): RichFunction1AsDoublePredicate =
    new RichFunction1AsDoublePredicate(sf.asInstanceOf[scala.Function1[Double, Boolean]])

  @inline implicit def enrichAsJavaDoubleSupplier(sf: scala.Function0[Double]): RichFunction0AsDoubleSupplier =
    new RichFunction0AsDoubleSupplier(sf)

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoubleToIntFunction[A0](sf: scala.Function1[A0, Int])(
      implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToIntFunction =
    new RichFunction1AsDoubleToIntFunction(sf.asInstanceOf[scala.Function1[Double, Int]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoubleToLongFunction[A0](sf: scala.Function1[A0, Long])(
      implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToLongFunction =
    new RichFunction1AsDoubleToLongFunction(sf.asInstanceOf[scala.Function1[Double, Long]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaDoubleUnaryOperator[A0](sf: scala.Function1[A0, Double])(
      implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleUnaryOperator =
    new RichFunction1AsDoubleUnaryOperator(sf.asInstanceOf[scala.Function1[Double, Double]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Int])(
      implicit evA0: =:=[A0, Int], evA1: =:=[A1, Int]): RichFunction2AsIntBinaryOperator =
    new RichFunction2AsIntBinaryOperator(sf.asInstanceOf[scala.Function2[Int, Int, Int]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntConsumer[A0](sf: scala.Function1[A0, Unit])(
      implicit evA0: =:=[A0, Int]): RichFunction1AsIntConsumer =
    new RichFunction1AsIntConsumer(sf.asInstanceOf[scala.Function1[Int, Unit]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntPredicate[A0](sf: scala.Function1[A0, Boolean])(
      implicit evA0: =:=[A0, Int]): RichFunction1AsIntPredicate =
    new RichFunction1AsIntPredicate(sf.asInstanceOf[scala.Function1[Int, Boolean]])

  @inline implicit def enrichAsJavaIntSupplier(sf: scala.Function0[Int]): RichFunction0AsIntSupplier =
    new RichFunction0AsIntSupplier(sf)

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntToDoubleFunction[A0](sf: scala.Function1[A0, Double])(
      implicit evA0: =:=[A0, Int]): RichFunction1AsIntToDoubleFunction =
    new RichFunction1AsIntToDoubleFunction(sf.asInstanceOf[scala.Function1[Int, Double]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntToLongFunction[A0](sf: scala.Function1[A0, Long])(
      implicit evA0: =:=[A0, Int]): RichFunction1AsIntToLongFunction =
    new RichFunction1AsIntToLongFunction(sf.asInstanceOf[scala.Function1[Int, Long]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaIntUnaryOperator[A0](sf: scala.Function1[A0, Int])(
      implicit evA0: =:=[A0, Int]): RichFunction1AsIntUnaryOperator =
    new RichFunction1AsIntUnaryOperator(sf.asInstanceOf[scala.Function1[Int, Int]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Long])(
      implicit evA0: =:=[A0, Long], evA1: =:=[A1, Long]): RichFunction2AsLongBinaryOperator =
    new RichFunction2AsLongBinaryOperator(sf.asInstanceOf[scala.Function2[Long, Long, Long]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongConsumer[A0](sf: scala.Function1[A0, Unit])(
      implicit evA0: =:=[A0, Long]): RichFunction1AsLongConsumer =
    new RichFunction1AsLongConsumer(sf.asInstanceOf[scala.Function1[Long, Unit]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongPredicate[A0](sf: scala.Function1[A0, Boolean])(
      implicit evA0: =:=[A0, Long]): RichFunction1AsLongPredicate =
    new RichFunction1AsLongPredicate(sf.asInstanceOf[scala.Function1[Long, Boolean]])

  @inline implicit def enrichAsJavaLongSupplier(sf: scala.Function0[Long]): RichFunction0AsLongSupplier =
    new RichFunction0AsLongSupplier(sf)

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongToDoubleFunction[A0](sf: scala.Function1[A0, Double])(
      implicit evA0: =:=[A0, Long]): RichFunction1AsLongToDoubleFunction =
    new RichFunction1AsLongToDoubleFunction(sf.asInstanceOf[scala.Function1[Long, Double]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongToIntFunction[A0](sf: scala.Function1[A0, Int])(
      implicit evA0: =:=[A0, Long]): RichFunction1AsLongToIntFunction =
    new RichFunction1AsLongToIntFunction(sf.asInstanceOf[scala.Function1[Long, Int]])

  @nowarn("msg=never used")
  @inline implicit def enrichAsJavaLongUnaryOperator[A0](sf: scala.Function1[A0, Long])(
      implicit evA0: =:=[A0, Long]): RichFunction1AsLongUnaryOperator =
    new RichFunction1AsLongUnaryOperator(sf.asInstanceOf[scala.Function1[Long, Long]])

  @inline implicit def enrichAsScalaFromBiConsumer[T, U](
      jf: java.util.function.BiConsumer[T, U]): RichBiConsumerAsFunction2[T, U] =
    new RichBiConsumerAsFunction2[T, U](jf)

  @inline implicit def enrichAsScalaFromBiFunction[T, U, R](
      jf: java.util.function.BiFunction[T, U, R]): RichBiFunctionAsFunction2[T, U, R] =
    new RichBiFunctionAsFunction2[T, U, R](jf)

  @inline implicit def enrichAsScalaFromBiPredicate[T, U](
      jf: java.util.function.BiPredicate[T, U]): RichBiPredicateAsFunction2[T, U] =
    new RichBiPredicateAsFunction2[T, U](jf)

  @inline implicit def enrichAsScalaFromBinaryOperator[T](
      jf: java.util.function.BinaryOperator[T]): RichBinaryOperatorAsFunction2[T] =
|
||||
new RichBinaryOperatorAsFunction2[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromBooleanSupplier(
|
||||
jf: java.util.function.BooleanSupplier): RichBooleanSupplierAsFunction0 = new RichBooleanSupplierAsFunction0(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromConsumer[T](jf: java.util.function.Consumer[T]): RichConsumerAsFunction1[T] =
|
||||
new RichConsumerAsFunction1[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleBinaryOperator(
|
||||
jf: java.util.function.DoubleBinaryOperator): RichDoubleBinaryOperatorAsFunction2 =
|
||||
new RichDoubleBinaryOperatorAsFunction2(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleConsumer(
|
||||
jf: java.util.function.DoubleConsumer): RichDoubleConsumerAsFunction1 = new RichDoubleConsumerAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleFunction[R](
|
||||
jf: java.util.function.DoubleFunction[R]): RichDoubleFunctionAsFunction1[R] =
|
||||
new RichDoubleFunctionAsFunction1[R](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoublePredicate(
|
||||
jf: java.util.function.DoublePredicate): RichDoublePredicateAsFunction1 = new RichDoublePredicateAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleSupplier(
|
||||
jf: java.util.function.DoubleSupplier): RichDoubleSupplierAsFunction0 = new RichDoubleSupplierAsFunction0(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleToIntFunction(
|
||||
jf: java.util.function.DoubleToIntFunction): RichDoubleToIntFunctionAsFunction1 =
|
||||
new RichDoubleToIntFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleToLongFunction(
|
||||
jf: java.util.function.DoubleToLongFunction): RichDoubleToLongFunctionAsFunction1 =
|
||||
new RichDoubleToLongFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromDoubleUnaryOperator(
|
||||
jf: java.util.function.DoubleUnaryOperator): RichDoubleUnaryOperatorAsFunction1 =
|
||||
new RichDoubleUnaryOperatorAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromFunction[T, R](
|
||||
jf: java.util.function.Function[T, R]): RichFunctionAsFunction1[T, R] = new RichFunctionAsFunction1[T, R](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntBinaryOperator(
|
||||
jf: java.util.function.IntBinaryOperator): RichIntBinaryOperatorAsFunction2 =
|
||||
new RichIntBinaryOperatorAsFunction2(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntConsumer(jf: java.util.function.IntConsumer): RichIntConsumerAsFunction1 =
|
||||
new RichIntConsumerAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntFunction[R](
|
||||
jf: java.util.function.IntFunction[R]): RichIntFunctionAsFunction1[R] = new RichIntFunctionAsFunction1[R](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntPredicate(jf: java.util.function.IntPredicate): RichIntPredicateAsFunction1 =
|
||||
new RichIntPredicateAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntSupplier(jf: java.util.function.IntSupplier): RichIntSupplierAsFunction0 =
|
||||
new RichIntSupplierAsFunction0(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntToDoubleFunction(
|
||||
jf: java.util.function.IntToDoubleFunction): RichIntToDoubleFunctionAsFunction1 =
|
||||
new RichIntToDoubleFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntToLongFunction(
|
||||
jf: java.util.function.IntToLongFunction): RichIntToLongFunctionAsFunction1 =
|
||||
new RichIntToLongFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromIntUnaryOperator(
|
||||
jf: java.util.function.IntUnaryOperator): RichIntUnaryOperatorAsFunction1 =
|
||||
new RichIntUnaryOperatorAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongBinaryOperator(
|
||||
jf: java.util.function.LongBinaryOperator): RichLongBinaryOperatorAsFunction2 =
|
||||
new RichLongBinaryOperatorAsFunction2(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongConsumer(jf: java.util.function.LongConsumer): RichLongConsumerAsFunction1 =
|
||||
new RichLongConsumerAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongFunction[R](
|
||||
jf: java.util.function.LongFunction[R]): RichLongFunctionAsFunction1[R] = new RichLongFunctionAsFunction1[R](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongPredicate(
|
||||
jf: java.util.function.LongPredicate): RichLongPredicateAsFunction1 = new RichLongPredicateAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongSupplier(jf: java.util.function.LongSupplier): RichLongSupplierAsFunction0 =
|
||||
new RichLongSupplierAsFunction0(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongToDoubleFunction(
|
||||
jf: java.util.function.LongToDoubleFunction): RichLongToDoubleFunctionAsFunction1 =
|
||||
new RichLongToDoubleFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongToIntFunction(
|
||||
jf: java.util.function.LongToIntFunction): RichLongToIntFunctionAsFunction1 =
|
||||
new RichLongToIntFunctionAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromLongUnaryOperator(
|
||||
jf: java.util.function.LongUnaryOperator): RichLongUnaryOperatorAsFunction1 =
|
||||
new RichLongUnaryOperatorAsFunction1(jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromObjDoubleConsumer[T](
|
||||
jf: java.util.function.ObjDoubleConsumer[T]): RichObjDoubleConsumerAsFunction2[T] =
|
||||
new RichObjDoubleConsumerAsFunction2[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromObjIntConsumer[T](
|
||||
jf: java.util.function.ObjIntConsumer[T]): RichObjIntConsumerAsFunction2[T] =
|
||||
new RichObjIntConsumerAsFunction2[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromObjLongConsumer[T](
|
||||
jf: java.util.function.ObjLongConsumer[T]): RichObjLongConsumerAsFunction2[T] =
|
||||
new RichObjLongConsumerAsFunction2[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromPredicate[T](jf: java.util.function.Predicate[T]): RichPredicateAsFunction1[T] =
|
||||
new RichPredicateAsFunction1[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromSupplier[T](jf: java.util.function.Supplier[T]): RichSupplierAsFunction0[T] =
|
||||
new RichSupplierAsFunction0[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToDoubleBiFunction[T, U](
|
||||
jf: java.util.function.ToDoubleBiFunction[T, U]): RichToDoubleBiFunctionAsFunction2[T, U] =
|
||||
new RichToDoubleBiFunctionAsFunction2[T, U](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToDoubleFunction[T](
|
||||
jf: java.util.function.ToDoubleFunction[T]): RichToDoubleFunctionAsFunction1[T] =
|
||||
new RichToDoubleFunctionAsFunction1[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToIntBiFunction[T, U](
|
||||
jf: java.util.function.ToIntBiFunction[T, U]): RichToIntBiFunctionAsFunction2[T, U] =
|
||||
new RichToIntBiFunctionAsFunction2[T, U](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToIntFunction[T](
|
||||
jf: java.util.function.ToIntFunction[T]): RichToIntFunctionAsFunction1[T] =
|
||||
new RichToIntFunctionAsFunction1[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToLongBiFunction[T, U](
|
||||
jf: java.util.function.ToLongBiFunction[T, U]): RichToLongBiFunctionAsFunction2[T, U] =
|
||||
new RichToLongBiFunctionAsFunction2[T, U](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromToLongFunction[T](
|
||||
jf: java.util.function.ToLongFunction[T]): RichToLongFunctionAsFunction1[T] =
|
||||
new RichToLongFunctionAsFunction1[T](jf)
|
||||
|
||||
@inline implicit def enrichAsScalaFromUnaryOperator[T](
|
||||
jf: java.util.function.UnaryOperator[T]): RichUnaryOperatorAsFunction1[T] =
|
||||
new RichUnaryOperatorAsFunction1[T](jf)
|
||||
|
||||
}
|
||||
|
|
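The converters above only delegate to scala-java8-compat. For reference, a minimal sketch of what replaces them on Scala 2.13+, assuming the standard library's `scala.jdk.FunctionConverters` (not part of this diff):

```
import java.util.function.{ Function => JFunction }
import scala.jdk.FunctionConverters._

object FunctionConvertersSketch {
  val twice: Int => Int = _ * 2
  // Scala function -> java.util.function.Function via the 2.13+ stdlib
  val jf: JFunction[Int, Int] = twice.asJava
  // ...and back to a scala.Function1
  val sf: Int => Int = jf.asScala
}
```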
@ -1,43 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.pekko.util

import org.apache.pekko.annotation.InternalStableApi

import java.util.concurrent.CompletionStage
import scala.concurrent.Future

/**
 * INTERNAL API
 *
 * Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
 */
@InternalStableApi
private[pekko] object FutureConverters {
  @inline final def asJava[T](f: Future[T]): CompletionStage[T] = scala.compat.java8.FutureConverters.toJava(f)

  implicit final class FutureOps[T](private val f: Future[T]) extends AnyVal {
    @inline def asJava: CompletionStage[T] = FutureConverters.asJava(f)
  }

  @inline final def asScala[T](cs: CompletionStage[T]): Future[T] = scala.compat.java8.FutureConverters.toScala(cs)

  implicit final class CompletionStageOps[T](private val cs: CompletionStage[T]) extends AnyVal {
    @inline def asScala: Future[T] = FutureConverters.asScala(cs)
  }
}
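As the doc comment notes, everything in this shim exists in the 2.13+ standard library. A minimal sketch of the stdlib equivalent, assuming `scala.jdk.FutureConverters`:

```
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import scala.jdk.FutureConverters._

object FutureConvertersSketch {
  // Future -> CompletionStage and back, using the 2.13+ stdlib extension methods
  def roundTrip(f: Future[Int]): Future[Int] = {
    val cs: CompletionStage[Int] = f.asJava
    cs.asScala
  }
}
```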
@ -1,91 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.pekko.util

import org.apache.pekko.annotation.InternalStableApi

import java.util._

/**
 * INTERNAL API
 *
 * Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
 */
@InternalStableApi
private[pekko] object OptionConverters {
  import scala.compat.java8.OptionConverters.SpecializerOfOptions
  import scala.compat.java8.OptionConverters._

  @inline final def toScala[A](o: Optional[A]): Option[A] = scala.compat.java8.OptionConverters.toScala(o)

  // The rest of the .toScala methods that work with OptionalDouble/OptionalInt/OptionalLong have to be manually
  // redefined because the scala.compat.java8.OptionConverters.toScala variants work with scala.lang primitive types
  // whereas scala.jdk.javaapi.OptionConverters.toScala works with java.lang primitive types. Since the primary
  // use case of these functions is calling from within Java code, it's preferable to return Java primitives, see
  // https://github.com/scala/bug/issues/4214
  def toScala(o: OptionalDouble): Option[java.lang.Double] = if (o.isPresent) Some(o.getAsDouble) else None

  def toScala(o: OptionalInt): Option[java.lang.Integer] = if (o.isPresent) Some(o.getAsInt) else None

  def toScala(o: OptionalLong): Option[java.lang.Long] = if (o.isPresent) Some(o.getAsLong) else None

  @inline final def toJava[A](o: Option[A]): Optional[A] = scala.compat.java8.OptionConverters.toJava(o)

  implicit final class RichOptional[A](private val o: java.util.Optional[A]) extends AnyVal {
    @inline def toScala: Option[A] = scala.compat.java8.OptionConverters.RichOptionalGeneric(o).asScala

    @inline def toJavaPrimitive[O](implicit specOp: SpecializerOfOptions[A, O]): O =
      scala.compat.java8.OptionConverters.RichOptionalGeneric(o).asPrimitive
  }

  implicit final class RichOption[A](private val o: Option[A]) extends AnyVal {
    @inline def toJava: Optional[A] = scala.compat.java8.OptionConverters.RichOptionForJava8(o).asJava

    @inline def toJavaPrimitive[O](implicit specOp: SpecializerOfOptions[A, O]): O =
      scala.compat.java8.OptionConverters.RichOptionForJava8(o).asPrimitive
  }

  implicit class RichOptionalDouble(private val o: OptionalDouble) extends AnyVal {

    /** Convert a Java `OptionalDouble` to a Scala `Option` */
    @inline def toScala: Option[Double] = scala.compat.java8.OptionConverters.RichOptionalDouble(o).asScala

    /** Convert a Java `OptionalDouble` to a generic Java `Optional` */
    @inline def toJavaGeneric: Optional[Double] = scala.compat.java8.OptionConverters.RichOptionalDouble(o).asGeneric
  }

  /** Provides conversions from `OptionalInt` to Scala `Option` and the generic `Optional` */
  implicit class RichOptionalInt(private val o: OptionalInt) extends AnyVal {

    /** Convert a Java `OptionalInt` to a Scala `Option` */
    @inline def toScala: Option[Int] = scala.compat.java8.OptionConverters.RichOptionalInt(o).asScala

    /** Convert a Java `OptionalInt` to a generic Java `Optional` */
    @inline def toJavaGeneric: Optional[Int] = scala.compat.java8.OptionConverters.RichOptionalInt(o).asGeneric
  }

  /** Provides conversions from `OptionalLong` to Scala `Option` and the generic `Optional` */
  implicit class RichOptionalLong(private val o: OptionalLong) extends AnyVal {

    /** Convert a Java `OptionalLong` to a Scala `Option` */
    @inline def toScala: Option[Long] = scala.compat.java8.OptionConverters.RichOptionalLong(o).asScala

    /** Convert a Java `OptionalLong` to a generic Java `Optional` */
    @inline def toJavaGeneric: Optional[Long] = scala.compat.java8.OptionConverters.RichOptionalLong(o).asGeneric
  }
}
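On 2.13+ the same conversions live in `scala.jdk.OptionConverters`. A hedged sketch of the stdlib replacement (names outside this diff are from the standard library, not this commit):

```
import java.util.{ Optional, OptionalInt }
import scala.jdk.OptionConverters._

object OptionConvertersSketch {
  val opt: Option[String] = Optional.of("pekko").toScala // Java -> Scala
  val jOpt: Optional[String] = Some("pekko").toJava      // Scala -> Java
  // specialized Java optionals convert too
  val fromInt: Option[Int] = OptionalInt.of(42).toScala
}
```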
@ -1,31 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2018-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.util.ccompat

import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.Builder

/**
 * INTERNAL API
 *
 * Based on https://github.com/scala/scala-collection-compat/blob/master/compat/src/main/scala-2.11_2.12/scala/collection/compat/CompatImpl.scala
 * but reproduced here so we don't need to add a dependency on this library. It contains much more than we need right now, and is
 * not promising binary compatibility yet at the time of writing.
 */
private[ccompat] object CompatImpl {
  def simpleCBF[A, C](f: => Builder[A, C]): CanBuildFrom[Any, A, C] = new CanBuildFrom[Any, A, C] {
    def apply(from: Any): Builder[A, C] = apply()
    def apply(): Builder[A, C] = f
  }
}
@ -1,27 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2019-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.util.ccompat

import scala.annotation.Annotation

import org.apache.pekko.annotation.InternalApi

/**
 * INTERNAL API
 *
 * Annotation to mark files that need ccompat to be imported for Scala 2.11 and/or 2.12,
 * but not 2.13. Gets rid of the 'unused import' warning on 2.13.
 */
@InternalApi
private[pekko] class ccompatUsedUntil213 extends Annotation
@ -1,154 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2018-2022 Lightbend Inc. <https://www.lightbend.com>
 */

/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package org.apache.pekko.util

import scala.{ collection => c }
import scala.collection.{ immutable => i, mutable => m, GenTraversable, IterableView }
import scala.collection.generic.{ CanBuildFrom, GenericCompanion, Sorted, SortedSetFactory }
import scala.language.higherKinds
import scala.language.implicitConversions

/**
 * INTERNAL API
 *
 * Based on https://github.com/scala/scala-collection-compat/blob/master/compat/src/main/scala-2.11_2.12/scala/collection/compat/PackageShared.scala
 * but reproduced here so we don't need to add a dependency on this library. It contains much more than we need right now, and is
 * not promising binary compatibility yet at the time of writing.
 */
package object ccompat {
  import CompatImpl._

  /**
   * A factory that builds a collection of type `C` with elements of type `A`.
   *
   * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.)
   * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.)
   */
  private[pekko] type Factory[-A, +C] = CanBuildFrom[Nothing, A, C]

  private[pekko] implicit final class FactoryOps[-A, +C](private val factory: Factory[A, C]) {

    /**
     * @return A collection of type `C` containing the same elements
     *         as the source collection `it`.
     * @param it Source collection
     */
    def fromSpecific(it: TraversableOnce[A]): C = (factory() ++= it).result()

    /**
     * Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer.
     * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections.
     */
    def newBuilder: m.Builder[A, C] = factory()
  }

  private[pekko] implicit def genericCompanionToCBF[A, CC[X] <: GenTraversable[X]](
      fact: GenericCompanion[CC]): CanBuildFrom[Any, A, CC[A]] =
    simpleCBF(fact.newBuilder[A])

  private[pekko] implicit def sortedSetCompanionToCBF[
      A: Ordering, CC[X] <: c.SortedSet[X] with c.SortedSetLike[X, CC[X]]](
      fact: SortedSetFactory[CC]): CanBuildFrom[Any, A, CC[A]] =
    simpleCBF(fact.newBuilder[A])

  private[ccompat] def build[T, CC](builder: m.Builder[T, CC], source: TraversableOnce[T]): CC = {
    builder ++= source
    builder.result()
  }

  private[pekko] implicit final class ImmutableSortedMapExtensions(private val fact: i.SortedMap.type) extends AnyVal {
    def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.SortedMap[K, V] =
      build(i.SortedMap.newBuilder[K, V], source)
  }

  private[pekko] implicit final class ImmutableTreeMapExtensions(private val fact: i.TreeMap.type) extends AnyVal {
    def from[K: Ordering, V](source: TraversableOnce[(K, V)]): i.TreeMap[K, V] =
      build(i.TreeMap.newBuilder[K, V], source)
  }

  private[pekko] implicit final class IterableExtensions(private val fact: Iterable.type) extends AnyVal {
    // derived from https://github.com/scala/scala/blob/0842f23f6017f93160b115b8bf29ec5347cdbe94/src/library/scala/Predef.scala#L356-L361
    // Apache License 2.0 (see Scala license header at top of this file)
    def single[A](a: A): Iterable[A] = new Iterable[A] {
      override def iterator = Iterator.single(a)
      override def sizeHintIfCheap: Int = 1
      override def hasDefiniteSize: Boolean = true
      override def head = a
      override def headOption = Some(a)
      override def last = a
      override def lastOption = Some(a)
      override def view = new IterableView[A, Iterable[A]] {
        override def iterator: Iterator[A] = Iterator.single(a)
        override def sizeHintIfCheap: Int = 1
        override def hasDefiniteSize: Boolean = true
        override protected def underlying: Iterable[A] = this
      }
      override def take(n: Int) = if (n > 0) this else Iterable.empty
      override def takeRight(n: Int) = if (n > 0) this else Iterable.empty
      override def drop(n: Int) = if (n > 0) Iterable.empty else this
      override def dropRight(n: Int) = if (n > 0) Iterable.empty else this
      override def tail = Iterable.empty
      override def init = Iterable.empty
    }
  }

  private[pekko] implicit final class SortedExtensionMethods[K, T <: Sorted[K, T]](private val fact: Sorted[K, T]) {
    def rangeFrom(from: K): T = fact.from(from)
    def rangeTo(to: K): T = fact.to(to)
    def rangeUntil(until: K): T = fact.until(until)
  }

  // This really belongs into scala.collection but there's already a package object
  // in scala-library so we can't add to it
  type IterableOnce[+X] = c.TraversableOnce[X]
  val IterableOnce = c.TraversableOnce

  implicit def toMapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]](
      self: IterableView[(K, V), C]): MapViewExtensionMethods[K, V, C] =
    new MapViewExtensionMethods[K, V, C](self)

  implicit final class ImmutableSortedSetOps[A](val real: i.SortedSet[A]) extends AnyVal {
    def unsorted: i.Set[A] = real
  }

  object JavaConverters extends scala.collection.convert.DecorateAsJava with scala.collection.convert.DecorateAsScala

  implicit def toTraversableOnceExtensionMethods[A](self: TraversableOnce[A]): TraversableOnceExtensionMethods[A] =
    new TraversableOnceExtensionMethods[A](self)
}

final class TraversableOnceExtensionMethods[A](private val self: c.TraversableOnce[A]) extends AnyVal {
  def iterator: Iterator[A] = self.toIterator
}

final class MapViewExtensionMethods[K, V, C <: scala.collection.Map[K, V]](
    private val self: IterableView[(K, V), C]) extends AnyVal {
  def mapValues[W, That](f: V => W)(implicit bf: CanBuildFrom[IterableView[(K, V), C], (K, W), That]): That =
    self.map[(K, W), That] { case (k, v) => (k, f(v)) }

  def filterKeys(p: K => Boolean): IterableView[(K, V), C] =
    self.filter { case (k, _) => p(k) }
}
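Everything the ccompat package object backfills onto 2.12 is built into the 2.13 collections. A hedged sketch of the stdlib counterparts (`scala.collection.Factory`, `SortedMap.from`, and view-based `mapValues`/`filterKeys`; the object and method bodies here are illustrative, not part of this diff):

```
import scala.collection.Factory
import scala.collection.immutable.SortedMap

object CcompatSketch {
  // Factory replaces CanBuildFrom-based building on 2.13
  def toCollection[A, C](xs: Iterator[A])(implicit factory: Factory[A, C]): C =
    factory.fromSpecific(xs)

  // SortedMap.from and view-based mapValues/filterKeys are part of the 2.13 API
  val m = SortedMap.from(List(1 -> "a", 2 -> "b"))
  val doubled = m.view.mapValues(_ * 2).toMap
  val filtered = m.view.filterKeys(_ > 1).toMap
}
```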
@ -1,27 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2019-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.util.ccompat

import scala.annotation.Annotation

import org.apache.pekko.annotation.InternalApi

/**
 * INTERNAL API
 *
 * Annotation to mark files that need ccompat to be imported for Scala 2.11 and/or 2.12,
 * but not 2.13. Gets rid of the 'unused import' warning on 2.13.
 */
@InternalApi
private[pekko] class ccompatUsedUntil213 extends Annotation
@ -1,31 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.compat

import org.apache.pekko.annotation.InternalApi

/**
 * INTERNAL API
 *
 * Compatibility wrapper for `scala.PartialFunction` to be able to compile the same code
 * against Scala 2.12, 2.13, 3.0
 *
 * Remove these classes as soon as support for Scala 2.12 is dropped!
 */
@InternalApi private[pekko] object PartialFunction {

  @inline def fromFunction[A, B](f: A => B): scala.PartialFunction[A, B] =
    scala.PartialFunction.fromFunction(f)

}
@ -1,31 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * license agreements; and to You under the Apache License, version 2.0:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * This file is part of the Apache Pekko project, which was derived from Akka.
 */

/*
 * Copyright (C) 2009-2022 Lightbend Inc. <https://www.lightbend.com>
 */

package org.apache.pekko.compat

import org.apache.pekko.annotation.InternalApi

/**
 * INTERNAL API
 *
 * Compatibility wrapper for `scala.PartialFunction` to be able to compile the same code
 * against Scala 2.12, 2.13, 3.0
 *
 * Remove these classes as soon as support for Scala 2.12 is dropped!
 */
@InternalApi private[pekko] object PartialFunction {

  inline def fromFunction[A, B](f: A => B): scala.PartialFunction[A, B] =
    scala.PartialFunction.fromFunction(f)

}
@ -180,7 +180,7 @@ abstract class AbstractFSM[S, D] extends FSM[S, D] {
   * called, not only the first one matching.</b>
   */
  final def onTransition(transitionHandler: Procedure2[S, S]): Unit = {
-   val pf: PartialFunction[(S, S), Unit] = pekko.compat.PartialFunction.fromFunction(transitionHandler(_: S, _: S))
+   val pf: PartialFunction[(S, S), Unit] = PartialFunction.fromFunction(transitionHandler(_: S, _: S))
    super.onTransition(pf)
  }
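The call site above no longer needs the compat wrapper because `scala.PartialFunction.fromFunction` is part of the standard library on Scala 2.13 and Scala 3. A minimal sketch:

```
object PartialFunctionSketch {
  val double: Int => Int = _ * 2
  // lift a total function into a PartialFunction via the stdlib
  val pf: PartialFunction[Int, Int] = PartialFunction.fromFunction(double)
  assert(pf.isDefinedAt(21) && pf(21) == 42)
}
```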
@ -31,7 +31,6 @@ import pekko.dispatch.ExecutionContexts
import pekko.pattern.ask
import pekko.routing.MurmurHash
import pekko.util.{ Helpers, JavaDurationConverters, Timeout }
import pekko.util.ccompat._
import pekko.util.FutureConverters

/**

@ -39,7 +38,6 @@ import pekko.util.FutureConverters
 * allowing for broadcasting of messages to that section.
 */
@SerialVersionUID(1L)
@ccompatUsedUntil213
abstract class ActorSelection extends Serializable {
  this: ScalaActorSelection =>

@ -19,7 +19,6 @@ import pekko.event.Logging
import pekko.event.Logging.{ Error, LogEvent, LogLevel }
import pekko.japi.Util.immutableSeq
import pekko.util.JavaDurationConverters._
import pekko.util.ccompat._

import java.lang.reflect.InvocationTargetException
import java.lang.{ Iterable => JIterable }

@ -44,7 +43,6 @@ private[pekko] case object ChildNameReserved extends ChildStats
 * ChildRestartStats is the statistics kept by every parent Actor for every child Actor
 * and is used for SupervisorStrategies to know how to deal with problems that occur for the children.
 */
@ccompatUsedUntil213
final case class ChildRestartStats(
    child: ActorRef,
    var maxNrOfRetriesCount: Int = 0,

@ -26,7 +26,6 @@ import pekko.annotation.DoNotInherit
import pekko.annotation.InternalApi
import pekko.event.Logging
import pekko.io.dns.DnsProtocol
import pekko.util.ccompat._
import pekko.util.unused

/**

@ -34,7 +33,6 @@ import pekko.util.unused
 *
 * This used to be a supported extension point but will be removed in future versions of Apache Pekko.
 */
@ccompatUsedUntil213
@DoNotInherit
abstract class Dns {

@ -26,7 +26,6 @@ import pekko.actor._
import pekko.io.Inet.{ SoJavaFactories, SocketOption }
import pekko.util.ByteString
import pekko.util.Helpers.Requiring
import pekko.util.ccompat._

/**
 * UDP Extension for Akka’s IO layer.

@ -40,7 +39,6 @@ import pekko.util.ccompat._
 *
 * The Java API for generating UDP commands is available at [[UdpMessage]].
 */
@ccompatUsedUntil213
object Udp extends ExtensionId[UdpExt] with ExtensionIdProvider {

  override def lookup = Udp

@ -25,7 +25,6 @@ import pekko.actor._
import pekko.io.Inet.SocketOption
import pekko.io.Udp.UdpSettings
import pekko.util.ByteString
import pekko.util.ccompat._

/**
 * UDP Extension for Akka’s IO layer.

@ -39,7 +38,6 @@ import pekko.util.ccompat._
 *
 * The Java API for generating UDP commands is available at [[UdpConnectedMessage]].
 */
@ccompatUsedUntil213
object UdpConnected extends ExtensionId[UdpConnectedExt] with ExtensionIdProvider {

  override def lookup = UdpConnected

@ -49,12 +49,10 @@ import pekko.io.dns.internal.{ ResolvConf, ResolvConfParser }
import pekko.util.Helpers
import pekko.util.Helpers.Requiring
import pekko.util.JavaDurationConverters._
import pekko.util.ccompat._
import pekko.util.ccompat.JavaConverters._

/** INTERNAL API */
@InternalApi
@ccompatUsedUntil213
private[dns] final class DnsSettings(system: ExtendedActorSystem, c: Config) {

  import DnsSettings._

@ -35,8 +35,6 @@ import java.lang.Integer.{ rotateLeft => rotl }

import scala.annotation.nowarn

import org.apache.pekko.util.ccompat._

/**
 * An object designed to generate well-distributed non-cryptographic
 * hashes. It is designed to hash a collection of integers; along with

@ -46,7 +44,6 @@ import org.apache.pekko.util.ccompat._
 * incorporate a new integer) to update the values. Only one method
 * needs to be called to finalize the hash.
 */
@ccompatUsedUntil213
object MurmurHash {
  // Magic values used for MurmurHash's 32 bit hash.
  // Don't change these without consulting a hashing expert!
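For context, the incremental mix-then-finalize usage the MurmurHash doc comment describes looks roughly like the sketch below; the helper names (`startHash`, `extendHash`, `finalizeHash`, and the magic-value helpers) are assumed to keep their Akka-era signatures:

```
import org.apache.pekko.routing.MurmurHash._

object MurmurHashSketch {
  // incrementally mix a collection of ints, then finalize once
  def hashInts(values: Seq[Int]): Int = {
    var h = startHash(values.length)
    var a = startMagicA
    var b = startMagicB
    for (v <- values) {
      h = extendHash(h, v, a, b)
      a = nextMagicA(a)
      b = nextMagicB(b)
    }
    finalizeHash(h)
  }
}
```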
@ -30,7 +30,6 @@ import pekko.actor.SupervisorStrategy
import pekko.actor.Terminated
import pekko.dispatch.Envelope
import pekko.dispatch.MessageDispatcher
import pekko.util.ccompat._

/**
 * INTERNAL API

@ -46,7 +45,6 @@ private[pekko] object RoutedActorCell {
/**
 * INTERNAL API
 */
@ccompatUsedUntil213
private[pekko] class RoutedActorCell(
    _system: ActorSystemImpl,
    _ref: InternalActorRef,

@ -32,9 +32,7 @@ import org.apache.pekko
import pekko.actor._
import pekko.annotation.InternalApi
import pekko.event.{ LogMarker, Logging, LoggingAdapter }
import pekko.util.ccompat._

@ccompatUsedUntil213
object Serialization {

  /**

@ -15,8 +15,6 @@ package org.apache.pekko.util

import scala.collection.immutable

import org.apache.pekko.util.ccompat._

/**
 * Typeclass which describes a classification hierarchy. Observe the contract between `isEqual` and `isSubclass`!
 */

@ -88,7 +86,6 @@ private[pekko] object SubclassifiedIndex {
 * cache, e.g. HashMap, is faster than tree traversal which must use linear
 * scan at each level. Therefore, no value traversals are published.
 */
@ccompatUsedUntil213
private[pekko] class SubclassifiedIndex[K, V] private (protected var values: Set[V])(
    implicit sc: Subclassification[K]) {
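A minimal `Subclassification` instance illustrating the `isEqual`/`isSubclass` contract mentioned in the doc comment above; the class-based instance here is an illustrative assumption, not code from this diff:

```
import org.apache.pekko.util.Subclassification

// classify by runtime class: equal when the classes match,
// subclass when y is assignable from x
final class ClassSubclassification extends Subclassification[Class[_]] {
  override def isEqual(x: Class[_], y: Class[_]): Boolean = x == y
  override def isSubclass(x: Class[_], y: Class[_]): Boolean = y.isAssignableFrom(x)
}
```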
@ -27,13 +27,11 @@ import pekko.dispatch.Dispatchers
|
|||
import pekko.protobufv3.internal.MessageLite
|
||||
import pekko.remote.ByteStringUtils
|
||||
import pekko.serialization.{ BaseSerializer, SerializationExtension, SerializerWithStringManifest, Serializers }
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
/**
|
||||
* Protobuf serializer for [[pekko.cluster.metrics.ClusterMetricsMessage]] types.
|
||||
*/
|
||||
@ccompatUsedUntil213
|
||||
class MessageSerializer(val system: ExtendedActorSystem) extends SerializerWithStringManifest with BaseSerializer {
|
||||
|
||||
private final val BufferSize = 4 * 1024
|
||||
|
|
|
|||
|
|
@ -22,13 +22,11 @@ import pekko.actor._
|
|||
import pekko.cluster.MemberStatus
|
||||
import pekko.cluster.sharding.ShardRegion.{ CurrentShardRegionState, GetShardRegionState, Passivate }
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
import com.typesafe.config.ConfigFactory
|
||||
import org.HdrHistogram.Histogram
|
||||
|
||||
import scala.concurrent.duration._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardingRememberEntitiesPerfSpec {
|
||||
val NrRegions = 6
|
||||
// use 5 for "real" testing
|
||||
|
|
|
|||
|
|
@ -17,7 +17,6 @@ import org.apache.pekko
|
|||
import pekko.actor.testkit.typed.scaladsl.TestProbe
|
||||
import pekko.actor.typed.ActorRef
|
||||
import pekko.actor.typed.Behavior
|
||||
import pekko.util.ccompat._
|
||||
import pekko.actor.typed.scaladsl.Behaviors
|
||||
import pekko.cluster.MemberStatus
|
||||
import pekko.cluster.MemberStatus.Removed
|
||||
|
|
@ -71,7 +70,6 @@ class ClusterShardingPreparingForShutdownMultiJvmNode1 extends ClusterShardingPr
|
|||
class ClusterShardingPreparingForShutdownMultiJvmNode2 extends ClusterShardingPreparingForShutdownSpec
|
||||
class ClusterShardingPreparingForShutdownMultiJvmNode3 extends ClusterShardingPreparingForShutdownSpec
|
||||
|
||||
@ccompatUsedUntil213
|
||||
class ClusterShardingPreparingForShutdownSpec
|
||||
extends MultiNodeSpec(ClusterShardingPreparingForShutdownSpec)
|
||||
with MultiNodeTypedClusterSpec {
|
||||
|
|
|
|||
|
|
@ -42,10 +42,8 @@ import pekko.cluster.sharding.typed.ReplicatedShardingSpec.MyReplicatedIntSet
|
|||
import pekko.cluster.sharding.typed.ReplicatedShardingSpec.MyReplicatedStringSet
|
||||
import pekko.persistence.typed.ReplicationId
|
||||
import com.typesafe.config.Config
|
||||
import pekko.util.ccompat._
|
||||
import org.scalatest.time.Span
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ReplicatedShardingSpec {
|
||||
def commonConfig = ConfigFactory.parseString("""
|
||||
pekko.loglevel = DEBUG
|
||||
|
|
|
|||
|
|
@ -39,9 +39,7 @@ import pekko.cluster.typed.Leave
|
|||
import pekko.pattern.AskTimeoutException
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.util.Timeout
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardingSpec {
|
||||
val config = ConfigFactory.parseString(s"""
|
||||
pekko.actor.provider = cluster
|
||||
|
|
|
|||
|
|
@ -41,13 +41,11 @@ import pekko.protobufv3.internal.MessageLite
|
|||
import pekko.serialization.BaseSerializer
|
||||
import pekko.serialization.Serialization
|
||||
import pekko.serialization.SerializerWithStringManifest
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
/**
|
||||
* INTERNAL API: Protobuf serializer of ClusterSharding messages.
|
||||
*/
|
||||
@ccompatUsedUntil213
|
||||
private[pekko] class ClusterShardingMessageSerializer(val system: ExtendedActorSystem)
|
||||
extends SerializerWithStringManifest
|
||||
with BaseSerializer {
|
||||
|
|
|
|||
|
|
@ -23,9 +23,7 @@ import pekko.cluster.MemberStatus
|
|||
import pekko.remote.transport.ThrottlerTransportAdapter.Direction
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardCoordinatorDowning2Spec {
|
||||
case class Ping(id: String) extends CborSerializable
|
||||
|
||||
|
|
|
|||
|
|
@ -23,9 +23,7 @@ import pekko.cluster.MemberStatus
|
|||
import pekko.remote.transport.ThrottlerTransportAdapter.Direction
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardCoordinatorDowningSpec {
|
||||
case class Ping(id: String) extends CborSerializable
|
||||
|
||||
|
|
|
|||
|
|
@ -23,9 +23,7 @@ import pekko.remote.testconductor.RoleName
|
|||
import pekko.remote.transport.ThrottlerTransportAdapter.Direction
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardingFailureSpec {
|
||||
case class Get(id: String) extends CborSerializable
|
||||
case class Add(id: String, i: Int) extends CborSerializable
|
||||
|
|
|
|||
|
|
@ -20,9 +20,7 @@ import pekko.actor.{ Actor, ActorRef, Props }
|
|||
import pekko.cluster.MemberStatus
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardingLeavingSpec {
|
||||
case class Ping(id: String) extends CborSerializable
|
||||
|
||||
|
|
|
|||
|
|
@ -20,9 +20,7 @@ import pekko.cluster.MemberStatus
|
|||
import pekko.cluster.sharding.ShardCoordinator.ShardAllocationStrategy
|
||||
import pekko.cluster.sharding.ShardRegion.{ ClusterShardingStats, GetClusterShardingStats }
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
abstract class ClusterShardingMinMembersSpecConfig(mode: String)
|
||||
extends MultiNodeClusterShardingConfig(
|
||||
mode,
|
||||
|
|
|
|||
|
|
@ -22,9 +22,7 @@ import pekko.actor._
|
|||
import pekko.cluster.{ Cluster, MemberStatus }
|
||||
import pekko.testkit._
|
||||
import pekko.remote.testkit.MultiNodeSpec
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterShardingRememberEntitiesSpec {
|
||||
|
||||
val extractEntityId: ShardRegion.ExtractEntityId = {
|
||||
|
|
|
|||
|
|
@ -24,9 +24,7 @@ import pekko.cluster.sharding.ShardRegion.{ CurrentRegions, GetCurrentRegions }
|
|||
import pekko.remote.testconductor.RoleName
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object MultiDcClusterShardingSpec {
|
||||
sealed trait EntityMsg extends CborSerializable {
|
||||
def id: String
|
||||
|
|
|
|||
|
|
@ -25,11 +25,9 @@ import pekko.persistence.journal.leveldb.{ SharedLeveldbJournal, SharedLeveldbSt
|
|||
import pekko.remote.testconductor.RoleName
|
||||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit.{ TestActors, TestProbe }
|
||||
import pekko.util.ccompat._
|
||||
|
||||
import scala.annotation.nowarn
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object MultiNodeClusterShardingSpec {
|
||||
|
||||
object EntityActor {
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ import pekko.testkit.PekkoSpec
|
|||
import pekko.testkit.TestActors.EchoActor
|
||||
import pekko.testkit.TestProbe
|
||||
import pekko.testkit.WithLogCapturing
|
||||
import pekko.util.ccompat._
|
||||
|
||||
object CoordinatedShutdownShardingSpec {
|
||||
val config =
|
||||
|
|
@ -50,7 +49,6 @@ object CoordinatedShutdownShardingSpec {
|
|||
}
|
||||
}
|
||||
|
||||
@ccompatUsedUntil213
|
||||
class CoordinatedShutdownShardingSpec extends PekkoSpec(CoordinatedShutdownShardingSpec.config) with WithLogCapturing {
|
||||
import CoordinatedShutdownShardingSpec._
|
||||
|
||||
|
|
|
|||
|
|
@ -51,10 +51,8 @@ import pekko.remote.DeadlineFailureDetector
|
|||
import pekko.routing.ConsistentHash
|
||||
import pekko.routing.MurmurHash
|
||||
import pekko.util.MessageBuffer
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
@deprecated(
|
||||
"Use Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
|
||||
since = "Akka 2.6.0")
|
||||
|
|
|
|||
|
|
@ -28,13 +28,11 @@ import pekko.cluster.pubsub.protobuf.msg.{ DistributedPubSubMessages => dm }
|
|||
import pekko.protobufv3.internal.{ ByteString, MessageLite }
|
||||
import pekko.remote.ByteStringUtils
|
||||
import pekko.serialization._
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
/**
|
||||
* INTERNAL API: Protobuf serializer of DistributedPubSubMediator messages.
|
||||
*/
|
||||
@ccompatUsedUntil213
|
||||
private[pekko] class DistributedPubSubMessageSerializer(val system: ExtendedActorSystem)
|
||||
extends SerializerWithStringManifest
|
||||
with BaseSerializer {
|
||||
|
|
|
|||
|
|
@ -30,9 +30,7 @@ import pekko.remote.testkit.MultiNodeSpec
|
|||
import pekko.remote.testkit.STMultiNodeSpec
|
||||
import pekko.remote.transport.ThrottlerTransportAdapter
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object ClusterSingletonManagerDownedSpec extends MultiNodeConfig {
|
||||
val first = role("first")
|
||||
val second = role("second")
|
||||
|
|
|
|||
|
|
@ -83,8 +83,6 @@ class ClusterSingletonLeavingSpeedSpec
|
|||
|
||||
override def expectedTestDuration: FiniteDuration = 10.minutes
|
||||
|
||||
import pekko.util.ccompat._
|
||||
@ccompatUsedUntil213
|
||||
def join(from: ActorSystem, to: ActorSystem, probe: ActorRef): Unit = {
|
||||
|
||||
from.actorOf(
|
||||
|
|
|
|||
|
|
@ -69,8 +69,6 @@ class ClusterSingletonRestart2Spec
|
|||
ConfigFactory.parseString("pekko.cluster.roles = [other]").withFallback(system.settings.config))
|
||||
var sys4: ActorSystem = null
|
||||
|
||||
import pekko.util.ccompat._
|
||||
@ccompatUsedUntil213
|
||||
def join(from: ActorSystem, to: ActorSystem): Unit = {
|
||||
if (Cluster(from).selfRoles.contains("singleton"))
|
||||
from.actorOf(
|
||||
|
|
|
|||
|
|
@ -48,8 +48,6 @@ class ClusterSingletonRestartSpec
|
|||
val sys2 = ActorSystem(system.name, system.settings.config)
|
||||
var sys3: ActorSystem = null
|
||||
|
||||
import pekko.util.ccompat._
|
||||
@ccompatUsedUntil213
|
||||
def join(from: ActorSystem, to: ActorSystem): Unit = {
|
||||
from.actorOf(
|
||||
ClusterSingletonManager.props(
|
||||
|
|
|
|||
|
|
@ -1,27 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* license agreements; and to You under the Apache License, version 2.0:
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* This file is part of the Apache Pekko project, which was derived from Akka.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Copyright (C) 2021-2022 Lightbend Inc. <https://www.lightbend.com>
|
||||
*/
|
||||
|
||||
package org.apache.pekko.cluster.typed.internal.receptionist
|
||||
|
||||
import org.apache.pekko
|
||||
import pekko.actor.typed.ActorRef
|
||||
import pekko.actor.typed.internal.receptionist.{ AbstractServiceKey, ReceptionistMessages }
|
||||
import pekko.annotation.InternalApi
|
||||
|
||||
/**
|
||||
* INTERNAL API
|
||||
*/
|
||||
@InternalApi
|
||||
private[receptionist] object ClusterReceptionistProtocol {
|
||||
type SubscriptionsKV[K <: AbstractServiceKey] = ActorRef[ReceptionistMessages.Listing[K#Protocol]]
|
||||
}
|
||||
|
|
@ -27,7 +27,6 @@ import pekko.cluster.ClusterSettings.DataCenter
|
|||
import pekko.cluster.MemberStatus._
|
||||
import pekko.dispatch.{ RequiresMessageQueue, UnboundedMessageQueueSemantics }
|
||||
import pekko.event.EventStream
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
/**
|
||||
|
|
@ -411,7 +410,6 @@ object ClusterEvent {
|
|||
* The nodes that have seen current version of the Gossip.
|
||||
*/
|
||||
@InternalApi
|
||||
@ccompatUsedUntil213
|
||||
private[cluster] final case class SeenChanged(convergence: Boolean, seenBy: Set[Address]) extends ClusterDomainEvent
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -32,7 +32,6 @@ import pekko.event.ActorWithLogClass
|
|||
import pekko.event.Logging
|
||||
import pekko.remote.FailureDetectorRegistry
|
||||
import pekko.remote.HeartbeatMessage
|
||||
import pekko.util.ccompat._
|
||||
|
||||
/**
|
||||
* INTERNAL API.
|
||||
|
|
@ -40,7 +39,6 @@ import pekko.util.ccompat._
|
|||
* Receives Heartbeat messages and replies.
|
||||
*/
|
||||
@InternalApi
|
||||
@ccompatUsedUntil213
|
||||
private[cluster] final class ClusterHeartbeatReceiver(getCluster: () => Cluster) extends Actor {
|
||||
import ClusterHeartbeatSender._
|
||||
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ import pekko.event.ActorWithLogClass
|
|||
import pekko.event.Logging
|
||||
import pekko.remote.FailureDetectorRegistry
|
||||
import pekko.util.ConstantFun
|
||||
import pekko.util.ccompat._
|
||||
|
||||
/**
|
||||
* INTERNAL API
|
||||
|
|
@ -45,7 +44,6 @@ import pekko.util.ccompat._
|
|||
* nodes which aggressively come and go as the traffic in the service changes.
|
||||
*/
|
||||
@InternalApi
|
||||
@ccompatUsedUntil213
|
||||
private[cluster] class CrossDcHeartbeatSender extends Actor {
|
||||
import CrossDcHeartbeatSender._
|
||||
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ import com.typesafe.config.{ Config, ConfigFactory, ConfigValue }
|
|||
import org.apache.pekko
|
||||
import pekko.actor.ExtendedActorSystem
|
||||
import pekko.annotation.{ DoNotInherit, InternalApi }
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
abstract class JoinConfigCompatChecker {
|
||||
|
|
@ -107,7 +106,6 @@ object JoinConfigCompatChecker {
|
|||
* information that users may have added to their configuration.
|
||||
*/
|
||||
@InternalApi
|
||||
@ccompatUsedUntil213
|
||||
private[cluster] def filterWithKeys(requiredKeys: im.Seq[String], config: Config): Config = {
|
||||
|
||||
val filtered = for {
|
||||
|
|
|
|||
|
|
@ -24,12 +24,10 @@ import org.apache.pekko
|
|||
import pekko.annotation.InternalApi
|
||||
import pekko.cluster.ClusterSettings.DataCenter
|
||||
import pekko.cluster.MemberStatus._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
/**
|
||||
* INTERNAL API
|
||||
*/
|
||||
@ccompatUsedUntil213
|
||||
@InternalApi private[pekko] object MembershipState {
|
||||
import MemberStatus._
|
||||
private val leaderMemberStatus = Set[MemberStatus](Up, Leaving, PreparingForShutdown, ReadyForShutdown)
|
||||
|
|
|
|||
|
|
@ -17,12 +17,10 @@ import scala.collection.immutable
|
|||
|
||||
import org.apache.pekko
|
||||
import pekko.annotation.InternalApi
|
||||
import pekko.util.ccompat._
|
||||
|
||||
/**
|
||||
* INTERNAL API
|
||||
*/
|
||||
@ccompatUsedUntil213
|
||||
private[cluster] object Reachability {
|
||||
val empty = new Reachability(Vector.empty, Map.empty)
|
||||
|
||||
|
|
|
|||
|
|
@ -31,14 +31,12 @@ import pekko.remote.ByteStringUtils
|
|||
import pekko.routing.Pool
|
||||
import pekko.serialization._
|
||||
import pekko.util.Version
|
||||
import pekko.util.ccompat._
|
||||
import pekko.util.ccompat.JavaConverters._
|
||||
|
||||
/**
|
||||
* INTERNAL API
|
||||
*/
|
||||
@InternalApi
|
||||
@ccompatUsedUntil213
|
||||
private[pekko] object ClusterMessageSerializer {
|
||||
// Kept for one version iteration from Akka 2.6.4 to allow rolling migration to short manifests
|
||||
// can be removed in Akka 2.6.6 or later.
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@ package org.apache.pekko.cluster
|
|||
import org.apache.pekko
|
||||
import pekko.cluster.MemberStatus.Removed
|
||||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.util.ccompat._
|
||||
import org.scalatest.concurrent.Eventually
|
||||
|
||||
import scala.concurrent.duration._
|
||||
|
|
@ -35,7 +34,6 @@ class ClusterShutdownSpecMultiJvmNode2 extends ClusterShutdownSpec
|
|||
class ClusterShutdownSpecMultiJvmNode3 extends ClusterShutdownSpec
|
||||
class ClusterShutdownSpecMultiJvmNode4 extends ClusterShutdownSpec
|
||||
|
||||
@ccompatUsedUntil213
|
||||
abstract class ClusterShutdownSpec extends MultiNodeClusterSpec(ClusterShutdownSpec) with Eventually {
|
||||
|
||||
import ClusterShutdownSpec._
|
||||
|
|
|
|||
|
|
@ -20,9 +20,7 @@ import pekko.cluster.MemberStatus._
|
|||
import pekko.remote.testconductor.RoleName
|
||||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object MinMembersBeforeUpMultiJvmSpec extends MultiNodeConfig {
|
||||
val first = role("first")
|
||||
val second = role("second")
|
||||
|
|
|
|||
|
|
@ -26,9 +26,7 @@ import pekko.annotation.InternalApi
|
|||
import pekko.remote.testconductor.RoleName
|
||||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object MultiDcHeartbeatTakingOverSpecMultiJvmSpec extends MultiNodeConfig {
|
||||
val first = role("first") // alpha
|
||||
val second = role("second") // alpha
|
||||
|
|
|
|||
|
|
@ -35,9 +35,7 @@ import pekko.remote.testkit.{ MultiNodeSpec, STMultiNodeSpec }
|
|||
import pekko.serialization.jackson.CborSerializable
|
||||
import pekko.testkit._
|
||||
import pekko.testkit.TestEvent._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object MultiNodeClusterSpec {
|
||||
|
||||
def clusterConfigWithFailureDetectorPuppet: Config =
|
||||
|
|
|
|||
|
|
@ -16,9 +16,7 @@ package org.apache.pekko.cluster
|
|||
import org.apache.pekko
|
||||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object NodeMembershipMultiJvmSpec extends MultiNodeConfig {
|
||||
val first = role("first")
|
||||
val second = role("second")
|
||||
|
|
|
|||
|
|
@ -29,9 +29,7 @@ import pekko.cluster.MemberStatus._
|
|||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.remote.testkit.MultiNodeSpec
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object RestartFirstSeedNodeMultiJvmSpec extends MultiNodeConfig {
|
||||
val seed1 = role("seed1")
|
||||
val seed2 = role("seed2")
|
||||
|
|
|
|||
|
|
@ -29,9 +29,7 @@ import pekko.cluster.MemberStatus._
|
|||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.remote.testkit.MultiNodeSpec
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object RestartNode2SpecMultiJvmSpec extends MultiNodeConfig {
|
||||
val seed1 = role("seed1")
|
||||
val seed2 = role("seed2")
|
||||
|
|
|
|||
|
|
@ -30,9 +30,7 @@ import pekko.remote.testkit.MultiNodeConfig
|
|||
import pekko.remote.testkit.MultiNodeSpec
|
||||
import pekko.remote.transport.ThrottlerTransportAdapter.Direction
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object RestartNode3MultiJvmSpec extends MultiNodeConfig {
|
||||
val first = role("first")
|
||||
val second = role("second")
|
||||
|
|
|
|||
|
|
@ -34,9 +34,7 @@ import pekko.cluster.MemberStatus._
|
|||
import pekko.remote.testkit.MultiNodeConfig
|
||||
import pekko.remote.testkit.MultiNodeSpec
|
||||
import pekko.testkit._
|
||||
import pekko.util.ccompat._
|
||||
|
||||
@ccompatUsedUntil213
|
||||
object RestartNodeMultiJvmSpec extends MultiNodeConfig {
|
||||
val first = role("first")
|
||||
val second = role("second")
|
||||
|
|
|
|||
|
|
@@ -29,9 +29,7 @@ import pekko.remote.testconductor.RoleName
 import pekko.remote.testkit.MultiNodeConfig
 import pekko.remote.transport.ThrottlerTransportAdapter.Direction
 import pekko.testkit._
-import pekko.util.ccompat._
 
-@ccompatUsedUntil213
 object UnreachableNodeJoinsAgainMultiNodeConfig extends MultiNodeConfig {
   val first = role("first")
   val second = role("second")

@@ -21,13 +21,11 @@ import pekko.cluster.UniqueAddress
 import pekko.cluster.ddata.Key.KeyId
 import pekko.cluster.ddata.Replicator.Internal.DeltaPropagation
 import pekko.cluster.ddata.Replicator.Internal.DeltaPropagation.NoDeltaPlaceholder
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API: Used by the Replicator actor.
  * Extracted to separate trait to make it easy to test.
  */
-@ccompatUsedUntil213
 @InternalApi
 private[pekko] trait DeltaPropagationSelector {
 

@@ -69,9 +69,7 @@ import pekko.serialization.SerializationExtension
 import pekko.util.ByteString
 import pekko.util.Helpers.toRootLowerCase
 import pekko.util.JavaDurationConverters._
-import pekko.util.ccompat._
 
-@ccompatUsedUntil213
 object ReplicatorSettings {
 
   /**

@@ -36,10 +36,8 @@ import pekko.serialization.BaseSerializer
 import pekko.serialization.Serialization
 import pekko.serialization.SerializerWithStringManifest
 import pekko.util.ByteString.UTF_8
-import pekko.util.ccompat._
 import pekko.util.ccompat.JavaConverters._
 
-@ccompatUsedUntil213
 private object ReplicatedDataSerializer {
   /*
    * Generic superclass to allow to compare Entry types used in protobuf.

@@ -41,13 +41,11 @@ import pekko.serialization.BaseSerializer
 import pekko.serialization.Serialization
 import pekko.serialization.SerializerWithStringManifest
 import pekko.util.{ ByteString => PekkoByteString }
-import pekko.util.ccompat._
 import pekko.util.ccompat.JavaConverters._
 
 /**
  * INTERNAL API
  */
-@ccompatUsedUntil213
 @InternalApi
 private[pekko] object ReplicatorMessageSerializer {
 

@@ -30,13 +30,11 @@ import pekko.protobufv3.internal.ByteString
 import pekko.protobufv3.internal.MessageLite
 import pekko.remote.ByteStringUtils
 import pekko.serialization._
-import pekko.util.ccompat._
 import pekko.util.ccompat.JavaConverters._
 
 /**
  * Some useful serialization helper methods.
  */
-@ccompatUsedUntil213
 trait SerializationSupport {
 
   private final val BufferSize = 1024 * 4

@@ -27,9 +27,7 @@ import pekko.remote.testconductor.RoleName
 import pekko.remote.testkit.MultiNodeConfig
 import pekko.remote.testkit.MultiNodeSpec
 import pekko.testkit._
-import pekko.util.ccompat._
 
-@ccompatUsedUntil213
 object DurablePruningSpec extends MultiNodeConfig {
   val first = role("first")
   val second = role("second")

@@ -149,7 +149,7 @@ If in doubt, you can inspect your configuration objects
 before or after using them to construct an actor system:
 
 ```
-Welcome to Scala 2.12 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0).
+Welcome to Scala 2.13 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0).
 Type in expressions to have them evaluated.
 Type :help for more information.
 

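This docs hunk only bumps the REPL banner from Scala 2.12 to 2.13. For the inspection step the passage describes, a minimal sketch (assuming the Typesafe Config library and Pekko's reference configuration on the classpath; the object name is made up) could be:

```scala
// Minimal sketch: print a resolved setting so the effective configuration
// can be checked before an ActorSystem is constructed from it.
import com.typesafe.config.ConfigFactory

object InspectConfig {
  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.load()
    println(config.getString("pekko.actor.provider"))
  }
}
```
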
@@ -212,7 +212,7 @@ Copyright (c) 2003-2011, LAMP/EPFL
 
 pekko-actor contains code from scala-collection-compat in the `org.apache.pekko.util.ccompat` package
 which has released under an Apache 2.0 license.
-- actor/src/main/scala-2.12/org/apache/pekko/util/ccompat/package.scala
+- actor/src/main/scala/org/apache/pekko/util/ccompat/package.scala
 
 Scala (https://www.scala-lang.org)
 

@@ -37,9 +37,7 @@ import pekko.pattern.{ ask, AskTimeoutException }
 import pekko.remote.testconductor.RemoteConnection.getAddrString
 import pekko.remote.transport.ThrottlerTransportAdapter.{ Blackhole, SetThrottle, TokenBucket, Unthrottled }
 import pekko.util.Timeout
-import pekko.util.ccompat._
 
-@ccompatUsedUntil213
 object Player {
 
   final class Waiter extends Actor with RequiresMessageQueue[UnboundedMessageQueueSemantics] {

@@ -34,12 +34,10 @@ import pekko.testkit._
 import pekko.testkit.TestEvent._
 import pekko.testkit.TestKit
 import pekko.util.Timeout
-import pekko.util.ccompat._
 
 /**
  * Configure the role names and participants of the test, including configuration settings.
  */
-@ccompatUsedUntil213
 abstract class MultiNodeConfig {
 
   private var _commonConf: Option[Config] = None

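Apart from the dropped import and marker annotation, `MultiNodeConfig` keeps its shape; a minimal subclass following the `role(...)` pattern seen throughout this diff (the object name here is hypothetical) looks like:

```scala
// Minimal sketch of a multi-node test config, mirroring the role(...)
// pattern visible in the hunks above.
import org.apache.pekko.remote.testkit.MultiNodeConfig

object ExampleMultiNodeConfig extends MultiNodeConfig {
  val first = role("first")
  val second = role("second")
}
```
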
@@ -22,9 +22,7 @@ import pekko.actor.Cancellable
 import pekko.actor.DeadLetterSuppression
 import pekko.annotation.InternalApi
 import pekko.persistence.serialization.Message
-import pekko.util.ccompat._
 
-@ccompatUsedUntil213
 object AtLeastOnceDelivery {
 
   /**

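The `AtLeastOnceDelivery` change is again only the shim removal. As context, a hedged sketch of the classic usage pattern for this trait (assuming Pekko mirrors Akka's `PersistentActor` API; all names below are made up for illustration):

```scala
// Hedged sketch, not from this diff: redeliver until confirmed, replaying
// deliver/confirm state from persisted events during recovery.
import org.apache.pekko.actor.ActorPath
import org.apache.pekko.persistence.{ AtLeastOnceDelivery, PersistentActor }

case class Msg(deliveryId: Long, payload: String)
case class Confirm(deliveryId: Long)
case class MsgSent(payload: String)
case class MsgConfirmed(deliveryId: Long)

class ExampleSender(destination: ActorPath) extends PersistentActor with AtLeastOnceDelivery {
  override def persistenceId: String = "example-sender"

  override def receiveCommand: Receive = {
    case payload: String =>
      persist(MsgSent(payload))(e => deliver(destination)(id => Msg(id, e.payload)))
    case Confirm(id) =>
      persist(MsgConfirmed(id))(e => confirmDelivery(e.deliveryId))
  }

  override def receiveRecover: Receive = {
    case MsgSent(p)       => deliver(destination)(id => Msg(id, p))
    case MsgConfirmed(id) => confirmDelivery(id)
  }
}
```
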
@@ -26,12 +26,10 @@ import com.typesafe.config.Config
 import org.apache.pekko
 import pekko.actor.ExtendedActorSystem
 import pekko.event.{ Logging, LoggingAdapter }
-import pekko.util.ccompat._
 
 /**
  * `EventAdapters` serves as a per-journal collection of bound event adapters.
  */
-@ccompatUsedUntil213
 class EventAdapters(
     map: ConcurrentHashMap[Class[_], EventAdapter],
     bindings: immutable.Seq[(Class[_], EventAdapter)],

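For orientation, one of the adapters that `EventAdapters` binds per journal might look like the following hedged sketch (assuming Pekko keeps Akka's `EventAdapter`/`EventSeq` API; the event types are invented):

```scala
// Hedged sketch, not from this commit: translate between a domain event
// and the representation written to the journal.
import org.apache.pekko.persistence.journal.{ EventAdapter, EventSeq }

case class DomainEvent(value: String)
case class WireEvent(value: String)

class ExampleAdapter extends EventAdapter {
  override def manifest(event: Any): String = ""

  // On the way to the journal: map the domain event to its wire format.
  override def toJournal(event: Any): Any = event match {
    case DomainEvent(v) => WireEvent(v)
    case other          => other
  }

  // During recovery: map the stored representation back.
  override def fromJournal(event: Any, manifest: String): EventSeq = event match {
    case WireEvent(v) => EventSeq.single(DomainEvent(v))
    case other        => EventSeq.single(other)
  }
}
```
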
@@ -24,13 +24,11 @@ import pekko.persistence._
 import pekko.persistence.journal.{ AsyncWriteJournal => SAsyncWriteJournal }
 import pekko.util.ConstantFun.scalaAnyToUnit
 import pekko.util.FutureConverters._
-import pekko.util.ccompat._
 import pekko.util.ccompat.JavaConverters._
 
 /**
  * Java API: abstract journal, optimized for asynchronous, non-blocking writes.
  */
-@ccompatUsedUntil213
 abstract class AsyncWriteJournal extends AsyncRecovery with SAsyncWriteJournal with AsyncWritePlugin {
   import SAsyncWriteJournal.successUnit
 

@@ -29,7 +29,6 @@ import pekko.persistence.serialization.{ MessageFormats => mf }
 import pekko.protobufv3.internal.ByteString
 import pekko.protobufv3.internal.UnsafeByteOperations
 import pekko.serialization._
-import pekko.util.ccompat._
 
 /**
  * Marker trait for all protobuf-serializable messages in `pekko.persistence`.

@@ -39,7 +38,6 @@ trait Message extends Serializable
 /**
  * Protobuf serializer for [[pekko.persistence.PersistentRepr]], [[pekko.persistence.AtLeastOnceDelivery]] and [[pekko.persistence.fsm.PersistentFSM.StateChangeEvent]] messages.
  */
-@ccompatUsedUntil213
 class MessageSerializer(val system: ExtendedActorSystem) extends BaseSerializer {
   import PersistentRepr.Undefined
 

@@ -31,14 +31,12 @@ import pekko.persistence.serialization._
 import pekko.persistence.snapshot._
 import pekko.serialization.SerializationExtension
 import pekko.util.ByteString.UTF_8
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API
  *
  * Local filesystem backed snapshot store.
  */
-@ccompatUsedUntil213
 private[persistence] class LocalSnapshotStore(config: Config) extends SnapshotStore with ActorLogging {
   private val FilenamePattern = """^snapshot-(.+)-(\d+)-(\d+)""".r
   private val persistenceIdStartIdx = 9 // Persistence ID starts after the "snapshot-" substring

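The `FilenamePattern` regex shown in this hunk encodes the snapshot filename scheme `snapshot-<persistenceId>-<sequenceNr>-<timestamp>`. A small self-contained sketch of how that pattern extracts the three fields (sample filename invented):

```scala
// Grounded in the regex above: parse the snapshot filename scheme.
object SnapshotNameParse {
  private val FilenamePattern = """^snapshot-(.+)-(\d+)-(\d+)""".r

  def main(args: Array[String]): Unit = {
    "snapshot-my-id-42-1700000000000" match {
      case FilenamePattern(persistenceId, seqNr, ts) =>
        println(s"persistenceId=$persistenceId seqNr=$seqNr timestamp=$ts")
      case _ =>
        println("no match")
    }
  }
}
```
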
@@ -43,10 +43,9 @@ object Dependencies {
   val jacksonCoreVersion = "2.20.0"
   val jacksonDatabindVersion = jacksonCoreVersion
 
-  val scala212Version = "2.12.20"
   val scala213Version = "2.13.16"
   val scala3Version = "3.3.6"
-  val allScalaVersions = Seq(scala213Version, scala212Version, scala3Version)
+  val allScalaVersions = Seq(scala213Version, scala3Version)
 
   val reactiveStreamsVersion = "1.0.4"
 

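With `scala212Version` removed, the cross-build covers only 2.13 and 3. As a hedged sketch of how such a version list typically drives sbt cross-building (illustrative wiring, not Pekko's actual build definition):

```scala
// Illustrative build.sbt fragment: the remaining Scala versions drive
// sbt's cross-building.
val scala213Version = "2.13.16"
val scala3Version = "3.3.6"
val allScalaVersions = Seq(scala213Version, scala3Version)

lazy val example = project
  .settings(
    scalaVersion := scala213Version,       // default for local development
    crossScalaVersions := allScalaVersions // `+compile` builds 2.13 and 3
  )
```
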
@@ -30,7 +30,6 @@ import com.typesafe.config.Config
 import org.apache.pekko
 import pekko.Done
 import pekko.actor._
-import pekko.actor.ActorInitializationException
 import pekko.actor.SupervisorStrategy._
 import pekko.annotation.InternalStableApi
 import pekko.dispatch.{ RequiresMessageQueue, UnboundedMessageQueueSemantics }

@@ -44,7 +43,6 @@ import pekko.remote.transport.PekkoPduCodec.Message
 import pekko.remote.transport.Transport.{ ActorAssociationEventListener, AssociationEventListener, InboundAssociation }
 import pekko.util.ByteString.UTF_8
 import pekko.util.OptionVal
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API

@@ -145,7 +143,6 @@ private[remote] object Remoting {
  * INTERNAL API
  */
 @nowarn("msg=deprecated")
-@ccompatUsedUntil213
 private[remote] class Remoting(_system: ExtendedActorSystem, _provider: RemoteActorRefProvider)
     extends RemoteTransport(_system, _provider) {
 

@@ -68,7 +68,6 @@ import pekko.stream.scaladsl.Source
 import pekko.util.OptionVal
 import pekko.util.PrettyDuration._
 import pekko.util.WildcardIndex
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API

@@ -139,7 +138,6 @@ private[remote] object Association {
  * Thread-safe, mutable holder for association state. Main entry point for remote destined message to a specific
  * remote address.
  */
-@ccompatUsedUntil213
 private[remote] class Association(
     val transport: ArteryTransport,
     val materializer: Materializer,

@@ -29,7 +29,6 @@ import pekko.annotation.InternalStableApi
 import pekko.event.Logging
 import pekko.event.LoggingAdapter
 import pekko.remote.RemoteActorRefProvider
-import pekko.util.ccompat._
 import pekko.util.OptionVal
 import pekko.util.unused
 

@@ -45,7 +44,6 @@ import pekko.util.unused
  * will be created for each encoder and decoder. It's only called from the operator, so if it doesn't
  * delegate to any shared instance it doesn't have to be thread-safe.
  */
-@ccompatUsedUntil213
 abstract class RemoteInstrument {
 
   /**

@@ -57,12 +57,10 @@ import pekko.stream.scaladsl.Flow
 import pekko.stream.scaladsl.Keep
 import pekko.stream.scaladsl.Sink
 import pekko.stream.scaladsl.Source
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API
  */
-@ccompatUsedUntil213
 private[remote] class ArteryAeronUdpTransport(_system: ExtendedActorSystem, _provider: RemoteActorRefProvider)
     extends ArteryTransport(_system, _provider) {
   import AeronSource.AeronLifecycle

@@ -59,7 +59,6 @@ import pekko.stream.scaladsl.Source
 import pekko.stream.scaladsl.Tcp
 import pekko.stream.scaladsl.Tcp.ServerBinding
 import pekko.util.{ ByteString, OptionVal }
-import pekko.util.ccompat._
 
 /**
  * INTERNAL API

@@ -77,7 +76,6 @@ private[remote] object ArteryTcpTransport {
 /**
  * INTERNAL API
  */
-@ccompatUsedUntil213
 private[remote] class ArteryTcpTransport(
     _system: ExtendedActorSystem,
     _provider: RemoteActorRefProvider,

@@ -17,11 +17,9 @@ package tcp
 import org.apache.pekko
 import pekko.actor.ExtendedActorSystem
 import pekko.actor.setup.Setup
-import pekko.util.ccompat._
 import javax.net.ssl.SSLEngine
 import javax.net.ssl.SSLSession
 
-@ccompatUsedUntil213
 trait SSLEngineProvider {
 
   def createServerSSLEngine(hostname: String, port: Int): SSLEngine

Some files were not shown because too many files have changed in this diff.