Add FunctionConverters, FutureConverters and OptionConverters

Matthew de Detrich 2023-04-04 16:22:58 +02:00 committed by Matthew de Detrich
parent dfa0b0ded1
commit 07df607192
70 changed files with 1041 additions and 339 deletions

View file

@ -15,13 +15,12 @@ package org.apache.pekko.actor.testkit.typed
import java.util.Optional
import scala.compat.java8.OptionConverters._
import org.slf4j.Marker
import org.slf4j.event.Level
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.util.OptionConverters._
import pekko.util.OptionVal
/**
@ -40,7 +39,7 @@ final case class CapturedLogEvent(level: Level, message: String, cause: Option[T
errorCause: Optional[Throwable],
marker: Optional[Marker],
mdc: java.util.Map[String, Any]) =
this(level, message, errorCause.asScala, marker.asScala)
this(level, message, errorCause.toScala, marker.toScala)
/**
* Constructor for Java API
@ -66,9 +65,9 @@ final case class CapturedLogEvent(level: Level, message: String, cause: Option[T
def this(level: Level, message: String, errorCause: Throwable, marker: Marker) =
this(level, message, Some(errorCause), Some(marker))
def getErrorCause: Optional[Throwable] = cause.asJava
def getErrorCause: Optional[Throwable] = cause.toJava
def getMarker: Optional[Marker] = marker.asJava
def getMarker: Optional[Marker] = marker.toJava
}
object CapturedLogEvent {

View file

@ -13,12 +13,12 @@
package org.apache.pekko.actor.testkit.typed
import scala.compat.java8.FunctionConverters._
import scala.concurrent.duration.FiniteDuration
import org.apache.pekko
import pekko.actor.typed.{ ActorRef, Behavior, Props }
import pekko.annotation.{ DoNotInherit, InternalApi }
import pekko.util.FunctionConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.unused

View file

@ -15,13 +15,12 @@ package org.apache.pekko.actor.testkit.typed
import java.util.Optional
import scala.compat.java8.OptionConverters._
import org.slf4j.Marker
import org.slf4j.event.Level
import org.apache.pekko
import pekko.util.ccompat.JavaConverters._
import pekko.util.OptionConverters._
object LoggingEvent {
@ -49,7 +48,7 @@ object LoggingEvent {
marker: Optional[Marker],
throwable: Optional[Throwable],
mdc: java.util.Map[String, String]) =
apply(level, loggerName, threadName, message, timeStamp, marker.asScala, throwable.asScala, mdc.asScala.toMap)
apply(level, loggerName, threadName, message, timeStamp, marker.toScala, throwable.toScala, mdc.asScala.toMap)
}
/**
@ -70,13 +69,13 @@ final case class LoggingEvent(
* Java API
*/
def getMarker: Optional[Marker] =
marker.asJava
marker.toJava
/**
* Java API
*/
def getThrowable: Optional[Throwable] =
throwable.asJava
throwable.toJava
/**
* Java API

View file

@ -14,7 +14,6 @@
package org.apache.pekko.actor.testkit.typed.internal
import java.util.concurrent.{ CompletionStage, ThreadFactory }
import scala.compat.java8.FutureConverters
import scala.concurrent._
import scala.annotation.nowarn
import com.typesafe.config.{ Config, ConfigFactory }
@ -38,6 +37,7 @@ import pekko.actor.typed.internal.ActorRefImpl
import pekko.actor.typed.internal.InternalRecipientRef
import pekko.actor.typed.receptionist.Receptionist
import pekko.annotation.InternalApi
import pekko.util.FutureConverters
/**
* INTERNAL API
@ -108,7 +108,7 @@ import pekko.annotation.InternalApi
private val terminationPromise = Promise[Done]()
override def terminate(): Unit = terminationPromise.trySuccess(Done)
override def whenTerminated: Future[Done] = terminationPromise.future
override def getWhenTerminated: CompletionStage[Done] = FutureConverters.toJava(whenTerminated)
override def getWhenTerminated: CompletionStage[Done] = FutureConverters.asJava(whenTerminated)
override val startTime: Long = System.currentTimeMillis()
override def uptime: Long = System.currentTimeMillis() - startTime
override def threadFactory: java.util.concurrent.ThreadFactory = new ThreadFactory {

View file

@ -16,11 +16,11 @@ package org.apache.pekko.pattern;
import org.apache.pekko.actor.*;
import org.apache.pekko.testkit.PekkoJUnitActorSystemResource;
import org.apache.pekko.testkit.PekkoSpec;
import org.apache.pekko.util.FutureConverters;
import org.apache.pekko.util.JavaDurationConverters;
import org.junit.ClassRule;
import org.junit.Test;
import org.scalatestplus.junit.JUnitSuite;
import scala.compat.java8.FutureConverters;
import scala.concurrent.Await;
import java.util.Optional;
@ -53,7 +53,7 @@ public class CircuitBreakerTest extends JUnitSuite {
assertEquals(
"hello",
Await.result(
FutureConverters.toScala(res), JavaDurationConverters.asFiniteDuration(fiveSeconds)));
FutureConverters.asScala(res), JavaDurationConverters.asFiniteDuration(fiveSeconds)));
}
@Test
@ -73,7 +73,7 @@ public class CircuitBreakerTest extends JUnitSuite {
assertEquals(
"hello",
Await.result(
FutureConverters.toScala(res), JavaDurationConverters.asFiniteDuration(fiveSeconds)));
FutureConverters.asScala(res), JavaDurationConverters.asFiniteDuration(fiveSeconds)));
assertEquals(1, breaker.currentFailureCount());
}
}

View file

@ -16,7 +16,6 @@ package org.apache.pekko.actor.typed.delivery
import java.time.{ Duration => JavaDuration }
import java.util.Optional
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration._
import scala.reflect.ClassTag
@ -34,6 +33,7 @@ import pekko.annotation.InternalApi
import pekko.util.Helpers.toRootLowerCase
import pekko.util.Helpers.Requiring
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
/**
* Point-to-point reliable delivery between a single producer actor sending messages and a single consumer
@ -294,7 +294,7 @@ object ProducerController {
messageClass: Class[A],
producerId: String,
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]]): Behavior[Command[A]] = {
apply(producerId, durableQueueBehavior.asScala)(ClassTag(messageClass))
apply(producerId, durableQueueBehavior.toScala)(ClassTag(messageClass))
}
/**
@ -305,7 +305,7 @@ object ProducerController {
producerId: String,
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]],
settings: Settings): Behavior[Command[A]] = {
apply(producerId, durableQueueBehavior.asScala, settings)(ClassTag(messageClass))
apply(producerId, durableQueueBehavior.toScala, settings)(ClassTag(messageClass))
}
}

View file

@ -15,7 +15,6 @@ package org.apache.pekko.actor.typed.delivery
import java.util.Optional
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -31,6 +30,7 @@ import pekko.actor.typed.receptionist.ServiceKey
import pekko.actor.typed.scaladsl.Behaviors
import pekko.annotation.ApiMayChange
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
/**
* Work pulling is a pattern where several worker actors pull tasks in their own pace from
@ -241,7 +241,7 @@ object WorkPullingProducerController {
producerId: String,
workerServiceKey: ServiceKey[ConsumerController.Command[A]],
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]]): Behavior[Command[A]] = {
apply(producerId, workerServiceKey, durableQueueBehavior.asScala)(ClassTag(messageClass))
apply(producerId, workerServiceKey, durableQueueBehavior.toScala)(ClassTag(messageClass))
}
/**
@ -253,6 +253,6 @@ object WorkPullingProducerController {
workerServiceKey: ServiceKey[ConsumerController.Command[A]],
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]],
settings: Settings): Behavior[Command[A]] = {
apply(producerId, workerServiceKey, durableQueueBehavior.asScala, settings)(ClassTag(messageClass))
apply(producerId, workerServiceKey, durableQueueBehavior.toScala, settings)(ClassTag(messageClass))
}
}

View file

@ -15,7 +15,6 @@ package org.apache.pekko.actor.typed.internal.adapter
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters
import scala.concurrent.ExecutionContextExecutor
import org.slf4j.{ Logger, LoggerFactory }
@ -43,6 +42,7 @@ import pekko.actor.typed.internal.PropsImpl.DispatcherSameAsParent
import pekko.actor.typed.internal.SystemMessage
import pekko.actor.typed.scaladsl.Behaviors
import pekko.annotation.InternalApi
import pekko.util.FutureConverters
/**
* INTERNAL API. Lightweight wrapper for presenting a classic ActorSystem to a Behavior (via the context).
@ -120,7 +120,7 @@ import pekko.annotation.InternalApi
override lazy val whenTerminated: scala.concurrent.Future[pekko.Done] =
system.whenTerminated.map(_ => Done)(parasitic)
override lazy val getWhenTerminated: CompletionStage[pekko.Done] =
FutureConverters.toJava(whenTerminated)
FutureConverters.asJava(whenTerminated)
override def systemActorOf[U](behavior: Behavior[U], name: String, props: Props): ActorRef[U] = {
val ref = system.systemActorOf(

View file

@ -17,12 +17,12 @@ package javadsl
import java.time.Duration
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters._
import org.apache.pekko
import pekko.actor.typed.Scheduler
import pekko.actor.typed.scaladsl.AskPattern._
import pekko.japi.function.{ Function => JFunction }
import pekko.pattern.StatusReply
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
/**
@ -49,7 +49,7 @@ object AskPattern {
messageFactory: JFunction[ActorRef[Res], Req],
timeout: Duration,
scheduler: Scheduler): CompletionStage[Res] =
actor.ask(messageFactory.apply)(timeout.asScala, scheduler).toJava
actor.ask(messageFactory.apply)(timeout.asScala, scheduler).asJava
/**
* The same as [[ask]] but only for requests that result in a response of type [[pekko.pattern.StatusReply]].
@ -62,6 +62,6 @@ object AskPattern {
messageFactory: JFunction[ActorRef[StatusReply[Res]], Req],
timeout: Duration,
scheduler: Scheduler): CompletionStage[Res] =
actor.askWithStatus(messageFactory.apply)(timeout.asScala, scheduler).toJava
actor.askWithStatus(messageFactory.apply)(timeout.asScala, scheduler).asJava
}

View file

@ -0,0 +1,554 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import scala.compat.java8
import scala.language.implicitConversions
import scala.annotation.nowarn
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib.
*
* DO NOT edit this file manually, it's copied over from scala-java8-compat. More specifically,
* scala-java8-compat generates this source from a template, so you have to run +compile in
* scala-java8-compat, check the src_managed folder under target and then add
* `@nowarn("msg=never used")` to functions as necessary.
*/
@InternalStableApi
private[pekko] object FunctionConverters extends java8.Priority1FunctionConverters {
import java8.functionConverterImpls._
@inline def asScalaFromBiConsumer[T, U](jf: java.util.function.BiConsumer[T, U]): scala.Function2[T, U, Unit] =
new FromJavaBiConsumer[T, U](jf)
@inline def asJavaBiConsumer[T, U](sf: scala.Function2[T, U, Unit]): java.util.function.BiConsumer[T, U] =
new AsJavaBiConsumer[T, U](sf)
@inline def asScalaFromBiFunction[T, U, R](jf: java.util.function.BiFunction[T, U, R]): scala.Function2[T, U, R] =
new FromJavaBiFunction[T, U, R](jf)
@inline def asJavaBiFunction[T, U, R](sf: scala.Function2[T, U, R]): java.util.function.BiFunction[T, U, R] =
new AsJavaBiFunction[T, U, R](sf)
@inline def asScalaFromBiPredicate[T, U](jf: java.util.function.BiPredicate[T, U]): scala.Function2[T, U, Boolean] =
new FromJavaBiPredicate[T, U](jf)
@inline def asJavaBiPredicate[T, U](sf: scala.Function2[T, U, Boolean]): java.util.function.BiPredicate[T, U] =
new AsJavaBiPredicate[T, U](sf)
@inline def asScalaFromBinaryOperator[T](jf: java.util.function.BinaryOperator[T]): scala.Function2[T, T, T] =
new FromJavaBinaryOperator[T](jf)
@inline def asJavaBinaryOperator[T](sf: scala.Function2[T, T, T]): java.util.function.BinaryOperator[T] =
new AsJavaBinaryOperator[T](sf)
@inline def asScalaFromBooleanSupplier(jf: java.util.function.BooleanSupplier): scala.Function0[Boolean] =
new FromJavaBooleanSupplier(jf)
@inline def asJavaBooleanSupplier(sf: scala.Function0[Boolean]): java.util.function.BooleanSupplier =
new AsJavaBooleanSupplier(sf)
@inline def asScalaFromConsumer[T](jf: java.util.function.Consumer[T]): scala.Function1[T, Unit] =
new FromJavaConsumer[T](jf)
@inline def asJavaConsumer[T](sf: scala.Function1[T, Unit]): java.util.function.Consumer[T] =
new AsJavaConsumer[T](sf)
@inline def asScalaFromDoubleBinaryOperator(
jf: java.util.function.DoubleBinaryOperator): scala.Function2[Double, Double, Double] =
new FromJavaDoubleBinaryOperator(jf)
@inline def asJavaDoubleBinaryOperator(
sf: scala.Function2[Double, Double, Double]): java.util.function.DoubleBinaryOperator =
new AsJavaDoubleBinaryOperator(sf)
@inline def asScalaFromDoubleConsumer(jf: java.util.function.DoubleConsumer): scala.Function1[Double, Unit] =
new FromJavaDoubleConsumer(jf)
@inline def asJavaDoubleConsumer(sf: scala.Function1[Double, Unit]): java.util.function.DoubleConsumer =
new AsJavaDoubleConsumer(sf)
@inline def asScalaFromDoubleFunction[R](jf: java.util.function.DoubleFunction[R]): scala.Function1[Double, R] =
new FromJavaDoubleFunction[R](jf)
@inline def asJavaDoubleFunction[R](sf: scala.Function1[Double, R]): java.util.function.DoubleFunction[R] =
new AsJavaDoubleFunction[R](sf)
@inline def asScalaFromDoublePredicate(jf: java.util.function.DoublePredicate): scala.Function1[Double, Boolean] =
new FromJavaDoublePredicate(jf)
@inline def asJavaDoublePredicate(sf: scala.Function1[Double, Boolean]): java.util.function.DoublePredicate =
new AsJavaDoublePredicate(sf)
@inline def asScalaFromDoubleSupplier(jf: java.util.function.DoubleSupplier): scala.Function0[Double] =
new FromJavaDoubleSupplier(jf)
@inline def asJavaDoubleSupplier(sf: scala.Function0[Double]): java.util.function.DoubleSupplier =
new AsJavaDoubleSupplier(sf)
@inline def asScalaFromDoubleToIntFunction(jf: java.util.function.DoubleToIntFunction): scala.Function1[Double, Int] =
new FromJavaDoubleToIntFunction(jf)
@inline def asJavaDoubleToIntFunction(sf: scala.Function1[Double, Int]): java.util.function.DoubleToIntFunction =
new AsJavaDoubleToIntFunction(sf)
@inline def asScalaFromDoubleToLongFunction(
jf: java.util.function.DoubleToLongFunction): scala.Function1[Double, Long] = new FromJavaDoubleToLongFunction(jf)
@inline def asJavaDoubleToLongFunction(sf: scala.Function1[Double, Long]): java.util.function.DoubleToLongFunction =
new AsJavaDoubleToLongFunction(sf)
@inline def asScalaFromDoubleUnaryOperator(
jf: java.util.function.DoubleUnaryOperator): scala.Function1[Double, Double] = new FromJavaDoubleUnaryOperator(jf)
@inline def asJavaDoubleUnaryOperator(sf: scala.Function1[Double, Double]): java.util.function.DoubleUnaryOperator =
new AsJavaDoubleUnaryOperator(sf)
@inline def asScalaFromFunction[T, R](jf: java.util.function.Function[T, R]): scala.Function1[T, R] =
new FromJavaFunction[T, R](jf)
@inline def asJavaFunction[T, R](sf: scala.Function1[T, R]): java.util.function.Function[T, R] =
new AsJavaFunction[T, R](sf)
@inline def asScalaFromIntBinaryOperator(jf: java.util.function.IntBinaryOperator): scala.Function2[Int, Int, Int] =
new FromJavaIntBinaryOperator(jf)
@inline def asJavaIntBinaryOperator(sf: scala.Function2[Int, Int, Int]): java.util.function.IntBinaryOperator =
new AsJavaIntBinaryOperator(sf)
@inline def asScalaFromIntConsumer(jf: java.util.function.IntConsumer): scala.Function1[Int, Unit] =
new FromJavaIntConsumer(jf)
@inline def asJavaIntConsumer(sf: scala.Function1[Int, Unit]): java.util.function.IntConsumer =
new AsJavaIntConsumer(sf)
@inline def asScalaFromIntFunction[R](jf: java.util.function.IntFunction[R]): scala.Function1[Int, R] =
new FromJavaIntFunction[R](jf)
@inline def asJavaIntFunction[R](sf: scala.Function1[Int, R]): java.util.function.IntFunction[R] =
new AsJavaIntFunction[R](sf)
@inline def asScalaFromIntPredicate(jf: java.util.function.IntPredicate): scala.Function1[Int, Boolean] =
new FromJavaIntPredicate(jf)
@inline def asJavaIntPredicate(sf: scala.Function1[Int, Boolean]): java.util.function.IntPredicate =
new AsJavaIntPredicate(sf)
@inline def asScalaFromIntSupplier(jf: java.util.function.IntSupplier): scala.Function0[Int] =
new FromJavaIntSupplier(jf)
@inline def asJavaIntSupplier(sf: scala.Function0[Int]): java.util.function.IntSupplier = new AsJavaIntSupplier(sf)
@inline def asScalaFromIntToDoubleFunction(jf: java.util.function.IntToDoubleFunction): scala.Function1[Int, Double] =
new FromJavaIntToDoubleFunction(jf)
@inline def asJavaIntToDoubleFunction(sf: scala.Function1[Int, Double]): java.util.function.IntToDoubleFunction =
new AsJavaIntToDoubleFunction(sf)
@inline def asScalaFromIntToLongFunction(jf: java.util.function.IntToLongFunction): scala.Function1[Int, Long] =
new FromJavaIntToLongFunction(jf)
@inline def asJavaIntToLongFunction(sf: scala.Function1[Int, Long]): java.util.function.IntToLongFunction =
new AsJavaIntToLongFunction(sf)
@inline def asScalaFromIntUnaryOperator(jf: java.util.function.IntUnaryOperator): scala.Function1[Int, Int] =
new FromJavaIntUnaryOperator(jf)
@inline def asJavaIntUnaryOperator(sf: scala.Function1[Int, Int]): java.util.function.IntUnaryOperator =
new AsJavaIntUnaryOperator(sf)
@inline def asScalaFromLongBinaryOperator(
jf: java.util.function.LongBinaryOperator): scala.Function2[Long, Long, Long] = new FromJavaLongBinaryOperator(jf)
@inline def asJavaLongBinaryOperator(sf: scala.Function2[Long, Long, Long]): java.util.function.LongBinaryOperator =
new AsJavaLongBinaryOperator(sf)
@inline def asScalaFromLongConsumer(jf: java.util.function.LongConsumer): scala.Function1[Long, Unit] =
new FromJavaLongConsumer(jf)
@inline def asJavaLongConsumer(sf: scala.Function1[Long, Unit]): java.util.function.LongConsumer =
new AsJavaLongConsumer(sf)
@inline def asScalaFromLongFunction[R](jf: java.util.function.LongFunction[R]): scala.Function1[Long, R] =
new FromJavaLongFunction[R](jf)
@inline def asJavaLongFunction[R](sf: scala.Function1[Long, R]): java.util.function.LongFunction[R] =
new AsJavaLongFunction[R](sf)
@inline def asScalaFromLongPredicate(jf: java.util.function.LongPredicate): scala.Function1[Long, Boolean] =
new FromJavaLongPredicate(jf)
@inline def asJavaLongPredicate(sf: scala.Function1[Long, Boolean]): java.util.function.LongPredicate =
new AsJavaLongPredicate(sf)
@inline def asScalaFromLongSupplier(jf: java.util.function.LongSupplier): scala.Function0[Long] =
new FromJavaLongSupplier(jf)
@inline def asJavaLongSupplier(sf: scala.Function0[Long]): java.util.function.LongSupplier =
new AsJavaLongSupplier(sf)
@inline def asScalaFromLongToDoubleFunction(
jf: java.util.function.LongToDoubleFunction): scala.Function1[Long, Double] = new FromJavaLongToDoubleFunction(jf)
@inline def asJavaLongToDoubleFunction(sf: scala.Function1[Long, Double]): java.util.function.LongToDoubleFunction =
new AsJavaLongToDoubleFunction(sf)
@inline def asScalaFromLongToIntFunction(jf: java.util.function.LongToIntFunction): scala.Function1[Long, Int] =
new FromJavaLongToIntFunction(jf)
@inline def asJavaLongToIntFunction(sf: scala.Function1[Long, Int]): java.util.function.LongToIntFunction =
new AsJavaLongToIntFunction(sf)
@inline def asScalaFromLongUnaryOperator(jf: java.util.function.LongUnaryOperator): scala.Function1[Long, Long] =
new FromJavaLongUnaryOperator(jf)
@inline def asJavaLongUnaryOperator(sf: scala.Function1[Long, Long]): java.util.function.LongUnaryOperator =
new AsJavaLongUnaryOperator(sf)
@inline def asScalaFromObjDoubleConsumer[T](
jf: java.util.function.ObjDoubleConsumer[T]): scala.Function2[T, Double, Unit] =
new FromJavaObjDoubleConsumer[T](jf)
@inline def asJavaObjDoubleConsumer[T](
sf: scala.Function2[T, Double, Unit]): java.util.function.ObjDoubleConsumer[T] =
new AsJavaObjDoubleConsumer[T](sf)
@inline def asScalaFromObjIntConsumer[T](jf: java.util.function.ObjIntConsumer[T]): scala.Function2[T, Int, Unit] =
new FromJavaObjIntConsumer[T](jf)
@inline def asJavaObjIntConsumer[T](sf: scala.Function2[T, Int, Unit]): java.util.function.ObjIntConsumer[T] =
new AsJavaObjIntConsumer[T](sf)
@inline def asScalaFromObjLongConsumer[T](jf: java.util.function.ObjLongConsumer[T]): scala.Function2[T, Long, Unit] =
new FromJavaObjLongConsumer[T](jf)
@inline def asJavaObjLongConsumer[T](sf: scala.Function2[T, Long, Unit]): java.util.function.ObjLongConsumer[T] =
new AsJavaObjLongConsumer[T](sf)
@inline def asScalaFromPredicate[T](jf: java.util.function.Predicate[T]): scala.Function1[T, Boolean] =
new FromJavaPredicate[T](jf)
@inline def asJavaPredicate[T](sf: scala.Function1[T, Boolean]): java.util.function.Predicate[T] =
new AsJavaPredicate[T](sf)
@inline def asScalaFromSupplier[T](jf: java.util.function.Supplier[T]): scala.Function0[T] =
new FromJavaSupplier[T](jf)
@inline def asJavaSupplier[T](sf: scala.Function0[T]): java.util.function.Supplier[T] = new AsJavaSupplier[T](sf)
@inline def asScalaFromToDoubleBiFunction[T, U](
jf: java.util.function.ToDoubleBiFunction[T, U]): scala.Function2[T, U, Double] =
new FromJavaToDoubleBiFunction[T, U](jf)
@inline def asJavaToDoubleBiFunction[T, U](
sf: scala.Function2[T, U, Double]): java.util.function.ToDoubleBiFunction[T, U] =
new AsJavaToDoubleBiFunction[T, U](sf)
@inline def asScalaFromToDoubleFunction[T](jf: java.util.function.ToDoubleFunction[T]): scala.Function1[T, Double] =
new FromJavaToDoubleFunction[T](jf)
@inline def asJavaToDoubleFunction[T](sf: scala.Function1[T, Double]): java.util.function.ToDoubleFunction[T] =
new AsJavaToDoubleFunction[T](sf)
@inline def asScalaFromToIntBiFunction[T, U](
jf: java.util.function.ToIntBiFunction[T, U]): scala.Function2[T, U, Int] = new FromJavaToIntBiFunction[T, U](jf)
@inline def asJavaToIntBiFunction[T, U](sf: scala.Function2[T, U, Int]): java.util.function.ToIntBiFunction[T, U] =
new AsJavaToIntBiFunction[T, U](sf)
@inline def asScalaFromToIntFunction[T](jf: java.util.function.ToIntFunction[T]): scala.Function1[T, Int] =
new FromJavaToIntFunction[T](jf)
@inline def asJavaToIntFunction[T](sf: scala.Function1[T, Int]): java.util.function.ToIntFunction[T] =
new AsJavaToIntFunction[T](sf)
@inline def asScalaFromToLongBiFunction[T, U](
jf: java.util.function.ToLongBiFunction[T, U]): scala.Function2[T, U, Long] =
new FromJavaToLongBiFunction[T, U](jf)
@inline def asJavaToLongBiFunction[T, U](sf: scala.Function2[T, U, Long]): java.util.function.ToLongBiFunction[T, U] =
new AsJavaToLongBiFunction[T, U](sf)
@inline def asScalaFromToLongFunction[T](jf: java.util.function.ToLongFunction[T]): scala.Function1[T, Long] =
new FromJavaToLongFunction[T](jf)
@inline def asJavaToLongFunction[T](sf: scala.Function1[T, Long]): java.util.function.ToLongFunction[T] =
new AsJavaToLongFunction[T](sf)
@inline def asScalaFromUnaryOperator[T](jf: java.util.function.UnaryOperator[T]): scala.Function1[T, T] =
new FromJavaUnaryOperator[T](jf)
@inline def asJavaUnaryOperator[T](sf: scala.Function1[T, T]): java.util.function.UnaryOperator[T] =
new AsJavaUnaryOperator[T](sf)
@inline implicit def enrichAsJavaBooleanSupplier(sf: scala.Function0[Boolean]): RichFunction0AsBooleanSupplier =
new RichFunction0AsBooleanSupplier(sf)
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoubleBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Double])(
implicit evA0: =:=[A0, Double], evA1: =:=[A1, Double]): RichFunction2AsDoubleBinaryOperator =
new RichFunction2AsDoubleBinaryOperator(sf.asInstanceOf[scala.Function2[Double, Double, Double]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoubleConsumer[A0](sf: scala.Function1[A0, Unit])(
implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleConsumer =
new RichFunction1AsDoubleConsumer(sf.asInstanceOf[scala.Function1[Double, Unit]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoublePredicate[A0](sf: scala.Function1[A0, Boolean])(
implicit evA0: =:=[A0, Double]): RichFunction1AsDoublePredicate =
new RichFunction1AsDoublePredicate(sf.asInstanceOf[scala.Function1[Double, Boolean]])
@inline implicit def enrichAsJavaDoubleSupplier(sf: scala.Function0[Double]): RichFunction0AsDoubleSupplier =
new RichFunction0AsDoubleSupplier(sf)
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoubleToIntFunction[A0](sf: scala.Function1[A0, Int])(
implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToIntFunction =
new RichFunction1AsDoubleToIntFunction(sf.asInstanceOf[scala.Function1[Double, Int]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoubleToLongFunction[A0](sf: scala.Function1[A0, Long])(
implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleToLongFunction =
new RichFunction1AsDoubleToLongFunction(sf.asInstanceOf[scala.Function1[Double, Long]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaDoubleUnaryOperator[A0](sf: scala.Function1[A0, Double])(
implicit evA0: =:=[A0, Double]): RichFunction1AsDoubleUnaryOperator =
new RichFunction1AsDoubleUnaryOperator(sf.asInstanceOf[scala.Function1[Double, Double]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Int])(
implicit evA0: =:=[A0, Int], evA1: =:=[A1, Int]): RichFunction2AsIntBinaryOperator =
new RichFunction2AsIntBinaryOperator(sf.asInstanceOf[scala.Function2[Int, Int, Int]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntConsumer[A0](sf: scala.Function1[A0, Unit])(
implicit evA0: =:=[A0, Int]): RichFunction1AsIntConsumer =
new RichFunction1AsIntConsumer(sf.asInstanceOf[scala.Function1[Int, Unit]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntPredicate[A0](sf: scala.Function1[A0, Boolean])(
implicit evA0: =:=[A0, Int]): RichFunction1AsIntPredicate =
new RichFunction1AsIntPredicate(sf.asInstanceOf[scala.Function1[Int, Boolean]])
@inline implicit def enrichAsJavaIntSupplier(sf: scala.Function0[Int]): RichFunction0AsIntSupplier =
new RichFunction0AsIntSupplier(sf)
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntToDoubleFunction[A0](sf: scala.Function1[A0, Double])(
implicit evA0: =:=[A0, Int]): RichFunction1AsIntToDoubleFunction =
new RichFunction1AsIntToDoubleFunction(sf.asInstanceOf[scala.Function1[Int, Double]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntToLongFunction[A0](sf: scala.Function1[A0, Long])(
implicit evA0: =:=[A0, Int]): RichFunction1AsIntToLongFunction =
new RichFunction1AsIntToLongFunction(sf.asInstanceOf[scala.Function1[Int, Long]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaIntUnaryOperator[A0](sf: scala.Function1[A0, Int])(
implicit evA0: =:=[A0, Int]): RichFunction1AsIntUnaryOperator =
new RichFunction1AsIntUnaryOperator(sf.asInstanceOf[scala.Function1[Int, Int]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongBinaryOperator[A0, A1](sf: scala.Function2[A0, A1, Long])(
implicit evA0: =:=[A0, Long], evA1: =:=[A1, Long]): RichFunction2AsLongBinaryOperator =
new RichFunction2AsLongBinaryOperator(sf.asInstanceOf[scala.Function2[Long, Long, Long]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongConsumer[A0](sf: scala.Function1[A0, Unit])(
implicit evA0: =:=[A0, Long]): RichFunction1AsLongConsumer =
new RichFunction1AsLongConsumer(sf.asInstanceOf[scala.Function1[Long, Unit]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongPredicate[A0](sf: scala.Function1[A0, Boolean])(
implicit evA0: =:=[A0, Long]): RichFunction1AsLongPredicate =
new RichFunction1AsLongPredicate(sf.asInstanceOf[scala.Function1[Long, Boolean]])
@inline implicit def enrichAsJavaLongSupplier(sf: scala.Function0[Long]): RichFunction0AsLongSupplier =
new RichFunction0AsLongSupplier(sf)
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongToDoubleFunction[A0](sf: scala.Function1[A0, Double])(
implicit evA0: =:=[A0, Long]): RichFunction1AsLongToDoubleFunction =
new RichFunction1AsLongToDoubleFunction(sf.asInstanceOf[scala.Function1[Long, Double]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongToIntFunction[A0](sf: scala.Function1[A0, Int])(
implicit evA0: =:=[A0, Long]): RichFunction1AsLongToIntFunction =
new RichFunction1AsLongToIntFunction(sf.asInstanceOf[scala.Function1[Long, Int]])
@nowarn("msg=never used")
@inline implicit def enrichAsJavaLongUnaryOperator[A0](sf: scala.Function1[A0, Long])(
implicit evA0: =:=[A0, Long]): RichFunction1AsLongUnaryOperator =
new RichFunction1AsLongUnaryOperator(sf.asInstanceOf[scala.Function1[Long, Long]])
@inline implicit def enrichAsScalaFromBiConsumer[T, U](
jf: java.util.function.BiConsumer[T, U]): RichBiConsumerAsFunction2[T, U] =
new RichBiConsumerAsFunction2[T, U](jf)
@inline implicit def enrichAsScalaFromBiFunction[T, U, R](
jf: java.util.function.BiFunction[T, U, R]): RichBiFunctionAsFunction2[T, U, R] =
new RichBiFunctionAsFunction2[T, U, R](jf)
@inline implicit def enrichAsScalaFromBiPredicate[T, U](
jf: java.util.function.BiPredicate[T, U]): RichBiPredicateAsFunction2[T, U] =
new RichBiPredicateAsFunction2[T, U](jf)
@inline implicit def enrichAsScalaFromBinaryOperator[T](
jf: java.util.function.BinaryOperator[T]): RichBinaryOperatorAsFunction2[T] =
new RichBinaryOperatorAsFunction2[T](jf)
@inline implicit def enrichAsScalaFromBooleanSupplier(
jf: java.util.function.BooleanSupplier): RichBooleanSupplierAsFunction0 = new RichBooleanSupplierAsFunction0(jf)
@inline implicit def enrichAsScalaFromConsumer[T](jf: java.util.function.Consumer[T]): RichConsumerAsFunction1[T] =
new RichConsumerAsFunction1[T](jf)
@inline implicit def enrichAsScalaFromDoubleBinaryOperator(
jf: java.util.function.DoubleBinaryOperator): RichDoubleBinaryOperatorAsFunction2 =
new RichDoubleBinaryOperatorAsFunction2(jf)
@inline implicit def enrichAsScalaFromDoubleConsumer(
jf: java.util.function.DoubleConsumer): RichDoubleConsumerAsFunction1 = new RichDoubleConsumerAsFunction1(jf)
@inline implicit def enrichAsScalaFromDoubleFunction[R](
jf: java.util.function.DoubleFunction[R]): RichDoubleFunctionAsFunction1[R] =
new RichDoubleFunctionAsFunction1[R](jf)
@inline implicit def enrichAsScalaFromDoublePredicate(
jf: java.util.function.DoublePredicate): RichDoublePredicateAsFunction1 = new RichDoublePredicateAsFunction1(jf)
@inline implicit def enrichAsScalaFromDoubleSupplier(
jf: java.util.function.DoubleSupplier): RichDoubleSupplierAsFunction0 = new RichDoubleSupplierAsFunction0(jf)
@inline implicit def enrichAsScalaFromDoubleToIntFunction(
jf: java.util.function.DoubleToIntFunction): RichDoubleToIntFunctionAsFunction1 =
new RichDoubleToIntFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromDoubleToLongFunction(
jf: java.util.function.DoubleToLongFunction): RichDoubleToLongFunctionAsFunction1 =
new RichDoubleToLongFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromDoubleUnaryOperator(
jf: java.util.function.DoubleUnaryOperator): RichDoubleUnaryOperatorAsFunction1 =
new RichDoubleUnaryOperatorAsFunction1(jf)
@inline implicit def enrichAsScalaFromFunction[T, R](
jf: java.util.function.Function[T, R]): RichFunctionAsFunction1[T, R] = new RichFunctionAsFunction1[T, R](jf)
@inline implicit def enrichAsScalaFromIntBinaryOperator(
jf: java.util.function.IntBinaryOperator): RichIntBinaryOperatorAsFunction2 =
new RichIntBinaryOperatorAsFunction2(jf)
@inline implicit def enrichAsScalaFromIntConsumer(jf: java.util.function.IntConsumer): RichIntConsumerAsFunction1 =
new RichIntConsumerAsFunction1(jf)
@inline implicit def enrichAsScalaFromIntFunction[R](
jf: java.util.function.IntFunction[R]): RichIntFunctionAsFunction1[R] = new RichIntFunctionAsFunction1[R](jf)
@inline implicit def enrichAsScalaFromIntPredicate(jf: java.util.function.IntPredicate): RichIntPredicateAsFunction1 =
new RichIntPredicateAsFunction1(jf)
@inline implicit def enrichAsScalaFromIntSupplier(jf: java.util.function.IntSupplier): RichIntSupplierAsFunction0 =
new RichIntSupplierAsFunction0(jf)
@inline implicit def enrichAsScalaFromIntToDoubleFunction(
jf: java.util.function.IntToDoubleFunction): RichIntToDoubleFunctionAsFunction1 =
new RichIntToDoubleFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromIntToLongFunction(
jf: java.util.function.IntToLongFunction): RichIntToLongFunctionAsFunction1 =
new RichIntToLongFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromIntUnaryOperator(
jf: java.util.function.IntUnaryOperator): RichIntUnaryOperatorAsFunction1 =
new RichIntUnaryOperatorAsFunction1(jf)
@inline implicit def enrichAsScalaFromLongBinaryOperator(
jf: java.util.function.LongBinaryOperator): RichLongBinaryOperatorAsFunction2 =
new RichLongBinaryOperatorAsFunction2(jf)
@inline implicit def enrichAsScalaFromLongConsumer(jf: java.util.function.LongConsumer): RichLongConsumerAsFunction1 =
new RichLongConsumerAsFunction1(jf)
@inline implicit def enrichAsScalaFromLongFunction[R](
jf: java.util.function.LongFunction[R]): RichLongFunctionAsFunction1[R] = new RichLongFunctionAsFunction1[R](jf)
@inline implicit def enrichAsScalaFromLongPredicate(
jf: java.util.function.LongPredicate): RichLongPredicateAsFunction1 = new RichLongPredicateAsFunction1(jf)
@inline implicit def enrichAsScalaFromLongSupplier(jf: java.util.function.LongSupplier): RichLongSupplierAsFunction0 =
new RichLongSupplierAsFunction0(jf)
@inline implicit def enrichAsScalaFromLongToDoubleFunction(
jf: java.util.function.LongToDoubleFunction): RichLongToDoubleFunctionAsFunction1 =
new RichLongToDoubleFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromLongToIntFunction(
jf: java.util.function.LongToIntFunction): RichLongToIntFunctionAsFunction1 =
new RichLongToIntFunctionAsFunction1(jf)
@inline implicit def enrichAsScalaFromLongUnaryOperator(
jf: java.util.function.LongUnaryOperator): RichLongUnaryOperatorAsFunction1 =
new RichLongUnaryOperatorAsFunction1(jf)
@inline implicit def enrichAsScalaFromObjDoubleConsumer[T](
jf: java.util.function.ObjDoubleConsumer[T]): RichObjDoubleConsumerAsFunction2[T] =
new RichObjDoubleConsumerAsFunction2[T](jf)
@inline implicit def enrichAsScalaFromObjIntConsumer[T](
jf: java.util.function.ObjIntConsumer[T]): RichObjIntConsumerAsFunction2[T] =
new RichObjIntConsumerAsFunction2[T](jf)
@inline implicit def enrichAsScalaFromObjLongConsumer[T](
jf: java.util.function.ObjLongConsumer[T]): RichObjLongConsumerAsFunction2[T] =
new RichObjLongConsumerAsFunction2[T](jf)
@inline implicit def enrichAsScalaFromPredicate[T](jf: java.util.function.Predicate[T]): RichPredicateAsFunction1[T] =
new RichPredicateAsFunction1[T](jf)
@inline implicit def enrichAsScalaFromSupplier[T](jf: java.util.function.Supplier[T]): RichSupplierAsFunction0[T] =
new RichSupplierAsFunction0[T](jf)
@inline implicit def enrichAsScalaFromToDoubleBiFunction[T, U](
jf: java.util.function.ToDoubleBiFunction[T, U]): RichToDoubleBiFunctionAsFunction2[T, U] =
new RichToDoubleBiFunctionAsFunction2[T, U](jf)
@inline implicit def enrichAsScalaFromToDoubleFunction[T](
jf: java.util.function.ToDoubleFunction[T]): RichToDoubleFunctionAsFunction1[T] =
new RichToDoubleFunctionAsFunction1[T](jf)
@inline implicit def enrichAsScalaFromToIntBiFunction[T, U](
jf: java.util.function.ToIntBiFunction[T, U]): RichToIntBiFunctionAsFunction2[T, U] =
new RichToIntBiFunctionAsFunction2[T, U](jf)
@inline implicit def enrichAsScalaFromToIntFunction[T](
jf: java.util.function.ToIntFunction[T]): RichToIntFunctionAsFunction1[T] =
new RichToIntFunctionAsFunction1[T](jf)
@inline implicit def enrichAsScalaFromToLongBiFunction[T, U](
jf: java.util.function.ToLongBiFunction[T, U]): RichToLongBiFunctionAsFunction2[T, U] =
new RichToLongBiFunctionAsFunction2[T, U](jf)
@inline implicit def enrichAsScalaFromToLongFunction[T](
jf: java.util.function.ToLongFunction[T]): RichToLongFunctionAsFunction1[T] =
new RichToLongFunctionAsFunction1[T](jf)
@inline implicit def enrichAsScalaFromUnaryOperator[T](
jf: java.util.function.UnaryOperator[T]): RichUnaryOperatorAsFunction1[T] =
new RichUnaryOperatorAsFunction1[T](jf)
}
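
The object above (and its Scala 2.13 counterpart later in this diff) mirrors the enrichment surface of scala.jdk.FunctionConverters. A minimal usage sketch, hypothetical rather than taken from this commit, assuming caller code under the org.apache.pekko package since the object is private[pekko]:

import org.apache.pekko.util.FunctionConverters._

// Scala function handed to a Java API that expects java.util.function.Function
val quote: java.util.function.Function[String, String] = ((s: String) => "'" + s + "'").asJava

// Java-supplied Predicate adapted back into a plain Scala function
val javaPredicate: java.util.function.Predicate[String] = (s: String) => s.isEmpty
val isBlank: String => Boolean = javaPredicate.asScala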

View file

@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
*/
@InternalStableApi
private[pekko] object FutureConverters {
def asJava[T](f: Future[T]): CompletionStage[T] = scala.compat.java8.FutureConverters.toJava(f)
implicit final class FutureOps[T](private val f: Future[T]) extends AnyVal {
@inline def asJava: CompletionStage[T] = FutureConverters.asJava(f)
}
def asScala[T](cs: CompletionStage[T]): Future[T] = scala.compat.java8.FutureConverters.toScala(cs)
implicit final class CompletionStageOps[T](private val cs: CompletionStage[T]) extends AnyVal {
@inline def asScala: Future[T] = FutureConverters.asScala(cs)
}
}
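
This Scala 2.12 variant delegates to scala-java8-compat but exposes the asJava/asScala names used by scala.jdk.FutureConverters, so call sites stay identical across Scala versions. A hypothetical usage sketch, assuming code under the org.apache.pekko package since the object is private[pekko]:

import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import org.apache.pekko.util.FutureConverters._

// Scala Future exposed to Java callers as a CompletionStage
val stage: CompletionStage[Int] = Future.successful(42).asJava

// CompletionStage produced by Java code, consumed from Scala
val future: Future[Int] = stage.asScala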

View file

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import java.util.Optional
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
*/
@InternalStableApi
private[pekko] object OptionConverters {
implicit final class RichOptional[A](private val o: java.util.Optional[A]) extends AnyVal {
@inline def toScala: Option[A] = scala.compat.java8.OptionConverters.RichOptionalGeneric(o).asScala
}
implicit final class RichOption[A](private val o: Option[A]) extends AnyVal {
@inline def toJava: Optional[A] = scala.compat.java8.OptionConverters.RichOptionForJava8(o).asJava
}
}
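
This shim keeps the toScala/toJava names of scala.jdk.OptionConverters rather than the asScala/asJava names of scala.compat.java8, which is why call sites throughout the rest of this diff are renamed accordingly. A hypothetical usage sketch, assuming code under the org.apache.pekko package since the object is private[pekko]:

import java.util.Optional
import org.apache.pekko.util.OptionConverters._

// Optional coming from a Java API, converted to a Scala Option
val maybeHost: Option[String] = Optional.of("localhost").toScala

// Option handed back to Java as an Optional
val javaHost: Optional[String] = Some("localhost").toJava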

View file

@ -0,0 +1,22 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import scala.jdk.Priority0FunctionExtensions
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
*/
@InternalStableApi
private[pekko] object FunctionConverters extends Priority0FunctionExtensions

View file

@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
*/
@InternalStableApi
private[pekko] object FutureConverters {
import scala.jdk.javaapi
def asJava[T](f: Future[T]): CompletionStage[T] = javaapi.FutureConverters.asJava(f)
implicit final class FutureOps[T](private val f: Future[T]) extends AnyVal {
@inline def asJava: CompletionStage[T] = FutureConverters.asJava(f)
}
def asScala[T](cs: CompletionStage[T]): Future[T] = javaapi.FutureConverters.asScala(cs)
implicit final class CompletionStageOps[T](private val cs: CompletionStage[T]) extends AnyVal {
@inline def asScala: Future[T] = FutureConverters.asScala(cs)
}
}

View file

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* license agreements; and to You under the Apache License, version 2.0:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* This file is part of the Apache Pekko project, derived from Akka.
*/
package org.apache.pekko.util
import org.apache.pekko.annotation.InternalStableApi
import java.util.Optional
/**
* INTERNAL API
*
* Remove this once Scala 2.12 support is dropped since all methods are in Scala 2.13+ stdlib
*/
@InternalStableApi
private[pekko] object OptionConverters {
implicit final class RichOptional[A](private val o: java.util.Optional[A]) extends AnyVal {
@inline def toScala: Option[A] = scala.jdk.OptionConverters.RichOptional(o).toScala
}
implicit final class RichOption[A](private val o: Option[A]) extends AnyVal {
@inline def toJava: Optional[A] = scala.jdk.OptionConverters.RichOption(o).toJava
}
}

View file

@ -284,8 +284,8 @@ abstract class AbstractActor extends Actor {
@throws(classOf[Exception])
@nowarn("msg=deprecated")
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
import scala.compat.java8.OptionConverters._
preRestart(reason, message.asJava)
import pekko.util.OptionConverters._
preRestart(reason, message.toJava)
}
/**
@ -296,8 +296,8 @@ abstract class AbstractActor extends Actor {
*/
@throws(classOf[Exception])
def preRestart(reason: Throwable, message: Optional[Any]): Unit = {
import scala.compat.java8.OptionConverters._
super.preRestart(reason, message.asScala)
import pekko.util.OptionConverters._
super.preRestart(reason, message.toScala)
}
/**

View file

@ -100,8 +100,8 @@ final case class ActorIdentity(correlationId: Any, ref: Option[ActorRef]) {
* not defined if no actor matched the request.
*/
def getActorRef: Optional[ActorRef] = {
import scala.compat.java8.OptionConverters._
ref.asJava
import pekko.util.OptionConverters._
ref.toJava
}
}

View file

@ -18,7 +18,6 @@ import java.util.regex.Pattern
import scala.annotation.tailrec
import scala.collection.immutable
import scala.compat.java8.FutureConverters
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.concurrent.duration._
@ -33,6 +32,7 @@ import pekko.pattern.ask
import pekko.routing.MurmurHash
import pekko.util.{ Helpers, JavaDurationConverters, Timeout }
import pekko.util.ccompat._
import pekko.util.FutureConverters
/**
* An ActorSelection is a logical view of a section of an ActorSystem's tree of Actors,
@ -109,7 +109,7 @@ abstract class ActorSelection extends Serializable {
*/
@deprecated("Use the overloaded method resolveOne which accepts java.time.Duration instead.", since = "Akka 2.5.20")
def resolveOneCS(timeout: FiniteDuration): CompletionStage[ActorRef] =
FutureConverters.toJava[ActorRef](resolveOne(timeout))
FutureConverters.asJava[ActorRef](resolveOne(timeout))
/**
* Java API for [[#resolveOne]]
@ -134,7 +134,7 @@ abstract class ActorSelection extends Serializable {
*/
def resolveOne(timeout: java.time.Duration): CompletionStage[ActorRef] = {
import JavaDurationConverters._
FutureConverters.toJava[ActorRef](resolveOne(timeout.asScala))
FutureConverters.asJava[ActorRef](resolveOne(timeout.asScala))
}
override def toString: String = {

View file

@ -20,8 +20,6 @@ import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
import scala.collection.immutable
import scala.compat.java8.FutureConverters
import scala.compat.java8.OptionConverters._
import scala.concurrent.{ ExecutionContext, ExecutionContextExecutor, Future, Promise }
import scala.concurrent.blocking
import scala.concurrent.duration.Duration
@ -42,6 +40,8 @@ import pekko.event.Logging.DefaultLogger
import pekko.japi.Util.immutableSeq
import pekko.serialization.SerializationExtension
import pekko.util._
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
import pekko.util.Helpers.toRootLowerCase
object BootstrapSetup {
@ -80,7 +80,7 @@ object BootstrapSetup {
classLoader: Optional[ClassLoader],
config: Optional[Config],
defaultExecutionContext: Optional[ExecutionContext]): BootstrapSetup =
apply(classLoader.asScala, config.asScala, defaultExecutionContext.asScala)
apply(classLoader.toScala, config.toScala, defaultExecutionContext.toScala)
/**
* Java API: Short for using custom config but keeping default classloader and default execution context
@ -981,7 +981,7 @@ private[pekko] class ActorSystemImpl(
private[this] final val terminationCallbacks = new TerminationCallbacks(provider.terminationFuture)(dispatcher)
override def whenTerminated: Future[Terminated] = terminationCallbacks.terminationFuture
override def getWhenTerminated: CompletionStage[Terminated] = FutureConverters.toJava(whenTerminated)
override def getWhenTerminated: CompletionStage[Terminated] = whenTerminated.asJava
def lookupRoot: InternalActorRef = provider.rootGuardian
def guardian: LocalActorRef = provider.guardian
def systemGuardian: LocalActorRef = provider.systemGuardian

View file

@ -19,10 +19,10 @@ import java.util.Optional
import scala.annotation.tailrec
import scala.collection.immutable
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.util.OptionConverters._
/**
* The address specifies the physical location under which an Actor can be
@ -54,12 +54,12 @@ final case class Address private[pekko] (protocol: String, system: String, host:
/**
* Java API: The hostname if specified or empty optional if not
*/
def getHost(): Optional[String] = host.asJava
def getHost(): Optional[String] = host.toJava
/**
* Java API: The port if specified or empty optional if not
*/
def getPort(): Optional[Integer] = port.asJava.asInstanceOf[Optional[Integer]]
def getPort(): Optional[Integer] = port.toJava.asInstanceOf[Optional[Integer]]
/**
* Returns true if this Address is only defined locally. It is not safe to send locally scoped addresses to remote

View file

@ -20,8 +20,6 @@ import java.util.concurrent.atomic.AtomicReference
import java.util.function.Supplier
import scala.annotation.tailrec
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import scala.concurrent.duration._
import scala.concurrent.duration.FiniteDuration
@ -37,7 +35,9 @@ import pekko.annotation.InternalApi
import pekko.dispatch.ExecutionContexts
import pekko.event.Logging
import pekko.pattern.after
import pekko.util.OptionConverters._
import pekko.util.OptionVal
import pekko.util.FutureConverters._
object CoordinatedShutdown extends ExtensionId[CoordinatedShutdown] with ExtensionIdProvider {
@ -588,7 +588,7 @@ final class CoordinatedShutdown private[pekko] (
* to a later stage with confidence that they will be run.
*/
def addCancellableTask(phase: String, taskName: String, task: Supplier[CompletionStage[Done]]): Cancellable = {
addCancellableTask(phase, taskName)(() => task.get().toScala)
addCancellableTask(phase, taskName)(() => task.get().asScala)
}
/**
@ -628,7 +628,7 @@ final class CoordinatedShutdown private[pekko] (
* and it will be performed.
*/
def addTask(phase: String, taskName: String, task: Supplier[CompletionStage[Done]]): Unit =
addTask(phase, taskName)(() => task.get().toScala)
addTask(phase, taskName)(() => task.get().asScala)
/**
* Scala API: Add an actor termination task to a phase. It doesn't remove
@ -674,7 +674,7 @@ final class CoordinatedShutdown private[pekko] (
* and it will be performed.
*/
def addActorTerminationTask(phase: String, taskName: String, actor: ActorRef, stopMsg: Optional[Any]): Unit =
addActorTerminationTask(phase, taskName, actor, stopMsg.asScala)
addActorTerminationTask(phase, taskName, actor, stopMsg.toScala)
/**
* The `Reason` for the shutdown as passed to the `run` method. `None` if the shutdown
@ -686,7 +686,7 @@ final class CoordinatedShutdown private[pekko] (
* The `Reason` for the shutdown as passed to the `run` method. `Optional.empty` if the shutdown
* has not been started.
*/
def getShutdownReason(): Optional[Reason] = shutdownReason().asJava
def getShutdownReason(): Optional[Reason] = shutdownReason().toJava
/**
* Scala API: Run tasks of all phases. The returned
@ -707,7 +707,7 @@ final class CoordinatedShutdown private[pekko] (
*
* It's safe to call this method multiple times. It will only run the shutdown sequence once.
*/
def runAll(reason: Reason): CompletionStage[Done] = run(reason).toJava
def runAll(reason: Reason): CompletionStage[Done] = run(reason).asJava
@deprecated("Use the method with `reason` parameter instead", since = "Akka 2.5.8")
def runAll(): CompletionStage[Done] = runAll(UnknownReason)
@ -796,7 +796,7 @@ final class CoordinatedShutdown private[pekko] (
* It's safe to call this method multiple times. It will only run the shutdown sequence once.
*/
def run(reason: Reason, fromPhase: Optional[String]): CompletionStage[Done] =
run(reason, fromPhase.asScala).toJava
run(reason, fromPhase.toScala).asJava
@deprecated("Use the method with `reason` parameter instead", since = "Akka 2.5.8")
def run(fromPhase: Optional[String]): CompletionStage[Done] =

View file

@ -16,11 +16,11 @@ package org.apache.pekko.actor.setup
import java.util.Optional
import scala.annotation.varargs
import scala.compat.java8.OptionConverters._
import scala.reflect.ClassTag
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.util.OptionConverters._
/**
* Marker supertype for a setup part that can be put inside [[ActorSystemSetup]], if a specific concrete setup
@ -67,7 +67,7 @@ final class ActorSystemSetup private[pekko] (@InternalApi private[pekko] val set
* Java API: Extract a concrete [[Setup]] of type `T` if it is defined in the settings.
*/
def get[T <: Setup](clazz: Class[T]): Optional[T] = {
setups.get(clazz).map(_.asInstanceOf[T]).asJava
setups.get(clazz).map(_.asInstanceOf[T]).toJava
}
/**

View file

@ -19,7 +19,6 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger, AtomicLong }
import java.util.function.BiFunction
import java.util.function.Consumer
import scala.annotation.nowarn
import scala.compat.java8.FutureConverters
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import scala.concurrent.TimeoutException
import scala.concurrent.duration._
@ -31,6 +30,7 @@ import pekko.PekkoException
import pekko.actor.{ ExtendedActorSystem, Scheduler }
import pekko.dispatch.ExecutionContexts.parasitic
import pekko.pattern.internal.{ CircuitBreakerNoopTelemetry, CircuitBreakerTelemetry }
import pekko.util.FutureConverters
import pekko.util.JavaDurationConverters._
import pekko.util.Unsafe
@ -403,8 +403,8 @@ class CircuitBreaker(
* `scala.concurrent.TimeoutException` if the call timed out
*/
def callWithCircuitBreakerCS[T](body: Callable[CompletionStage[T]]): CompletionStage[T] =
FutureConverters.toJava[T](callWithCircuitBreaker(new Callable[Future[T]] {
override def call(): Future[T] = FutureConverters.toScala(body.call())
FutureConverters.asJava[T](callWithCircuitBreaker(new Callable[Future[T]] {
override def call(): Future[T] = FutureConverters.asScala(body.call())
}))
/**
@ -418,8 +418,8 @@ class CircuitBreaker(
def callWithCircuitBreakerCS[T](
body: Callable[CompletionStage[T]],
defineFailureFn: BiFunction[Optional[T], Optional[Throwable], java.lang.Boolean]): CompletionStage[T] =
FutureConverters.toJava[T](callWithCircuitBreaker(new Callable[Future[T]] {
override def call(): Future[T] = FutureConverters.toScala(body.call())
FutureConverters.asJava[T](callWithCircuitBreaker(new Callable[Future[T]] {
override def call(): Future[T] = FutureConverters.asScala(body.call())
}, defineFailureFn))
/**

View file

@ -16,11 +16,11 @@ package org.apache.pekko.pattern
import java.util.Optional
import java.util.concurrent.{ Callable, CompletionStage, TimeUnit }
import scala.compat.java8.FutureConverters._
import scala.concurrent.ExecutionContext
import org.apache.pekko
import pekko.actor.{ ActorSelection, ClassicActorSystemProvider, Scheduler }
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
/**
@ -104,7 +104,7 @@ object Patterns {
* }}}
*/
def ask(actor: ActorRef, message: Any, timeout: java.time.Duration): CompletionStage[AnyRef] =
scalaAsk(actor, message)(timeout.asScala).toJava.asInstanceOf[CompletionStage[AnyRef]]
scalaAsk(actor, message)(timeout.asScala).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* Use for messages whose response is known to be a [[pekko.pattern.StatusReply]]. When a [[pekko.pattern.StatusReply#success]] response
@ -112,7 +112,7 @@ object Patterns {
* failed.
*/
def askWithStatus(actor: ActorRef, message: Any, timeout: java.time.Duration): CompletionStage[AnyRef] =
scalaAskWithStatus(actor, message)(timeout.asScala).toJava.asInstanceOf[CompletionStage[AnyRef]]
scalaAskWithStatus(actor, message)(timeout.asScala).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* A variation of ask which allows to implement "replyTo" pattern by including
@ -147,7 +147,7 @@ object Patterns {
actor: ActorRef,
messageFactory: japi.function.Function[ActorRef, Any],
timeout: java.time.Duration): CompletionStage[AnyRef] =
extended.ask(actor, messageFactory.apply _)(Timeout.create(timeout)).toJava.asInstanceOf[CompletionStage[AnyRef]]
extended.ask(actor, messageFactory.apply _)(Timeout.create(timeout)).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* <i>Java API for `org.apache.pekko.pattern.ask`:</i>
@ -262,7 +262,7 @@ object Patterns {
* }}}
*/
def ask(selection: ActorSelection, message: Any, timeout: java.time.Duration): CompletionStage[AnyRef] =
scalaAsk(selection, message)(timeout.asScala).toJava.asInstanceOf[CompletionStage[AnyRef]]
scalaAsk(selection, message)(timeout.asScala).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* <i>Java API for `org.apache.pekko.pattern.ask`:</i>
@ -330,7 +330,7 @@ object Patterns {
selection: ActorSelection,
messageFactory: japi.Function[ActorRef, Any],
timeout: java.time.Duration): CompletionStage[AnyRef] =
extended.ask(selection, messageFactory.apply _)(timeout.asScala).toJava.asInstanceOf[CompletionStage[AnyRef]]
extended.ask(selection, messageFactory.apply _)(timeout.asScala).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* Register an onComplete callback on this [[scala.concurrent.Future]] to send
@ -395,7 +395,7 @@ object Patterns {
* is completed with failure [[pekko.pattern.AskTimeoutException]].
*/
def gracefulStop(target: ActorRef, timeout: java.time.Duration): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout.asScala).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout.asScala).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[scala.concurrent.Future]] that will be completed with success (value `true`) when
@ -430,7 +430,7 @@ object Patterns {
target: ActorRef,
timeout: java.time.Duration,
stopMessage: Any): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout.asScala, stopMessage).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout.asScala, stopMessage).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided Callable
@ -495,7 +495,7 @@ object Patterns {
*/
def retry[T](attempt: Callable[CompletionStage[T]], attempts: Int, ec: ExecutionContext): CompletionStage[T] = {
require(attempt != null, "Parameter attempt should not be null.")
scalaRetry(() => attempt.call().toScala, attempts)(ec).toJava
scalaRetry(() => attempt.call().asScala, attempts)(ec).asJava
}
/**
@ -557,9 +557,9 @@ object Patterns {
require(attempt != null, "Parameter attempt should not be null.")
require(minBackoff != null, "Parameter minBackoff should not be null.")
require(maxBackoff != null, "Parameter minBackoff should not be null.")
scalaRetry(() => attempt.call().toScala, attempts, minBackoff.asScala, maxBackoff.asScala, randomFactor)(
scalaRetry(() => attempt.call().asScala, attempts, minBackoff.asScala, maxBackoff.asScala, randomFactor)(
ec,
scheduler).toJava
scheduler).asJava
}
/**
@ -613,7 +613,7 @@ object Patterns {
scheduler: Scheduler,
ec: ExecutionContext): CompletionStage[T] = {
require(attempt != null, "Parameter attempt should not be null.")
scalaRetry(() => attempt.call().toScala, attempts, delay.asScala)(ec, scheduler).toJava
scalaRetry(() => attempt.call().asScala, attempts, delay.asScala)(ec, scheduler).asJava
}
/**
@ -635,12 +635,12 @@ object Patterns {
delayFunction: java.util.function.IntFunction[Optional[java.time.Duration]],
scheduler: Scheduler,
context: ExecutionContext): CompletionStage[T] = {
import scala.compat.java8.OptionConverters._
import pekko.util.OptionConverters._
require(attempt != null, "Parameter attempt should not be null.")
scalaRetry(
() => attempt.call().toScala,
() => attempt.call().asScala,
attempts,
attempted => delayFunction.apply(attempted).asScala.map(_.asScala))(context, scheduler).toJava
attempted => delayFunction.apply(attempted).toScala.map(_.asScala))(context, scheduler).asJava
}
}
@ -687,7 +687,7 @@ object PatternsCS {
*/
@deprecated("Use the overloaded one which accepts java.time.Duration instead.", since = "Akka 2.5.15")
def ask(actor: ActorRef, message: Any, timeout: Timeout): CompletionStage[AnyRef] =
scalaAsk(actor, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]]
scalaAsk(actor, message)(timeout).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* <i>Java API for `org.apache.pekko.pattern.ask`:</i>
@ -740,7 +740,7 @@ object PatternsCS {
actor: ActorRef,
messageFactory: japi.function.Function[ActorRef, Any],
timeout: Timeout): CompletionStage[AnyRef] =
extended.ask(actor, messageFactory.apply _)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]]
extended.ask(actor, messageFactory.apply _)(timeout).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* A variation of ask which allows to implement "replyTo" pattern by including
@ -762,7 +762,7 @@ object PatternsCS {
actor: ActorRef,
messageFactory: japi.function.Function[ActorRef, Any],
timeout: java.time.Duration): CompletionStage[AnyRef] =
extended.ask(actor, messageFactory.apply _)(Timeout.create(timeout)).toJava.asInstanceOf[CompletionStage[AnyRef]]
extended.ask(actor, messageFactory.apply _)(Timeout.create(timeout)).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* <i>Java API for `org.apache.pekko.pattern.ask`:</i>
@ -793,7 +793,7 @@ object PatternsCS {
*/
@deprecated("Use Pattens.ask which accepts java.time.Duration instead.", since = "Akka 2.5.19")
def ask(actor: ActorRef, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] =
scalaAsk(actor, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava
scalaAsk(actor, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).asJava
.asInstanceOf[CompletionStage[AnyRef]]
/**
@ -847,7 +847,7 @@ object PatternsCS {
*/
@deprecated("Use the overloaded one which accepts java.time.Duration instead.", since = "Akka 2.5.15")
def ask(selection: ActorSelection, message: Any, timeout: Timeout): CompletionStage[AnyRef] =
scalaAsk(selection, message)(timeout).toJava.asInstanceOf[CompletionStage[AnyRef]]
scalaAsk(selection, message)(timeout).asJava.asInstanceOf[CompletionStage[AnyRef]]
/**
* <i>Java API for `org.apache.pekko.pattern.ask`:</i>
@ -909,7 +909,7 @@ object PatternsCS {
*/
@deprecated("Use Pattens.ask which accepts java.time.Duration instead.", since = "Akka 2.5.19")
def ask(selection: ActorSelection, message: Any, timeoutMillis: Long): CompletionStage[AnyRef] =
scalaAsk(selection, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).toJava
scalaAsk(selection, message)(new Timeout(timeoutMillis, TimeUnit.MILLISECONDS)).asJava
.asInstanceOf[CompletionStage[AnyRef]]
/**
@ -930,7 +930,7 @@ object PatternsCS {
timeoutMillis: Long): CompletionStage[AnyRef] =
extended
.ask(selection, messageFactory.apply _)(Timeout(timeoutMillis.millis))
.toJava
.asJava
.asInstanceOf[CompletionStage[AnyRef]]
/**
@ -966,7 +966,7 @@ object PatternsCS {
*/
@deprecated("Use the overloaded one which accepts java.time.Duration instead.", since = "Akka 2.5.12")
def gracefulStop(target: ActorRef, timeout: FiniteDuration): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when
@ -980,7 +980,7 @@ object PatternsCS {
*/
@deprecated("Use Patterns.gracefulStop instead.", since = "Akka 2.5.19")
def gracefulStop(target: ActorRef, timeout: java.time.Duration): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout.asScala).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout.asScala).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when
@ -997,7 +997,7 @@ object PatternsCS {
*/
@deprecated("Use the overloaded one which accepts java.time.Duration instead.", since = "Akka 2.5.12")
def gracefulStop(target: ActorRef, timeout: FiniteDuration, stopMessage: Any): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout, stopMessage).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout, stopMessage).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[java.util.concurrent.CompletionStage]] that will be completed with success (value `true`) when
@ -1017,7 +1017,7 @@ object PatternsCS {
target: ActorRef,
timeout: java.time.Duration,
stopMessage: Any): CompletionStage[java.lang.Boolean] =
scalaGracefulStop(target, timeout.asScala, stopMessage).toJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
scalaGracefulStop(target, timeout.asScala, stopMessage).asJava.asInstanceOf[CompletionStage[java.lang.Boolean]]
/**
* Returns a [[java.util.concurrent.CompletionStage]] that will be completed with the success or failure of the provided Callable
@ -1086,5 +1086,5 @@ object PatternsCS {
delay: java.time.Duration,
scheduler: Scheduler,
ec: ExecutionContext): CompletionStage[T] =
scalaRetry(() => attempt.call().toScala, attempts, delay.asScala)(ec, scheduler).toJava
scalaRetry(() => attempt.call().asScala, attempts, delay.asScala)(ec, scheduler).asJava
}

View file

@ -76,7 +76,7 @@ abstract class AsyncSerializerWithStringManifest(system: ExtendedActorSystem)
*/
abstract class AsyncSerializerWithStringManifestCS(system: ExtendedActorSystem)
extends AsyncSerializerWithStringManifest(system) {
import scala.compat.java8.FutureConverters._
import pekko.util.FutureConverters._
def toBinaryAsyncCS(o: AnyRef): CompletionStage[Array[Byte]]
@ -86,11 +86,11 @@ abstract class AsyncSerializerWithStringManifestCS(system: ExtendedActorSystem)
* Delegates to [[AsyncSerializerWithStringManifestCS#toBinaryAsyncCS]]
*/
final def toBinaryAsync(o: AnyRef): Future[Array[Byte]] =
toBinaryAsyncCS(o).toScala
toBinaryAsyncCS(o).asScala
/**
* Delegates to [[AsyncSerializerWithStringManifestCS#fromBinaryAsyncCS]]
*/
def fromBinaryAsync(bytes: Array[Byte], manifest: String): Future[AnyRef] =
fromBinaryAsyncCS(bytes, manifest).toScala
fromBinaryAsyncCS(bytes, manifest).asScala
}

View file

@ -15,7 +15,6 @@ package org.apache.pekko.cluster.sharding.typed.delivery
import java.util.Optional
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -34,6 +33,7 @@ import pekko.annotation.ApiMayChange
import pekko.cluster.sharding.typed.ShardingEnvelope
import pekko.cluster.sharding.typed.delivery.internal.ShardingProducerControllerImpl
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
/**
* Reliable delivery between a producer actor sending messages to sharded consumer
@ -297,7 +297,7 @@ object ShardingProducerController {
producerId: String,
region: ActorRef[ShardingEnvelope[ConsumerController.SequencedMessage[A]]],
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]]): Behavior[Command[A]] = {
apply(producerId, region, durableQueueBehavior.asScala)(ClassTag(messageClass))
apply(producerId, region, durableQueueBehavior.toScala)(ClassTag(messageClass))
}
/**
@ -309,7 +309,7 @@ object ShardingProducerController {
region: ActorRef[ShardingEnvelope[ConsumerController.SequencedMessage[A]]],
durableQueueBehavior: Optional[Behavior[DurableProducerQueue.Command[A]]],
settings: Settings): Behavior[Command[A]] = {
apply(producerId, region, durableQueueBehavior.asScala, settings)(ClassTag(messageClass))
apply(producerId, region, durableQueueBehavior.toScala, settings)(ClassTag(messageClass))
}
// TODO maybe there is a need for variant taking message extractor instead of ShardingEnvelope

View file

@ -19,7 +19,6 @@ import java.time.Duration
import java.util.concurrent.CompletionStage
import java.util.concurrent.ConcurrentHashMap
import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import org.apache.pekko
@ -52,6 +51,7 @@ import pekko.pattern.AskTimeoutException
import pekko.pattern.PromiseActorRef
import pekko.pattern.StatusReply
import pekko.util.{ unused, ByteString, Timeout }
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
/**
@ -148,19 +148,19 @@ import pekko.util.JavaDurationConverters._
// javadsl impl
override def init[M, E](entity: javadsl.Entity[M, E]): ActorRef[E] = {
import scala.compat.java8.OptionConverters._
import pekko.util.OptionConverters._
init(
new scaladsl.Entity(
createBehavior = (ctx: EntityContext[M]) =>
entity.createBehavior(new javadsl.EntityContext[M](entity.typeKey, ctx.entityId, ctx.shard)),
typeKey = entity.typeKey.asScala,
stopMessage = entity.stopMessage.asScala,
stopMessage = entity.stopMessage.toScala,
entityProps = entity.entityProps,
settings = entity.settings.asScala,
messageExtractor = entity.messageExtractor.asScala,
allocationStrategy = entity.allocationStrategy.asScala,
role = entity.role.asScala,
dataCenter = entity.dataCenter.asScala))
settings = entity.settings.toScala,
messageExtractor = entity.messageExtractor.toScala,
allocationStrategy = entity.allocationStrategy.toScala,
role = entity.role.toScala,
dataCenter = entity.dataCenter.toScala))
}
private def internalInit[M, E](
@ -353,13 +353,13 @@ import pekko.util.JavaDurationConverters._
}
override def ask[U](message: JFunction[ActorRef[U], M], timeout: Duration): CompletionStage[U] =
ask[U](replyTo => message.apply(replyTo))(timeout.asScala).toJava
ask[U](replyTo => message.apply(replyTo))(timeout.asScala).asJava
override def askWithStatus[Res](f: ActorRef[StatusReply[Res]] => M)(implicit timeout: Timeout): Future[Res] =
StatusReply.flattenStatusFuture(ask[StatusReply[Res]](f))
override def askWithStatus[Res](f: ActorRef[StatusReply[Res]] => M, timeout: Duration): CompletionStage[Res] =
askWithStatus(f.apply)(timeout.asScala).toJava
askWithStatus(f.apply)(timeout.asScala).asJava
/** Similar to [[pekko.actor.typed.scaladsl.AskPattern.PromiseRef]] but for an `EntityRef` target. */
@InternalApi

View file

@ -16,7 +16,6 @@ package org.apache.pekko.cluster.sharding.typed.internal
import java.util.function.IntFunction
import java.util.Optional
import scala.compat.java8.OptionConverters._
import scala.reflect.ClassTag
import org.apache.pekko
import pekko.actor.typed.ActorRef
@ -41,6 +40,7 @@ import pekko.cluster.sharding.typed.scaladsl.StartEntity
import pekko.cluster.typed.Cluster
import pekko.cluster.typed.SelfUp
import pekko.cluster.typed.Subscribe
import pekko.util.OptionConverters._
import pekko.util.PrettyDuration
/**
@ -218,7 +218,7 @@ private[pekko] final class ShardedDaemonProcessImpl(system: ActorSystem[_])
behaviorFactory: IntFunction[Behavior[T]],
settings: ShardedDaemonProcessSettings,
stopMessage: Optional[T]): Unit =
init(name, numberOfInstances, n => behaviorFactory(n), settings, stopMessage.asScala, None)(ClassTag(messageClass))
init(name, numberOfInstances, n => behaviorFactory(n), settings, stopMessage.toScala, None)(ClassTag(messageClass))
def init[T](
messageClass: Class[T],
@ -233,6 +233,6 @@ private[pekko] final class ShardedDaemonProcessImpl(system: ActorSystem[_])
numberOfInstances,
n => behaviorFactory(n),
settings,
stopMessage.asScala,
shardAllocationStrategy.asScala)(ClassTag(messageClass))
stopMessage.toScala,
shardAllocationStrategy.toScala)(ClassTag(messageClass))
}

View file

@ -17,7 +17,6 @@ import java.time.Duration
import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import scala.compat.java8.FutureConverters._
import org.apache.pekko
import pekko.actor.ActorRefProvider
import pekko.actor.typed.ActorRef
@ -29,6 +28,7 @@ import pekko.cluster.sharding.typed.javadsl.EntityRef
import pekko.cluster.sharding.typed.scaladsl
import pekko.japi.function.{ Function => JFunction }
import pekko.pattern.StatusReply
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.Timeout
@ -57,10 +57,10 @@ import pekko.util.Timeout
}
def ask[U](message: JFunction[ActorRef[U], M], timeout: Duration): CompletionStage[U] =
ask[U](replyTo => message.apply(replyTo))(timeout.asScala).toJava
ask[U](replyTo => message.apply(replyTo))(timeout.asScala).asJava
override def askWithStatus[Res](f: ActorRef[StatusReply[Res]] => M, timeout: Duration): CompletionStage[Res] =
askWithStatus(f)(timeout.asScala).toJava
askWithStatus(f)(timeout.asScala).asJava
override def askWithStatus[Res](f: ActorRef[StatusReply[Res]] => M)(implicit timeout: Timeout): Future[Res] =
StatusReply.flattenStatusFuture(ask(f))

View file

@ -32,7 +32,8 @@ import pekko.cluster.sharding.ShardCoordinator.ShardAllocationStrategy
import pekko.cluster.sharding.typed.internal.EntityTypeKeyImpl
import pekko.japi.function.{ Function => JFunction }
import pekko.pattern.StatusReply
import scala.compat.java8.OptionConverters._
import pekko.util.OptionConverters._
@FunctionalInterface
trait EntityFactory[M] {
def apply(shardRegion: ActorRef[ClusterSharding.ShardCommand], entityId: String): Behavior[M]
@ -358,13 +359,13 @@ final class Entity[M, E] private (
new pekko.cluster.sharding.typed.scaladsl.Entity(
eCtx => createBehavior(eCtx.toJava),
typeKey.asScala,
stopMessage.asScala,
stopMessage.toScala,
entityProps,
settings.asScala,
messageExtractor.asScala,
allocationStrategy.asScala,
role.asScala,
dataCenter.asScala)
settings.toScala,
messageExtractor.toScala,
allocationStrategy.toScala,
role.toScala,
dataCenter.toScala)
}

View file

@ -15,7 +15,6 @@ package org.apache.pekko.cluster.sharding.external.internal
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import org.apache.pekko
@ -45,6 +44,7 @@ import pekko.cluster.sharding.external.ShardLocations
import pekko.dispatch.MessageDispatcher
import pekko.event.Logging
import pekko.pattern.ask
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.PrettyDuration._
import pekko.util.Timeout
@ -85,7 +85,7 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys
}
override def setShardLocation(shard: ShardId, location: Address): CompletionStage[Done] =
updateShardLocation(shard, location).toJava
updateShardLocation(shard, location).asJava
override def shardLocations(): Future[ShardLocations] = {
(replicator ? Get(Key, ReadMajority(timeout)))
@ -104,7 +104,7 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys
}
}
override def getShardLocations(): CompletionStage[ShardLocations] = shardLocations().toJava
override def getShardLocations(): CompletionStage[ShardLocations] = shardLocations().asJava
override def updateShardLocations(locations: Map[ShardId, Address]): Future[Done] = {
log.debug("updateShardLocations {} for {}", locations, Key)
@ -121,6 +121,6 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys
}
override def setShardLocations(locations: java.util.Map[ShardId, Address]): CompletionStage[Done] = {
updateShardLocations(locations.asScala.toMap).toJava
updateShardLocations(locations.asScala.toMap).asJava
}
}

View file

@ -17,8 +17,6 @@ import java.util.Optional
import java.util.concurrent.CompletionStage
import java.util.function.Consumer
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
@ -27,6 +25,8 @@ import pekko.annotation.InternalApi
import pekko.coordination.lease.LeaseSettings
import pekko.coordination.lease.javadsl.{ Lease => JavaLease }
import pekko.coordination.lease.scaladsl.{ Lease => ScalaLease }
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
/**
* INTERNAL API
@ -34,13 +34,13 @@ import pekko.coordination.lease.scaladsl.{ Lease => ScalaLease }
@InternalApi
final private[pekko] class LeaseAdapter(delegate: ScalaLease)(implicit val ec: ExecutionContext) extends JavaLease {
override def acquire(): CompletionStage[java.lang.Boolean] = delegate.acquire().map(Boolean.box).toJava
override def acquire(): CompletionStage[java.lang.Boolean] = delegate.acquire().map(Boolean.box).asJava
override def acquire(leaseLostCallback: Consumer[Optional[Throwable]]): CompletionStage[java.lang.Boolean] = {
delegate.acquire(o => leaseLostCallback.accept(o.asJava)).map(Boolean.box).toJava
delegate.acquire(o => leaseLostCallback.accept(o.toJava)).map(Boolean.box).asJava
}
override def release(): CompletionStage[java.lang.Boolean] = delegate.release().map(Boolean.box).toJava
override def release(): CompletionStage[java.lang.Boolean] = delegate.release().map(Boolean.box).asJava
override def checkLease(): Boolean = delegate.checkLease()
override def getSettings(): LeaseSettings = delegate.settings
}
@ -53,13 +53,13 @@ final private[pekko] class LeaseAdapterToScala(val delegate: JavaLease)(implicit
extends ScalaLease(delegate.getSettings()) {
override def acquire(): Future[Boolean] =
delegate.acquire().toScala.map(Boolean.unbox)
delegate.acquire().asScala.map(Boolean.unbox)
override def acquire(leaseLostCallback: Option[Throwable] => Unit): Future[Boolean] =
delegate.acquire(o => leaseLostCallback(o.asScala)).toScala.map(Boolean.unbox)
delegate.acquire(o => leaseLostCallback(o.toScala)).asScala.map(Boolean.unbox)
override def release(): Future[Boolean] =
delegate.release().toScala.map(Boolean.unbox)
delegate.release().asScala.map(Boolean.unbox)
override def checkLease(): Boolean =
delegate.checkLease()

View file

@ -19,13 +19,13 @@ import java.util.concurrent.CompletionStage
import java.util.concurrent.TimeUnit
import scala.collection.immutable
import scala.compat.java8.OptionConverters._
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import org.apache.pekko
import pekko.actor.{ DeadLetterSuppression, NoSerializationVerificationNeeded }
import pekko.util.HashCode
import pekko.util.OptionConverters._
object ServiceDiscovery {
@ -108,13 +108,13 @@ object ServiceDiscovery {
* Java API
*/
def getPort: Optional[Int] =
port.asJava
port.toJava
/**
* Java API
*/
def getAddress: Optional[InetAddress] =
address.asJava
address.toJava
override def toString: String = s"ResolvedTarget($host,$port,$address)"
@ -165,13 +165,13 @@ final class Lookup(val serviceName: String, val portName: Option[String], val pr
* Java API
*/
def getPortName: Optional[String] =
portName.asJava
portName.toJava
/**
* Java API
*/
def getProtocol: Optional[String] =
protocol.asJava
protocol.toJava
private def copy(
serviceName: String = serviceName,
@ -321,8 +321,8 @@ abstract class ServiceDiscovery {
* The returned future should be failed once resolveTimeout has passed with a [[DiscoveryTimeoutException]].
*/
def lookup(query: Lookup, resolveTimeout: java.time.Duration): CompletionStage[Resolved] = {
import scala.compat.java8.FutureConverters._
lookup(query, FiniteDuration(resolveTimeout.toMillis, TimeUnit.MILLISECONDS)).toJava
import pekko.util.FutureConverters._
lookup(query, FiniteDuration(resolveTimeout.toMillis, TimeUnit.MILLISECONDS)).asJava
}
/**

View file

@ -15,10 +15,9 @@ package org.apache.pekko.cluster.ddata
import java.util.Optional
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.cluster.UniqueAddress
import pekko.util.OptionConverters._
/**
* Interface for implementing a state based convergent
@ -182,7 +181,7 @@ abstract class AbstractDeltaReplicatedData[A <: AbstractDeltaReplicatedData[A, B
* Delegates to [[#deltaData]], which must be implemented by subclass.
*/
final override def delta: Option[ReplicatedDelta] =
deltaData.asScala
deltaData.toScala
/**
* The accumulated delta of mutator operations since previous

View file

@ -19,6 +19,12 @@ This is just stub documentation. It will be improved.
* We have changed the default ports used by the pekko-remote module.
* With @ref:[Classic Remoting](../remoting.md), Akka defaults to 2552, while Pekko defaults to 7355.
* With @ref:[Artery Remoting](../remoting-artery.md), Akka defaults to 25520, while Pekko defaults to 17355.
* The Scala 2.13/Scala 3 version of Pekko no longer includes [scala-java8-compat](https://github.com/scala/scala-java8-compat)
as a dependency. This means that if you were relying on `scala-java8-compat` as a transitive dependency with Scala 2.13/Scala 3,
it is recommended to migrate to the [`scala.jdk` converters instead](https://github.com/scala/scala-java8-compat#do-you-need-this);
a short migration sketch is shown below. If this is not possible or desired, you can add `scala-java8-compat` as a dependency yourself.
* In addition to the previous point, for Scala 2.12 the `scala-java8-compat` dependency has been updated to `1.0.2`. If you are using
an older, binary-incompatible version of `scala-java8-compat`, it is recommended to update to `1.0.2`.
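As a rough illustration of that migration (the object and value names below are made up for this example, and it assumes Scala 2.13 or Scala 3, where the `scala.jdk` converters are available), the typical before/after change looks like this:
```scala
import java.util.Optional
import java.util.concurrent.CompletableFuture
import scala.concurrent.Future

// After the migration: use the scala.jdk converters from the Scala 2.13+/Scala 3 standard library
import scala.jdk.OptionConverters._ // replaces scala.compat.java8.OptionConverters._
import scala.jdk.FutureConverters._ // replaces scala.compat.java8.FutureConverters._

object ConvertersMigrationSketch {
  // Optional -> Option: was `.asScala` with scala-java8-compat, is `.toScala` with scala.jdk
  val maybeName: Option[String] = Optional.of("pekko").toScala

  // Option -> Optional: was `.asJava`, is `.toJava`
  val optionalName: Optional[String] = maybeName.toJava

  // CompletionStage -> Future: was `.toScala`, is `.asScala`
  val scalaFuture: Future[Int] = CompletableFuture.completedFuture(42).asScala

  // Future -> CompletionStage: was `.toJava`, is `.asJava`
  val javaStage: java.util.concurrent.CompletionStage[Int] = Future.successful(42).asJava
}
```
If staying on `scala-java8-compat` is preferred instead, it can be added explicitly, e.g. `libraryDependencies += "org.scala-lang.modules" %% "scala-java8-compat" % "1.0.2"` in sbt (the `1.0.2` version matches the one this commit pins for Scala 2.12).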
We are still investigating how the package name changes affect the @ref:[Persistence](../persistence.md)
and @ref:[Cluster](../cluster-usage.md) modules.

View file

@ -19,10 +19,10 @@ import org.apache.pekko.pattern.Patterns;
import org.apache.pekko.testkit.PekkoJUnitActorSystemResource;
import org.apache.pekko.testkit.PekkoSpec;
import org.apache.pekko.util.Timeout;
import org.apache.pekko.util.FutureConverters;
import jdocs.AbstractJavaTest;
import org.junit.ClassRule;
import org.junit.Test;
import scala.compat.java8.FutureConverters;
import scala.concurrent.Await;
import scala.concurrent.ExecutionContext;
import scala.concurrent.Future;
@ -69,7 +69,7 @@ public class FutureDocTest extends AbstractJavaTest {
ec);
Future<String> result =
Futures.firstCompletedOf(
Arrays.<Future<String>>asList(future, FutureConverters.toScala(delayed)), ec);
Arrays.<Future<String>>asList(future, FutureConverters.asScala(delayed)), ec);
Timeout timeout = Timeout.create(Duration.ofSeconds(2));
Await.result(result, timeout.duration());
}

View file

@ -73,8 +73,8 @@ final class EventEnvelope(
* Java API
*/
def getEventMetaData(): Optional[Any] = {
import scala.compat.java8.OptionConverters._
eventMetadata.asJava
import pekko.util.OptionConverters._
eventMetadata.toJava
}
override def hashCode(): Int = {

View file

@ -93,16 +93,16 @@ final class EventEnvelope[Event](
* Java API
*/
def getOptionalEvent(): Optional[Event] = {
import scala.compat.java8.OptionConverters._
eventOption.asJava
import pekko.util.OptionConverters._
eventOption.toJava
}
/**
* Java API
*/
def getEventMetaData(): Optional[AnyRef] = {
import scala.compat.java8.OptionConverters._
eventMetadata.map(_.asInstanceOf[AnyRef]).asJava
import pekko.util.OptionConverters._
eventMetadata.map(_.asInstanceOf[AnyRef]).toJava
}
override def hashCode(): Int = {

View file

@ -226,8 +226,8 @@ final class EventSourcedBehaviorTestKit[Command, Event, State](
private val _persistenceTestKit = new PersistenceTestKit(delegate.persistenceTestKit)
private val _snapshotTestKit = {
import scala.compat.java8.OptionConverters._
delegate.snapshotTestKit.map(new SnapshotTestKit(_)).asJava
import pekko.util.OptionConverters._
delegate.snapshotTestKit.map(new SnapshotTestKit(_)).toJava
}
/**

View file

@ -16,12 +16,11 @@ package org.apache.pekko.persistence.testkit.javadsl
import java.time.Duration
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters._
import org.apache.pekko
import pekko.Done
import pekko.actor.ClassicActorSystemProvider
import pekko.persistence.testkit.scaladsl
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
/**
@ -50,6 +49,6 @@ object PersistenceInit {
journalPluginId: String,
snapshotPluginId: String,
timeout: Duration): CompletionStage[Done] =
scaladsl.PersistenceInit.initializePlugins(system, journalPluginId, snapshotPluginId, timeout.asScala).toJava
scaladsl.PersistenceInit.initializePlugins(system, journalPluginId, snapshotPluginId, timeout.asScala).asJava
}

View file

@ -15,8 +15,6 @@ package org.apache.pekko.persistence.testkit.state.javadsl
import java.util.Optional
import java.util.concurrent.{ CompletableFuture, CompletionStage }
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.japi.Pair
import pekko.{ Done, NotUsed }
@ -28,6 +26,8 @@ import pekko.persistence.state.javadsl.DurableStateUpdateStore
import pekko.persistence.state.javadsl.GetObjectResult
import pekko.persistence.testkit.state.scaladsl.{ PersistenceTestKitDurableStateStore => SStore }
import pekko.stream.javadsl.Source
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
object PersistenceTestKitDurableStateStore {
val Identifier = pekko.persistence.testkit.state.scaladsl.PersistenceTestKitDurableStateStore.Identifier
@ -40,15 +40,15 @@ class PersistenceTestKitDurableStateStore[A](stateStore: SStore[A])
with DurableStateStorePagedPersistenceIdsQuery[A] {
def getObject(persistenceId: String): CompletionStage[GetObjectResult[A]] =
stateStore.getObject(persistenceId).map(_.toJava)(stateStore.system.dispatcher).toJava
stateStore.getObject(persistenceId).map(_.toJava)(stateStore.system.dispatcher).asJava
def upsertObject(persistenceId: String, seqNr: Long, value: A, tag: String): CompletionStage[Done] =
stateStore.upsertObject(persistenceId, seqNr, value, tag).toJava
stateStore.upsertObject(persistenceId, seqNr, value, tag).asJava
def deleteObject(persistenceId: String): CompletionStage[Done] = CompletableFuture.completedFuture(Done)
def deleteObject(persistenceId: String, revision: Long): CompletionStage[Done] =
stateStore.deleteObject(persistenceId, revision).toJava
stateStore.deleteObject(persistenceId, revision).asJava
def changes(tag: String, offset: Offset): Source[DurableStateChange[A], pekko.NotUsed] = {
stateStore.changes(tag, offset).asJava
@ -83,6 +83,6 @@ class PersistenceTestKitDurableStateStore[A](stateStore: SStore[A])
}
override def currentPersistenceIds(afterId: Optional[String], limit: Long): Source[String, NotUsed] =
stateStore.currentPersistenceIds(afterId.asScala, limit).asJava
stateStore.currentPersistenceIds(afterId.toScala, limit).asJava
}

View file

@ -381,7 +381,7 @@ private[pekko] final case class PublishedEventImpl(
replicatedMetaData: Option[ReplicatedPublishedEventMetaData])
extends PublishedEvent
with InternalProtocol {
import scala.compat.java8.OptionConverters._
import pekko.util.OptionConverters._
def tags: Set[String] = payload match {
case t: Tagged => t.tags
@ -398,5 +398,5 @@ private[pekko] final case class PublishedEventImpl(
case _ => this
}
override def getReplicatedMetaData: Optional[ReplicatedPublishedEventMetaData] = replicatedMetaData.asJava
override def getReplicatedMetaData: Optional[ReplicatedPublishedEventMetaData] = replicatedMetaData.toJava
}

View file

@ -16,11 +16,10 @@ package org.apache.pekko.persistence.typed.javadsl
import java.util.Objects
import java.util.function.{ BiFunction, Function => JFunction, Predicate, Supplier }
import scala.compat.java8.FunctionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.persistence.typed.internal._
import pekko.util.FunctionConverters._
import pekko.util.OptionVal
/**
@ -107,7 +106,7 @@ final class CommandHandlerBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forNullState(): CommandHandlerBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.isNull(s))
val predicate = ((s: State) => Objects.isNull(s)).asJava
val builder = CommandHandlerBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder
@ -123,7 +122,7 @@ final class CommandHandlerBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forNonNullState(): CommandHandlerBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.nonNull(s))
val predicate = ((s: State) => Objects.nonNull(s)).asJava
val builder = CommandHandlerBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder
@ -141,7 +140,7 @@ final class CommandHandlerBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forAnyState(): CommandHandlerBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(_ => true)
val predicate = ((_: State) => true).asJava
val builder = CommandHandlerBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder

View file

@ -16,11 +16,10 @@ package org.apache.pekko.persistence.typed.javadsl
import java.util.Objects
import java.util.function.{ BiFunction, Function => JFunction, Predicate, Supplier }
import scala.compat.java8.FunctionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.persistence.typed.internal._
import pekko.util.FunctionConverters._
import pekko.util.OptionVal
/* Note that this is a copy of CommandHandler.scala to support ReplyEffect
@ -117,7 +116,7 @@ final class CommandHandlerWithReplyBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forNullState(): CommandHandlerWithReplyBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.isNull(s))
val predicate = ((s: State) => Objects.isNull(s)).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder
@ -133,7 +132,7 @@ final class CommandHandlerWithReplyBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forNonNullState(): CommandHandlerWithReplyBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.nonNull(s))
val predicate = ((s: State) => Objects.nonNull(s)).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder
@ -151,7 +150,7 @@ final class CommandHandlerWithReplyBuilder[Command, Event, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forAnyState(): CommandHandlerWithReplyBuilderByState[Command, Event, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(_ => true)
val predicate = ((_: State) => true).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, Event, State](predicate)
builders = builder :: builders
builder

View file

@ -16,11 +16,10 @@ package org.apache.pekko.persistence.typed.javadsl
import java.util.Objects
import java.util.function.{ BiFunction, Function => JFunction, Predicate, Supplier }
import scala.compat.java8.FunctionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.util.OptionVal
import pekko.util.FunctionConverters._
/**
* FunctionalInterface for reacting on events having been persisted
@ -108,7 +107,7 @@ final class EventHandlerBuilder[State, Event]() {
* @return A new, mutable, EventHandlerBuilderByState
*/
def forNullState(): EventHandlerBuilderByState[State, State, Event] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.isNull(s))
val predicate = ((s: State) => Objects.isNull(s)).asJava
val builder = EventHandlerBuilderByState.builder[State, Event](predicate)
builders = builder :: builders
builder
@ -124,7 +123,7 @@ final class EventHandlerBuilder[State, Event]() {
* @return A new, mutable, EventHandlerBuilderByState
*/
def forNonNullState(): EventHandlerBuilderByState[State, State, Event] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.nonNull(s))
val predicate = ((s: State) => Objects.nonNull(s)).asJava
val builder = EventHandlerBuilderByState.builder[State, Event](predicate)
builders = builder :: builders
builder
@ -142,7 +141,7 @@ final class EventHandlerBuilder[State, Event]() {
* @return A new, mutable, EventHandlerBuilderByState
*/
def forAnyState(): EventHandlerBuilderByState[State, State, Event] = {
val predicate: Predicate[State] = asJavaPredicate(_ => true)
val predicate = ((_: State) => true).asJava
val builder = EventHandlerBuilderByState.builder[State, Event](predicate)
builders = builder :: builders
builder

View file

@ -16,12 +16,11 @@ package org.apache.pekko.persistence.typed.javadsl
import java.time.Duration
import java.util.Optional
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.japi.function.Function3
import pekko.persistence.typed.SnapshotAdapter
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
/**
* Helper functions for migration from PersistentFSM to Persistence Typed
@ -38,5 +37,5 @@ object PersistentFSMMigration {
*/
def snapshotAdapter[State](adapt: Function3[String, Any, Optional[Duration], State]): SnapshotAdapter[State] =
pekko.persistence.typed.scaladsl.PersistentFSMMigration.snapshotAdapter((stateId, snapshot, timer) =>
adapt.apply(stateId, snapshot, timer.map(_.asJava).asJava))
adapt.apply(stateId, snapshot, timer.map(_.asJava).toJava))
}

View file

@ -19,12 +19,11 @@ import java.util.function.Predicate
import java.util.function.Supplier
import java.util.function.{ Function => JFunction }
import scala.compat.java8.FunctionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.persistence.typed.state.internal._
import pekko.util.OptionVal
import pekko.util.FunctionConverters._
/**
* FunctionalInterface for reacting on commands
@ -110,7 +109,7 @@ final class CommandHandlerBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forNullState(): CommandHandlerBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.isNull(s))
val predicate: Predicate[State] = ((s: State) => Objects.isNull(s)).asJava
val builder = CommandHandlerBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder
@ -126,7 +125,7 @@ final class CommandHandlerBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forNonNullState(): CommandHandlerBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.nonNull(s))
val predicate: Predicate[State] = ((s: State) => Objects.nonNull(s)).asJava
val builder = CommandHandlerBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder
@ -144,7 +143,7 @@ final class CommandHandlerBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerBuilderByState
*/
def forAnyState(): CommandHandlerBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(_ => true)
val predicate: Predicate[State] = ((_: State) => true).asJava
val builder = CommandHandlerBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder

View file

@ -19,12 +19,11 @@ import java.util.function.Predicate
import java.util.function.Supplier
import java.util.function.{ Function => JFunction }
import scala.compat.java8.FunctionConverters._
import org.apache.pekko
import pekko.annotation.InternalApi
import pekko.persistence.typed.state.internal._
import pekko.util.OptionVal
import pekko.util.FunctionConverters._
/* Note that this is a copy of CommandHandler.scala to support ReplyEffect
* s/Effect/ReplyEffect/
@ -118,7 +117,7 @@ final class CommandHandlerWithReplyBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forNullState(): CommandHandlerWithReplyBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.isNull(s))
val predicate: Predicate[State] = ((s: State) => Objects.isNull(s)).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder
@ -134,7 +133,7 @@ final class CommandHandlerWithReplyBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forNonNullState(): CommandHandlerWithReplyBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(s => Objects.nonNull(s))
val predicate: Predicate[State] = ((s: State) => Objects.nonNull(s)).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder
@ -152,7 +151,7 @@ final class CommandHandlerWithReplyBuilder[Command, State]() {
* @return A new, mutable, CommandHandlerWithReplyBuilderByState
*/
def forAnyState(): CommandHandlerWithReplyBuilderByState[Command, State, State] = {
val predicate: Predicate[State] = asJavaPredicate(_ => true)
val predicate: Predicate[State] = ((_: State) => true).asJava
val builder = CommandHandlerWithReplyBuilderByState.builder[Command, State](predicate)
builders = builder :: builders
builder

View file

@ -16,10 +16,9 @@ package org.apache.pekko.persistence.state.javadsl
import java.util.Optional
import java.util.concurrent.CompletionStage
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.persistence.state.scaladsl.{ GetObjectResult => SGetObjectResult }
import pekko.util.OptionConverters._
/**
* API for reading durable state objects with payload `A`.
@ -35,5 +34,5 @@ trait DurableStateStore[A] {
}
final case class GetObjectResult[A](value: Optional[A], revision: Long) {
def toScala: SGetObjectResult[A] = SGetObjectResult(value.asScala, revision)
def toScala: SGetObjectResult[A] = SGetObjectResult(value.toScala, revision)
}

View file

@ -14,10 +14,10 @@
package org.apache.pekko.persistence.state.scaladsl
import scala.concurrent.Future
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.persistence.state.javadsl.{ GetObjectResult => JGetObjectResult }
import pekko.util.OptionConverters._
/**
* API for reading durable state objects with payload `A`.
@ -33,5 +33,5 @@ trait DurableStateStore[A] {
}
final case class GetObjectResult[A](value: Option[A], revision: Long) {
def toJava: JGetObjectResult[A] = JGetObjectResult(value.asJava, revision)
def toJava: JGetObjectResult[A] = JGetObjectResult(value.toJava, revision)
}

View file

@ -70,15 +70,7 @@ object Dependencies {
val Versions =
Seq(crossScalaVersions := allScalaVersions, scalaVersion := allScalaVersions.head,
java8CompatVersion := {
CrossVersion.partialVersion(scalaVersion.value) match {
// java8-compat is only used in a couple of places for 2.13,
// it is probably possible to remove the dependency if needed.
case Some((3, _)) => "1.0.0"
case Some((2, n)) if n >= 13 => "1.0.0"
case _ => "0.8.0"
}
})
java8CompatVersion := "1.0.2")
object Compile {
// Compile
@ -237,7 +229,13 @@ object Dependencies {
// TODO check if `l ++=` everywhere expensive?
val l = libraryDependencies
val actor = l ++= Seq(config, java8Compat.value)
val actor = l ++= (CrossVersion.partialVersion(scalaVersion.value) match {
// scala-java8-compat is now only needed for Scala 2.12;
// Scala 2.13+ and Scala 3 use the scala.jdk / pekko.util converters instead.
case Some((2, n)) if n == 12 =>
List("org.scala-lang.modules" %% "scala-java8-compat" % java8CompatVersion.value) // Scala License
case _ => List.empty
}) ++ Seq(config)
val actorTyped = l ++= Seq(slf4jApi)

View file

@ -144,7 +144,7 @@ object OSGi {
versionedImport(packageName, s"$epoch.$major", s"$epoch.${major.toInt + 1}")
}
def scalaJava8CompatImport(packageName: String = "scala.compat.java8.*") =
versionedImport(packageName, "0.8.0", "2.0.0")
versionedImport(packageName, "1.0.2", "1.0.2")
def scalaParsingCombinatorImport(packageName: String = "scala.util.parsing.combinator.*") =
versionedImport(packageName, "1.1.0", "1.2.0")
def sslConfigCoreImport(packageName: String = "com.typesafe.sslconfig") =

View file

@ -17,7 +17,6 @@ import java.util.Optional
import java.util.concurrent.ConcurrentHashMap
import scala.annotation.nowarn
import scala.collection.immutable
import scala.compat.java8.OptionConverters._
import scala.util.Failure
import scala.util.Success
@ -53,6 +52,7 @@ import pekko.annotation.InternalStableApi
import pekko.event.Logging
import pekko.event.LoggingAdapter
import pekko.util.unused
import pekko.util.OptionConverters._
object JacksonObjectMapperProvider extends ExtensionId[JacksonObjectMapperProvider] with ExtensionIdProvider {
override def get(system: ActorSystem): JacksonObjectMapperProvider = super.get(system)
@ -343,7 +343,7 @@ final class JacksonObjectMapperProvider(system: ExtendedActorSystem) extends Ext
* can be used
*/
def getOrCreate(bindingName: String, jsonFactory: Optional[JsonFactory]): ObjectMapper =
getOrCreate(bindingName, jsonFactory.asScala)
getOrCreate(bindingName, jsonFactory.toScala)
/**
* Scala API: Creates a new instance of a Jackson `ObjectMapper` with sensible defaults and modules configured
@ -380,7 +380,7 @@ final class JacksonObjectMapperProvider(system: ExtendedActorSystem) extends Ext
* @see [[JacksonObjectMapperProvider#getOrCreate]]
*/
def create(bindingName: String, jsonFactory: Optional[JsonFactory]): ObjectMapper =
create(bindingName, jsonFactory.asScala)
create(bindingName, jsonFactory.toScala)
}

View file

@ -44,7 +44,7 @@ class StreamConvertersSpec extends StreamSpec with DefaultTimeout {
import java.util.stream.IntStream
import java.util.stream.Stream
import scala.compat.java8.FunctionConverters._
import pekko.util.FunctionConverters._
def javaStreamInts =
IntStream.iterate(1,

View file

@ -18,7 +18,6 @@ import java.time.Duration
import java.util.Optional
import scala.annotation.tailrec
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration.FiniteDuration
import scala.reflect.{ classTag, ClassTag }
import scala.util.control.NonFatal
@ -33,6 +32,7 @@ import pekko.stream.impl.TraversalBuilder
import pekko.util.{ ByteString, OptionVal }
import pekko.util.JavaDurationConverters._
import pekko.util.LineNumbers
import pekko.util.OptionConverters._
/**
* Holds attributes which can be used to alter [[pekko.stream.scaladsl.Flow]] / [[pekko.stream.javadsl.Flow]]
@ -84,7 +84,7 @@ final case class Attributes(attributeList: List[Attributes.Attribute] = Nil) {
* This is the expected way for operators to access attributes.
*/
def getAttribute[T <: Attribute](c: Class[T]): Optional[T] =
attributeList.collectFirst { case attr if c.isInstance(attr) => c.cast(attr) }.asJava
attributeList.collectFirst { case attr if c.isInstance(attr) => c.cast(attr) }.toJava
/**
* Scala API: Get the most specific attribute value for a given Attribute type or subclass thereof or
@ -275,7 +275,7 @@ final case class Attributes(attributeList: List[Attributes.Attribute] = Nil) {
*/
@deprecated("Attributes should always be most specific, use get[T]", "Akka 2.5.7")
def getFirstAttribute[T <: Attribute](c: Class[T]): Optional[T] =
attributeList.reverseIterator.collectFirst { case attr if c.isInstance(attr) => c.cast(attr) }.asJava
attributeList.reverseIterator.collectFirst { case attr if c.isInstance(attr) => c.cast(attr) }.toJava
/**
* Scala API: Get the least specific attribute (added first) of a given type parameter T `Class` or subclass thereof.

View file

@ -13,12 +13,12 @@
package org.apache.pekko.stream
import scala.compat.java8.FunctionConverters._
import scala.concurrent.duration.FiniteDuration
import org.apache.pekko
import pekko.event.Logging
import pekko.event.Logging.LogLevel
import pekko.util.ConstantFun
import pekko.util.FunctionConverters._
import pekko.util.JavaDurationConverters._
final class RestartSettings private (

View file

@ -22,8 +22,6 @@ import java.util.function.Supplier
import scala.annotation.{ nowarn, varargs }
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters.RichOptionalGeneric
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -43,7 +41,9 @@ import pekko.japi.function
import pekko.japi.function.Creator
import pekko.stream.{ javadsl, _ }
import pekko.util.ConstantFun
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
import pekko.util.Timeout
import pekko.util.unused
@ -88,7 +88,7 @@ object Flow {
*/
def fromMaterializer[I, O, M](
factory: BiFunction[Materializer, Attributes, Flow[I, O, M]]): Flow[I, O, CompletionStage[M]] =
scaladsl.Flow.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Flow.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Defers the creation of a [[Flow]] until materialization. The `factory` function
@ -98,7 +98,7 @@ object Flow {
@deprecated("Use 'fromMaterializer' instead", "Akka 2.6.0")
def setup[I, O, M](
factory: BiFunction[ActorMaterializer, Attributes, Flow[I, O, M]]): Flow[I, O, CompletionStage[M]] =
scaladsl.Flow.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Flow.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Creates a `Flow` from a `Sink` and a `Source` where the Flow's input
@ -273,9 +273,9 @@ object Flow {
def lazyInit[I, O, M](
flowFactory: function.Function[I, CompletionStage[Flow[I, O, M]]],
fallback: function.Creator[M]): Flow[I, O, M] = {
import scala.compat.java8.FutureConverters._
import pekko.util.FutureConverters._
val sflow = scaladsl.Flow.lazyInit(
(flowFactory.apply(_)).andThen(_.toScala.map(_.asScala)(ExecutionContexts.parasitic)),
(flowFactory.apply(_)).andThen(_.asScala.map(_.asScala)(ExecutionContexts.parasitic)),
fallback.create _)
new Flow(sflow)
}
@ -299,12 +299,12 @@ object Flow {
@deprecated("Use 'Flow.lazyCompletionStageFlow' instead", "Akka 2.6.0")
def lazyInitAsync[I, O, M](
flowFactory: function.Creator[CompletionStage[Flow[I, O, M]]]): Flow[I, O, CompletionStage[Optional[M]]] = {
import scala.compat.java8.FutureConverters._
import pekko.util.FutureConverters._
val sflow = scaladsl.Flow
.lazyInitAsync(() => flowFactory.create().toScala.map(_.asScala)(ExecutionContexts.parasitic))
.lazyInitAsync(() => flowFactory.create().asScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(fut =>
fut.map(_.fold[Optional[M]](Optional.empty())(m => Optional.ofNullable(m)))(ExecutionContexts.parasitic).toJava)
fut.map(_.fold[Optional[M]](Optional.empty())(m => Optional.ofNullable(m)))(ExecutionContexts.parasitic).asJava)
new Flow(sflow)
}
@ -316,9 +316,9 @@ object Flow {
* [[NeverMaterializedException]] if upstream fails or downstream cancels before the completion stage has completed.
*/
def completionStageFlow[I, O, M](flow: CompletionStage[Flow[I, O, M]]): Flow[I, O, CompletionStage[M]] = {
import scala.compat.java8.FutureConverters._
import pekko.util.FutureConverters._
val sflow =
scaladsl.Flow.futureFlow(flow.toScala.map(_.asScala)(ExecutionContexts.parasitic)).mapMaterializedValue(_.toJava)
scaladsl.Flow.futureFlow(flow.asScala.map(_.asScala)(ExecutionContexts.parasitic)).mapMaterializedValue(_.asJava)
new javadsl.Flow(sflow)
}
@ -338,12 +338,12 @@ object Flow {
* '''Cancels when''' downstream cancels
*/
def lazyFlow[I, O, M](create: Creator[Flow[I, O, M]]): Flow[I, O, CompletionStage[M]] = {
import scala.compat.java8.FutureConverters._
import pekko.util.FutureConverters._
val sflow = scaladsl.Flow
.lazyFlow { () =>
create.create().asScala
}
.mapMaterializedValue(_.toJava)
.mapMaterializedValue(_.asJava)
new javadsl.Flow(sflow)
}
@ -371,8 +371,8 @@ object Flow {
def lazyCompletionStageFlow[I, O, M](
create: Creator[CompletionStage[Flow[I, O, M]]]): Flow[I, O, CompletionStage[M]] =
scaladsl.Flow
.lazyFutureFlow[I, O, M](() => create.create().toScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(_.toJava)
.lazyFutureFlow[I, O, M](() => create.create().asScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(_.asJava)
.asJava
/**
@ -757,7 +757,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
new Flow(
delegate.statefulMap(() => create.create())(
(s: S, out: Out) => f.apply(s, out).toScala,
(s: S) => onComplete.apply(s).asScala))
(s: S) => onComplete.apply(s).toScala))
/**
* Transform each input element into an `Iterable` of output elements that is
@ -822,7 +822,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
* @see [[#mapAsyncUnordered]]
*/
def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] =
new Flow(delegate.mapAsync(parallelism)(x => f(x).toScala))
new Flow(delegate.mapAsync(parallelism)(x => f(x).asScala))
/**
* Transform this stream by applying the given function to each of the elements
@ -856,7 +856,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
* @see [[#mapAsync]]
*/
def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] =
new Flow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala))
new Flow(delegate.mapAsyncUnordered(parallelism)(x => f(x).asScala))
/**
* Use the `ask` pattern to send a request-reply message to the target `ref` actor.
@ -1169,7 +1169,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
*/
def scanAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] =
new Flow(delegate.scanAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -1219,7 +1219,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
*/
def foldAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): javadsl.Flow[In, T, Mat] =
new Flow(delegate.foldAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -2193,7 +2193,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
f: function.Function[java.lang.Iterable[Out], javadsl.Flow[Out, Out2, Mat2]],
matF: function.Function2[Mat, CompletionStage[Mat2], Mat3]): javadsl.Flow[In, Out2, Mat3] = {
val newDelegate = delegate.flatMapPrefixMat(n)(seq => f(seq.asJava).asScala) { (m1, fm2) =>
matF(m1, fm2.toJava)
matF(m1, fm2.asJava)
}
new javadsl.Flow(newDelegate)
}
@ -3879,7 +3879,7 @@ final class Flow[In, Out, Mat](delegate: scaladsl.Flow[In, Out, Mat]) extends Gr
* downstream.
*/
def watchTermination[M]()(matF: function.Function2[Mat, CompletionStage[Done], M]): javadsl.Flow[In, Out, M] =
new Flow(delegate.watchTermination()((left, right) => matF(left, right.toJava)))
new Flow(delegate.watchTermination()((left, right) => matF(left, right.asJava)))
/**
* Materializes to `FlowMonitor[Out]` that allows monitoring of the current flow. All events are propagated

View file

@ -16,7 +16,6 @@ package org.apache.pekko.stream.javadsl
import java.util.concurrent.CompletionStage
import scala.annotation.unchecked.uncheckedVariance
import scala.compat.java8.FutureConverters._
import org.apache.pekko
import pekko.annotation.ApiMayChange
@ -25,6 +24,7 @@ import pekko.japi.{ function, Pair, Util }
import pekko.stream._
import pekko.util.ConstantFun
import pekko.util.ccompat.JavaConverters._
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
object FlowWithContext {
@ -176,7 +176,7 @@ final class FlowWithContext[In, CtxIn, Out, CtxOut, +Mat](
def mapAsync[Out2](
parallelism: Int,
f: function.Function[Out, CompletionStage[Out2]]): FlowWithContext[In, CtxIn, Out2, CtxOut, Mat] =
viaScala(_.mapAsync[Out2](parallelism)(o => f.apply(o).toScala))
viaScala(_.mapAsync[Out2](parallelism)(o => f.apply(o).asScala))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Flow.mapConcat]].

View file

@ -16,14 +16,14 @@ package org.apache.pekko.stream.javadsl
import java.util.Optional
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.Future
import org.apache.pekko
import pekko.Done
import pekko.dispatch.ExecutionContexts
import pekko.stream.QueueOfferResult
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
/**
* This trait allows to have a queue as a data source for some stream.
@ -93,9 +93,9 @@ object SourceQueueWithComplete {
// would have been better to add `asScala` in SourceQueueWithComplete trait, but not doing
// that for backwards compatibility reasons
new pekko.stream.scaladsl.SourceQueueWithComplete[T] {
def offer(elem: T): Future[QueueOfferResult] = queue.offer(elem).toScala
def offer(elem: T): Future[QueueOfferResult] = queue.offer(elem).asScala
def watchCompletion(): Future[Done] = queue.watchCompletion().toScala
def watchCompletion(): Future[Done] = queue.watchCompletion().asScala
def complete(): Unit = queue.complete()
@ -141,7 +141,7 @@ object SinkQueueWithCancel {
new pekko.stream.scaladsl.SinkQueueWithCancel[T] {
override def pull(): Future[Option[T]] =
queue.pull().toScala.map(_.asScala)(ExecutionContexts.parasitic)
queue.pull().asScala.map(_.toScala)(ExecutionContexts.parasitic)
override def cancel(): Unit = queue.cancel()
}

View file

@ -15,13 +15,12 @@ package org.apache.pekko.stream.javadsl
import java.util.Optional
import scala.compat.java8.OptionConverters._
import org.apache.pekko
import pekko.annotation.ApiMayChange
import pekko.japi.Pair
import pekko.stream.scaladsl
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
object RetryFlow {
@ -57,7 +56,7 @@ object RetryFlow {
scaladsl.RetryFlow
.withBackoff[In, Out, Mat](minBackoff.asScala, maxBackoff.asScala, randomFactor, maxRetries, flow.asScala) {
(in, out) =>
decideRetry.apply(in, out).asScala
decideRetry.apply(in, out).toScala
}
.asJava
@ -101,7 +100,7 @@ object RetryFlow {
randomFactor,
maxRetries,
flow.asScala) { (in, out) =>
decideRetry.apply(Pair(in._1, in._2), Pair(out._1, out._2)).asScala.map(_.toScala)
decideRetry.apply(Pair(in._1, in._2), Pair(out._1, out._2)).toScala.map(_.toScala)
}
.asJava[In, InCtx, Out, OutCtx, Mat]

View file

@ -21,8 +21,6 @@ import java.util.stream.Collector
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.ExecutionContext
import scala.util.Try
@ -42,6 +40,8 @@ import pekko.stream.impl.LinearTraversalBuilder
import pekko.stream.javadsl
import pekko.stream.scaladsl
import pekko.stream.scaladsl.SinkToCompletionStage
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
/** Java API */
object Sink {
@ -66,7 +66,7 @@ object Sink {
def foldAsync[U, In](
zero: U,
f: function.Function2[U, In, CompletionStage[U]]): javadsl.Sink[In, CompletionStage[U]] =
new Sink(scaladsl.Sink.foldAsync[U, In](zero)(f(_, _).toScala).toCompletionStage())
new Sink(scaladsl.Sink.foldAsync[U, In](zero)(f(_, _).asScala).toCompletionStage())
/**
* Creates a sink which materializes into a ``CompletionStage`` which will be completed with a result of the Java ``Collector``
@ -150,7 +150,7 @@ object Sink {
f: function.Function[T, CompletionStage[Void]]): Sink[T, CompletionStage[Done]] =
new Sink(
scaladsl.Sink
.foreachAsync(parallelism)((x: T) => f(x).toScala.map(_ => ())(ExecutionContexts.parasitic))
.foreachAsync(parallelism)((x: T) => f(x).asScala.map(_ => ())(ExecutionContexts.parasitic))
.toCompletionStage())
/**
@ -197,7 +197,7 @@ object Sink {
* See also [[head]].
*/
def headOption[In](): Sink[In, CompletionStage[Optional[In]]] =
new Sink(scaladsl.Sink.headOption[In].mapMaterializedValue(_.map(_.asJava)(ExecutionContexts.parasitic).toJava))
new Sink(scaladsl.Sink.headOption[In].mapMaterializedValue(_.map(_.toJava)(ExecutionContexts.parasitic).asJava))
/**
* A `Sink` that materializes into a `CompletionStage` of the last value received.
@ -217,7 +217,7 @@ object Sink {
* See also [[head]], [[takeLast]].
*/
def lastOption[In](): Sink[In, CompletionStage[Optional[In]]] =
new Sink(scaladsl.Sink.lastOption[In].mapMaterializedValue(_.map(_.asJava)(ExecutionContexts.parasitic).toJava))
new Sink(scaladsl.Sink.lastOption[In].mapMaterializedValue(_.map(_.toJava)(ExecutionContexts.parasitic).asJava))
/**
* A `Sink` that materializes into a `CompletionStage` of `List<In>` containing the last `n` collected elements.
@ -231,7 +231,7 @@ object Sink {
new Sink(
scaladsl.Sink
.takeLast[In](n)
.mapMaterializedValue(fut => fut.map(sq => sq.asJava)(ExecutionContexts.parasitic).toJava))
.mapMaterializedValue(fut => fut.map(sq => sq.asJava)(ExecutionContexts.parasitic).asJava))
}
/**
@ -247,7 +247,7 @@ object Sink {
def seq[In]: Sink[In, CompletionStage[java.util.List[In]]] = {
import pekko.util.ccompat.JavaConverters._
new Sink(
scaladsl.Sink.seq[In].mapMaterializedValue(fut => fut.map(sq => sq.asJava)(ExecutionContexts.parasitic).toJava))
scaladsl.Sink.seq[In].mapMaterializedValue(fut => fut.map(sq => sq.asJava)(ExecutionContexts.parasitic).asJava))
}
/**
@ -355,7 +355,7 @@ object Sink {
* [[Attributes]] of the [[Sink]] returned by this method.
*/
def fromMaterializer[T, M](factory: BiFunction[Materializer, Attributes, Sink[T, M]]): Sink[T, CompletionStage[M]] =
scaladsl.Sink.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Sink.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Defers the creation of a [[Sink]] until materialization. The `factory` function
@ -364,7 +364,7 @@ object Sink {
*/
@deprecated("Use 'fromMaterializer' instead", "Akka 2.6.0")
def setup[T, M](factory: BiFunction[ActorMaterializer, Attributes, Sink[T, M]]): Sink[T, CompletionStage[M]] =
scaladsl.Sink.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Sink.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Combine several sinks with fan-out strategy like `Broadcast` or `Balance` and returns `Sink`.
@ -435,9 +435,9 @@ object Sink {
new Sink(
scaladsl.Sink
.lazyInit[T, M](
t => sinkFactory.apply(t).toScala.map(_.asScala)(ExecutionContexts.parasitic),
t => sinkFactory.apply(t).asScala.map(_.asScala)(ExecutionContexts.parasitic),
() => fallback.create())
.mapMaterializedValue(_.toJava))
.mapMaterializedValue(_.asJava))
/**
* Creates a real `Sink` upon receiving the first element. Internal `Sink` will not be created if there are no elements,
@ -452,9 +452,9 @@ object Sink {
def lazyInitAsync[T, M](
sinkFactory: function.Creator[CompletionStage[Sink[T, M]]]): Sink[T, CompletionStage[Optional[M]]] = {
val sSink = scaladsl.Sink
.lazyInitAsync[T, M](() => sinkFactory.create().toScala.map(_.asScala)(ExecutionContexts.parasitic))
.lazyInitAsync[T, M](() => sinkFactory.create().asScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(fut =>
fut.map(_.fold(Optional.empty[M]())(m => Optional.ofNullable(m)))(ExecutionContexts.parasitic).toJava)
fut.map(_.fold(Optional.empty[M]())(m => Optional.ofNullable(m)))(ExecutionContexts.parasitic).asJava)
new Sink(sSink)
}
@ -491,8 +491,8 @@ object Sink {
*/
def lazyCompletionStageSink[T, M](create: Creator[CompletionStage[Sink[T, M]]]): Sink[T, CompletionStage[M]] =
new Sink(scaladsl.Sink.lazyFutureSink { () =>
create.create().toScala.map(_.asScala)(ExecutionContexts.parasitic)
}).mapMaterializedValue(_.toJava)
create.create().asScala.map(_.asScala)(ExecutionContexts.parasitic)
}).mapMaterializedValue(_.asJava)
}
/**

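The Sink hunks above show the opposite rename for futures: scala-java8-compat FutureConverters exposed toScala/toJava, whereas the new pekko.util.FutureConverters appears to expose asScala/asJava (matching scala.jdk.FutureConverters). A minimal sketch under that assumption:

import java.util.concurrent.{ CompletableFuture, CompletionStage }
import scala.concurrent.Future
import org.apache.pekko.util.FutureConverters._

val stage: CompletionStage[Int] = CompletableFuture.completedFuture(1)
val fut: Future[Int] = stage.asScala                    // previously stage.toScala
val backToJava: CompletionStage[Int] = fut.asJava       // previously fut.toJava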

@ -21,8 +21,6 @@ import java.util.function.{ BiFunction, Supplier }
import scala.annotation.{ nowarn, varargs }
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters.RichOptionalGeneric
import scala.concurrent.{ Future, Promise }
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -40,7 +38,9 @@ import pekko.japi.function.Creator
import pekko.stream._
import pekko.stream.impl.{ LinearTraversalBuilder, UnfoldAsyncJava }
import pekko.util.{ unused, _ }
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
import pekko.util.ccompat.JavaConverters._
/** Java API */
@ -73,7 +73,7 @@ object Source {
new Source(scaladsl.Source.maybe[T].mapMaterializedValue { (scalaOptionPromise: Promise[Option[T]]) =>
val javaOptionPromise = new CompletableFuture[Optional[T]]()
scalaOptionPromise.completeWith(
javaOptionPromise.toScala.map(_.asScala)(pekko.dispatch.ExecutionContexts.parasitic))
javaOptionPromise.asScala.map(_.toScala)(pekko.dispatch.ExecutionContexts.parasitic))
javaOptionPromise
})
@ -268,7 +268,7 @@ object Source {
* a pair of the next state `S` and output elements of type `E`.
*/
def unfold[S, E](s: S, f: function.Function[S, Optional[Pair[S, E]]]): Source[E, NotUsed] =
new Source(scaladsl.Source.unfold(s)((s: S) => f.apply(s).asScala.map(_.toScala)))
new Source(scaladsl.Source.unfold(s)((s: S) => f.apply(s).toScala.map(_.toScala)))
/**
* Same as [[unfold]], but uses an async function to generate the next state-element tuple.
@ -289,7 +289,7 @@ object Source {
*/
@deprecated("Use 'Source.lazySource' instead", "Akka 2.6.0")
def lazily[T, M](create: function.Creator[Source[T, M]]): Source[T, CompletionStage[M]] =
scaladsl.Source.lazily[T, M](() => create.create().asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Source.lazily[T, M](() => create.create().asScala).mapMaterializedValue(_.asJava).asJava
/**
* Creates a `Source` from supplied future factory that is not called until downstream demand. When source gets
@ -300,7 +300,7 @@ object Source {
*/
@deprecated("Use 'Source.lazyCompletionStage' instead", "Akka 2.6.0")
def lazilyAsync[T](create: function.Creator[CompletionStage[T]]): Source[T, Future[NotUsed]] =
scaladsl.Source.lazilyAsync[T](() => create.create().toScala).asJava
scaladsl.Source.lazilyAsync[T](() => create.create().asScala).asJava
/**
* Emits a single value when the given Scala `Future` is successfully completed and then completes the stream.
@ -323,7 +323,7 @@ object Source {
* If the `CompletionStage` is completed with a failure the stream is failed.
*/
def completionStage[T](completionStage: CompletionStage[T]): Source[T, NotUsed] =
future(completionStage.toScala)
future(completionStage.asScala)
/**
* Turn a `CompletionStage[Source]` into a source that will emit the values of the source when the future completes successfully.
@ -331,8 +331,8 @@ object Source {
*/
def completionStageSource[T, M](completionStageSource: CompletionStage[Source[T, M]]): Source[T, CompletionStage[M]] =
scaladsl.Source
.futureSource(completionStageSource.toScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(_.toJava)
.futureSource(completionStageSource.asScala.map(_.asScala)(ExecutionContexts.parasitic))
.mapMaterializedValue(_.asJava)
.asJava
/**
@ -368,7 +368,7 @@ object Source {
def lazyCompletionStage[T](create: Creator[CompletionStage[T]]): Source[T, NotUsed] =
scaladsl.Source
.lazySource { () =>
val f = create.create().toScala
val f = create.create().asScala
scaladsl.Source.future(f)
}
.mapMaterializedValue(_ => NotUsed.notUsed())
@ -391,7 +391,7 @@ object Source {
* is failed with a [[pekko.stream.NeverMaterializedException]]
*/
def lazySource[T, M](create: Creator[Source[T, M]]): Source[T, CompletionStage[M]] =
scaladsl.Source.lazySource(() => create.create().asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Source.lazySource(() => create.create().asScala).mapMaterializedValue(_.asJava).asJava
/**
* Defers invoking the `create` function to create a future source until there is downstream demand.
@ -648,7 +648,7 @@ object Source {
*/
def fromMaterializer[T, M](
factory: BiFunction[Materializer, Attributes, Source[T, M]]): Source[T, CompletionStage[M]] =
scaladsl.Source.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Source.fromMaterializer((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Defers the creation of a [[Source]] until materialization. The `factory` function
@ -657,7 +657,7 @@ object Source {
*/
@deprecated("Use 'fromMaterializer' instead", "Akka 2.6.0")
def setup[T, M](factory: BiFunction[ActorMaterializer, Attributes, Source[T, M]]): Source[T, CompletionStage[M]] =
scaladsl.Source.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.toJava).asJava
scaladsl.Source.setup((mat, attr) => factory(mat, attr).asScala).mapMaterializedValue(_.asJava).asJava
/**
* Combines several sources with fan-in strategy like [[Merge]] or [[Concat]] into a single [[Source]].
@ -840,7 +840,7 @@ object Source {
create: function.Creator[S],
read: function.Function[S, Optional[T]],
close: function.Procedure[S]): javadsl.Source[T, NotUsed] =
new Source(scaladsl.Source.unfoldResource[T, S](create.create _, (s: S) => read.apply(s).asScala, close.apply))
new Source(scaladsl.Source.unfoldResource[T, S](create.create _, (s: S) => read.apply(s).toScala, close.apply))
/**
* Start a new `Source` from some resource which can be opened, read and closed.
@ -868,9 +868,9 @@ object Source {
close: function.Function[S, CompletionStage[Done]]): javadsl.Source[T, NotUsed] =
new Source(
scaladsl.Source.unfoldResourceAsync[T, S](
() => create.create().toScala,
(s: S) => read.apply(s).toScala.map(_.asScala)(pekko.dispatch.ExecutionContexts.parasitic),
(s: S) => close.apply(s).toScala))
() => create.create().asScala,
(s: S) => read.apply(s).asScala.map(_.toScala)(pekko.dispatch.ExecutionContexts.parasitic),
(s: S) => close.apply(s).asScala))
/**
* Upcast a stream of elements to a stream of supertypes of that element. Useful in combination with
@ -1061,7 +1061,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
* [[pekko.stream.SystemMaterializer]] for running the stream.
*/
def run(materializer: Materializer): CompletionStage[Done] =
delegate.run()(materializer).toJava
delegate.run()(materializer).asJava
/**
* Connect this `Source` to the `Sink.ignore` and run it. Elements from the stream will be consumed and discarded.
@ -1070,7 +1070,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
* [[pekko.stream.SystemMaterializer]] for running the stream.
*/
def run(systemProvider: ClassicActorSystemProvider): CompletionStage[Done] =
delegate.run()(SystemMaterializer(systemProvider.classicSystem).materializer).toJava
delegate.run()(SystemMaterializer(systemProvider.classicSystem).materializer).asJava
/**
* Connect this `Source` to a `Sink` and run it. The returned value is the materialized value
@ -2432,7 +2432,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
new Source(
delegate.statefulMap(() => create.create())(
(s: S, out: Out) => f.apply(s, out).toScala,
(s: S) => onComplete.apply(s).asScala))
(s: S) => onComplete.apply(s).toScala))
/**
* Transform each input element into an `Iterable` of output elements that is
@ -2498,7 +2498,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
* @see [[#mapAsyncUnordered]]
*/
def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] =
new Source(delegate.mapAsync(parallelism)(x => f(x).toScala))
new Source(delegate.mapAsync(parallelism)(x => f(x).asScala))
/**
* Transform this stream by applying the given function to each of the elements
@ -2532,7 +2532,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
* @see [[#mapAsync]]
*/
def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): javadsl.Source[T, Mat] =
new Source(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala))
new Source(delegate.mapAsyncUnordered(parallelism)(x => f(x).asScala))
/**
* Use the `ask` pattern to send a request-reply message to the target `ref` actor.
@ -2841,7 +2841,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
*/
def scanAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): javadsl.Source[T, Mat] =
new Source(delegate.scanAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -2891,7 +2891,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
*/
def foldAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): javadsl.Source[T, Mat] =
new Source(delegate.foldAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -3658,7 +3658,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
f: function.Function[java.lang.Iterable[Out], javadsl.Flow[Out, Out2, Mat2]],
matF: function.Function2[Mat, CompletionStage[Mat2], Mat3]): javadsl.Source[Out2, Mat3] = {
val newDelegate = delegate.flatMapPrefixMat(n)(seq => f(seq.asJava).asScala) { (m1, fm2) =>
matF(m1, fm2.toJava)
matF(m1, fm2.asJava)
}
new javadsl.Source(newDelegate)
}
@ -4432,7 +4432,7 @@ final class Source[Out, Mat](delegate: scaladsl.Source[Out, Mat]) extends Graph[
* downstream.
*/
def watchTermination[M]()(matF: function.Function2[Mat, CompletionStage[Done], M]): javadsl.Source[Out, M] =
new Source(delegate.watchTermination()((left, right) => matF(left, right.toJava)))
new Source(delegate.watchTermination()((left, right) => matF(left, right.asJava)))
/**
* Materializes to `FlowMonitor<Out>` that allows monitoring of the current flow. All events are propagated

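Several Source hunks compose the two converter families, e.g. bridging a CompletionStage[Optional[T]] into a Future[Option[T]]. A sketch of that composition; ExecutionContexts.parasitic is used only because the surrounding code does, any ExecutionContext would do:

import java.util.Optional
import java.util.concurrent.{ CompletableFuture, CompletionStage }
import scala.concurrent.Future
import org.apache.pekko.dispatch.ExecutionContexts
import org.apache.pekko.util.FutureConverters._
import org.apache.pekko.util.OptionConverters._

val stage: CompletionStage[Optional[String]] = CompletableFuture.completedFuture(Optional.of("x"))
// CompletionStage -> Future via FutureConverters, then Optional -> Option via OptionConverters
val future: Future[Option[String]] = stage.asScala.map(_.toScala)(ExecutionContexts.parasitic)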

@ -16,7 +16,6 @@ package org.apache.pekko.stream.javadsl
import java.util.concurrent.CompletionStage
import scala.annotation.unchecked.uncheckedVariance
import scala.compat.java8.FutureConverters._
import org.apache.pekko
import pekko.actor.ClassicActorSystemProvider
@ -28,6 +27,7 @@ import pekko.japi.function
import pekko.stream._
import pekko.util.ConstantFun
import pekko.util.ccompat.JavaConverters._
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
object SourceWithContext {
@ -172,7 +172,7 @@ final class SourceWithContext[+Out, +Ctx, +Mat](delegate: scaladsl.SourceWithCon
def mapAsync[Out2](
parallelism: Int,
f: function.Function[Out, CompletionStage[Out2]]): SourceWithContext[Out2, Ctx, Mat] =
viaScala(_.mapAsync[Out2](parallelism)(o => f.apply(o).toScala))
viaScala(_.mapAsync[Out2](parallelism)(o => f.apply(o).asScala))
/**
* Context-preserving variant of [[pekko.stream.javadsl.Source.mapConcat]].


@ -20,8 +20,6 @@ import java.util.function.Supplier
import scala.annotation.{ nowarn, varargs }
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters.RichOptionalGeneric
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -32,7 +30,9 @@ import pekko.event.{ LogMarker, LoggingAdapter, MarkerLoggingAdapter }
import pekko.japi.{ function, Pair, Util }
import pekko.stream._
import pekko.util.ConstantFun
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
import pekko.util.ccompat.JavaConverters._
object SubFlow {
@ -242,7 +242,7 @@ class SubFlow[In, Out, Mat](
new SubFlow(
delegate.statefulMap(() => create.create())(
(s: S, out: Out) => f.apply(s, out).toScala,
(s: S) => onComplete.apply(s).asScala))
(s: S) => onComplete.apply(s).toScala))
/**
* Transform each input element into an `Iterable` of output elements that is
@ -308,7 +308,7 @@ class SubFlow[In, Out, Mat](
* @see [[#mapAsyncUnordered]]
*/
def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] =
new SubFlow(delegate.mapAsync(parallelism)(x => f(x).toScala))
new SubFlow(delegate.mapAsync(parallelism)(x => f(x).asScala))
/**
* Transform this stream by applying the given function to each of the elements
@ -342,7 +342,7 @@ class SubFlow[In, Out, Mat](
* @see [[#mapAsync]]
*/
def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubFlow[In, T, Mat] =
new SubFlow(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala))
new SubFlow(delegate.mapAsyncUnordered(parallelism)(x => f(x).asScala))
/**
* Only pass on those elements that satisfy the given predicate.
@ -575,7 +575,7 @@ class SubFlow[In, Out, Mat](
*/
def scanAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): SubFlow[In, T, Mat] =
new SubFlow(delegate.scanAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -625,7 +625,7 @@ class SubFlow[In, Out, Mat](
*/
def foldAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): SubFlow[In, T, Mat] =
new SubFlow(delegate.foldAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**


@ -20,8 +20,6 @@ import java.util.function.Supplier
import scala.annotation.{ nowarn, varargs }
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters.RichOptionalGeneric
import scala.concurrent.duration.FiniteDuration
import scala.reflect.ClassTag
@ -32,7 +30,9 @@ import pekko.event.{ LogMarker, LoggingAdapter, MarkerLoggingAdapter }
import pekko.japi.{ function, Pair, Util }
import pekko.stream._
import pekko.util.ConstantFun
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
import pekko.util.ccompat.JavaConverters._
/**
@ -233,7 +233,7 @@ class SubSource[Out, Mat](
new SubSource(
delegate.statefulMap(() => create.create())(
(s: S, out: Out) => f.apply(s, out).toScala,
(s: S) => onComplete.apply(s).asScala))
(s: S) => onComplete.apply(s).toScala))
/**
* Transform each input element into an `Iterable` of output elements that is
@ -299,7 +299,7 @@ class SubSource[Out, Mat](
* @see [[#mapAsyncUnordered]]
*/
def mapAsync[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] =
new SubSource(delegate.mapAsync(parallelism)(x => f(x).toScala))
new SubSource(delegate.mapAsync(parallelism)(x => f(x).asScala))
/**
* Transform this stream by applying the given function to each of the elements
@ -333,7 +333,7 @@ class SubSource[Out, Mat](
* @see [[#mapAsync]]
*/
def mapAsyncUnordered[T](parallelism: Int, f: function.Function[Out, CompletionStage[T]]): SubSource[T, Mat] =
new SubSource(delegate.mapAsyncUnordered(parallelism)(x => f(x).toScala))
new SubSource(delegate.mapAsyncUnordered(parallelism)(x => f(x).asScala))
/**
* Only pass on those elements that satisfy the given predicate.
@ -566,7 +566,7 @@ class SubSource[Out, Mat](
*/
def scanAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): SubSource[T, Mat] =
new SubSource(delegate.scanAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**
@ -614,7 +614,7 @@ class SubSource[Out, Mat](
*/
def foldAsync[T](zero: T)(f: function.Function2[T, Out, CompletionStage[T]]): SubSource[T, Mat] =
new SubSource(delegate.foldAsync(zero) { (out, in) =>
f(out, in).toScala
f(out, in).asScala
})
/**


@ -17,7 +17,6 @@ import java.util.Optional
import java.util.function.{ Consumer, Supplier }
import javax.net.ssl.{ SSLContext, SSLEngine, SSLSession }
import scala.compat.java8.OptionConverters
import scala.util.Try
import com.typesafe.sslconfig.pekko.PekkoSSLConfig
@ -27,6 +26,7 @@ import pekko.{ japi, NotUsed }
import pekko.stream._
import pekko.stream.TLSProtocol._
import pekko.util.ByteString
import pekko.util.OptionConverters._
/**
* Stream cipher support based upon JSSE.
@ -83,7 +83,7 @@ object TLS {
sslConfig: Optional[PekkoSSLConfig],
firstSession: NegotiateNewSession,
role: TLSRole): BidiFlow[SslTlsOutbound, ByteString, ByteString, SslTlsInbound, NotUsed] =
new javadsl.BidiFlow(scaladsl.TLS.apply(sslContext, OptionConverters.toScala(sslConfig), firstSession, role))
new javadsl.BidiFlow(scaladsl.TLS.apply(sslContext, sslConfig.toScala, firstSession, role))
/**
* Create a StreamTls [[pekko.stream.javadsl.BidiFlow]] in client mode. The
@ -132,11 +132,11 @@ object TLS {
new javadsl.BidiFlow(
scaladsl.TLS.apply(
sslContext,
OptionConverters.toScala(sslConfig),
sslConfig.toScala,
firstSession,
role,
closing,
OptionConverters.toScala(hostInfo).map(e => (e.first, e.second))))
hostInfo.toScala.map(e => (e.first, e.second))))
/**
* Create a StreamTls [[pekko.stream.javadsl.BidiFlow]] in client mode. The
@ -169,7 +169,7 @@ object TLS {
firstSession,
role,
closing,
OptionConverters.toScala(hostInfo).map(e => (e.first, e.second))))
hostInfo.toScala.map(e => (e.first, e.second))))
/**
* Create a StreamTls [[pekko.stream.javadsl.BidiFlow]]. This is a low-level interface.

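The TLS changes above also switch from the explicit object-style call (OptionConverters.toScala(sslConfig)) to the extension-method syntax brought in by the wildcard import; the two forms are assumed to be equivalent. A small sketch with an illustrative Optional (the real parameter is an Optional of japi.Pair):

import java.util.Optional
import org.apache.pekko.util.OptionConverters._

val hostInfo: Optional[(String, Int)] = Optional.of(("example.org", 443))
// was: scala.compat.java8.OptionConverters.toScala(hostInfo)
val peer: Option[(String, Int)] = hostInfo.toScala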

@ -23,8 +23,6 @@ import javax.net.ssl.SSLContext
import javax.net.ssl.SSLEngine
import javax.net.ssl.SSLSession
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.duration._
import scala.util.Failure
import scala.util.Success
@ -47,7 +45,9 @@ import pekko.stream.TLSClosing
import pekko.stream.TLSProtocol.NegotiateNewSession
import pekko.stream.scaladsl
import pekko.util.ByteString
import pekko.util.FutureConverters._
import pekko.util.JavaDurationConverters._
import pekko.util.OptionConverters._
object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider {
@ -69,12 +69,12 @@ object Tcp extends ExtensionId[Tcp] with ExtensionIdProvider {
*
* The produced [[java.util.concurrent.CompletionStage]] is fulfilled when the unbinding has been completed.
*/
def unbind(): CompletionStage[Unit] = delegate.unbind().toJava
def unbind(): CompletionStage[Unit] = delegate.unbind().asJava
/**
* @return A completion operator that is completed when manually unbound, or failed if the server fails
*/
def whenUnbound(): CompletionStage[Done] = delegate.whenUnbound.toJava
def whenUnbound(): CompletionStage[Done] = delegate.whenUnbound.asJava
}
/**
@ -185,7 +185,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.bind(interface, port, backlog, immutableSeq(options), halfClose, optionalDurationToScala(idleTimeout))
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
/**
* Creates a [[Tcp.ServerBinding]] instance which represents a prospective TCP server binding on the given `endpoint`.
@ -231,7 +231,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.bind(interface, port)
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] instance representing a prospective TCP client connection to the given endpoint.
@ -264,12 +264,12 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.outgoingConnection(
remoteAddress,
localAddress.asScala,
localAddress.toScala,
immutableSeq(options),
halfClose,
optionalDurationToScala(connectTimeout),
optionalDurationToScala(idleTimeout))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] instance representing a prospective TCP client connection to the given endpoint.
@ -319,7 +319,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
Flow.fromGraph(
delegate
.outgoingConnection(new InetSocketAddress(host, port))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] with TLS.
@ -340,7 +340,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
Flow.fromGraph(
delegate
.outgoingTlsConnection(host, port, sslContext, negotiateNewSession)
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] with TLS.
@ -369,11 +369,11 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
remoteAddress,
sslContext,
negotiateNewSession,
localAddress.asScala,
localAddress.toScala,
immutableSeq(options),
connectTimeout,
idleTimeout)
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] with TLS.
@ -391,7 +391,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
Flow.fromGraph(
delegate
.outgoingConnectionWithTls(remoteAddress, createSSLEngine = () => createSSLEngine.get())
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
/**
* Creates an [[Tcp.OutgoingConnection]] with TLS.
@ -417,17 +417,17 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
.outgoingConnectionWithTls(
remoteAddress,
createSSLEngine = () => createSSLEngine.get(),
localAddress.asScala,
localAddress.toScala,
immutableSeq(options),
optionalDurationToScala(connectTimeout),
optionalDurationToScala(idleTimeout),
session =>
verifySession.apply(session).asScala match {
verifySession.apply(session).toScala match {
case None => Success(())
case Some(t) => Failure(t)
},
closing)
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new OutgoingConnection(_))(parasitic).asJava))
}
/**
@ -457,7 +457,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.bindTls(interface, port, sslContext, negotiateNewSession, backlog, immutableSeq(options), idleTimeout)
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
/**
* Creates a [[Tcp.ServerBinding]] instance which represents a prospective TCP server binding on the given `endpoint`
@ -478,7 +478,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.bindTls(interface, port, sslContext, negotiateNewSession)
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
/**
* Creates a [[Tcp.ServerBinding]] instance which represents a prospective TCP server binding on the given `endpoint`
@ -494,7 +494,7 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
delegate
.bindWithTls(interface, port, createSSLEngine = () => createSSLEngine.get())
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
}
/**
@ -522,13 +522,13 @@ class Tcp(system: ExtendedActorSystem) extends pekko.actor.Extension {
immutableSeq(options),
optionalDurationToScala(idleTimeout),
session =>
verifySession.apply(session).asScala match {
verifySession.apply(session).toScala match {
case None => Success(())
case Some(t) => Failure(t)
},
closing)
.map(new IncomingConnection(_))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).toJava))
.mapMaterializedValue(_.map(new ServerBinding(_))(parasitic).asJava))
}
private def optionalDurationToScala(duration: Optional[java.time.Duration]) = {

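The helper referenced above, optionalDurationToScala, has its body outside this hunk; presumably it combines the Optional and java.time.Duration conversions. A hypothetical sketch using only methods visible elsewhere in this diff (toScala from OptionConverters, asScala from JavaDurationConverters); the real helper may differ:

import java.util.Optional
import scala.concurrent.duration.FiniteDuration
import org.apache.pekko.util.JavaDurationConverters._
import org.apache.pekko.util.OptionConverters._

// Hypothetical helper, not the actual body of optionalDurationToScala:
def optionalJavaDurationToScala(d: Optional[java.time.Duration]): Option[FiniteDuration] =
  d.toScala.map(_.asScala)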

@ -16,8 +16,6 @@ package org.apache.pekko.stream.scaladsl
import java.util.Optional
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.Future
import org.apache.pekko
@ -25,6 +23,8 @@ import pekko.Done
import pekko.annotation.InternalApi
import pekko.dispatch.ExecutionContexts
import pekko.stream.QueueOfferResult
import pekko.util.FutureConverters._
import pekko.util.OptionConverters._
/**
* This trait allows having a queue as a data source for some stream.
@ -107,9 +107,9 @@ object SourceQueueWithComplete {
queue: SourceQueueWithComplete[T]): pekko.stream.javadsl.SourceQueueWithComplete[T] =
new pekko.stream.javadsl.SourceQueueWithComplete[T] {
def offer(elem: T): CompletionStage[QueueOfferResult] =
queue.offer(elem).toJava
queue.offer(elem).asJava
def watchCompletion(): CompletionStage[Done] =
queue.watchCompletion().toJava
queue.watchCompletion().asJava
def complete(): Unit = queue.complete()
def fail(ex: Throwable): Unit = queue.fail(ex)
}
@ -157,7 +157,7 @@ object SinkQueueWithCancel {
queue: SinkQueueWithCancel[T]): pekko.stream.javadsl.SinkQueueWithCancel[T] =
new pekko.stream.javadsl.SinkQueueWithCancel[T] {
override def pull(): CompletionStage[Optional[T]] =
queue.pull().map(_.asJava)(ExecutionContexts.parasitic).toJava
queue.pull().map(_.toJava)(ExecutionContexts.parasitic).asJava
override def cancel(): Unit = queue.cancel()
}
}


@ -18,7 +18,6 @@ import java.util.concurrent.CompletionStage
import scala.annotation.tailrec
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.concurrent.{ Future, Promise }
import scala.concurrent.duration.FiniteDuration
@ -35,6 +34,7 @@ import pekko.stream.impl.fusing.GraphStages
import pekko.stream.impl.fusing.GraphStages._
import pekko.stream.stage.GraphStageWithMaterializedValue
import pekko.util.ConstantFun
import pekko.util.FutureConverters._
/**
* A `Source` is a set of stream processing steps that has one open output. It can comprise
@ -376,7 +376,7 @@ object Source {
*/
@deprecated("Use 'Source.completionStage' instead", "Akka 2.6.0")
def fromCompletionStage[T](future: CompletionStage[T]): Source[T, NotUsed] =
fromGraph(new FutureSource(future.toScala))
fromGraph(new FutureSource(future.asScala))
/**
* Streams the elements of the given future source once it successfully completes.
@ -396,7 +396,7 @@ object Source {
@deprecated("Use scala-compat CompletionStage to future converter and 'Source.futureSource' instead", "Akka 2.6.0")
def fromSourceCompletionStage[T, M](
completion: CompletionStage[_ <: Graph[SourceShape[T], M]]): Source[T, CompletionStage[M]] =
fromFutureSource(completion.toScala).mapMaterializedValue(_.toJava)
fromFutureSource(completion.asScala).mapMaterializedValue(_.asJava)
/**
* Elements are emitted periodically with the specified interval.
@ -526,7 +526,7 @@ object Source {
* Here for Java interoperability, the normal use from Scala should be [[Source.future]]
*/
def completionStage[T](completionStage: CompletionStage[T]): Source[T, NotUsed] =
future(completionStage.toScala)
future(completionStage.asScala)
/**
* Turn a `Future[Source]` into a source that will emit the values of the source when the future completes successfully.


@ -13,9 +13,10 @@
package org.apache.pekko.stream
import org.apache.pekko.util.FutureConverters
import java.util.concurrent.CompletionStage
import scala.compat.java8.FutureConverters
import scala.concurrent.Future
/**
@ -66,10 +67,10 @@ import scala.concurrent.Future
package object scaladsl {
implicit class SourceToCompletionStage[Out, T](val src: Source[Out, Future[T]]) extends AnyVal {
def toCompletionStage(): Source[Out, CompletionStage[T]] =
src.mapMaterializedValue(FutureConverters.toJava)
src.mapMaterializedValue(FutureConverters.asJava)
}
implicit class SinkToCompletionStage[In, T](val sink: Sink[In, Future[T]]) extends AnyVal {
def toCompletionStage(): Sink[In, CompletionStage[T]] =
sink.mapMaterializedValue(FutureConverters.toJava)
sink.mapMaterializedValue(FutureConverters.asJava)
}
}
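The package-object change above passes FutureConverters.asJava as a plain function value, which implies the new object also exposes asJava(Future[T]): CompletionStage[T] directly. A short usage sketch of the resulting implicit class; the Source is constructed here purely for illustration:

import java.util.concurrent.CompletionStage
import scala.concurrent.Future
import org.apache.pekko.Done
import org.apache.pekko.stream.scaladsl._

val scalaSource: Source[Int, Future[Done]] =
  Source(1 to 3).watchTermination()((_, done) => done)
// toCompletionStage() maps the materialized Future through FutureConverters.asJava
val javaFriendly: Source[Int, CompletionStage[Done]] = scalaSource.toCompletionStage()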