replace akka based names in logging code (#197)

PJ Fanning 2023-02-16 10:46:33 +01:00 committed by GitHub
parent 3ee028f915
commit 2824a44abe
18 changed files with 112 additions and 112 deletions

View file

@@ -74,7 +74,7 @@ import pekko.testkit.TestKit
if (event.message == null) "" else event.message
private def sourceOrEmpty(event: LoggingEvent): String =
- event.mdc.getOrElse("akkaSource", "")
+ event.mdc.getOrElse("pekkoSource", "")
def apply(event: LoggingEvent): Boolean = {
if (matches(event)) {

View file

@@ -59,8 +59,8 @@ import pekko.annotation.DoNotInherit
def withLoggerName(newLoggerName: String): LoggingTestKit
/**
- * Matching events that have "akkaSource" MDC value equal to the given value.
- * "akkaSource" is typically the actor path.
+ * Matching events that have "pekkoSource" MDC value equal to the given value.
+ * "pekkoSource" is typically the actor path.
*/
def withSource(newSource: String): LoggingTestKit

View file

@@ -59,8 +59,8 @@ import pekko.annotation.DoNotInherit
def withLoggerName(newLoggerName: String): LoggingTestKit
/**
- * Matching events that have "akkaSource" MDC value equal to the given value.
- * "akkaSource" is typically the actor path.
+ * Matching events that have "pekkoSource" MDC value equal to the given value.
+ * "pekkoSource" is typically the actor path.
*/
def withSource(newSource: String): LoggingTestKit
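
For reference, a minimal sketch of how this source matcher might be used, assuming the Pekko typed testkit scaladsl API mirrors its Akka lineage; the object name, actor name and message text below are illustrative only:

```scala
import org.apache.pekko.actor.testkit.typed.scaladsl.{ ActorTestKit, LoggingTestKit }
import org.apache.pekko.actor.typed.ActorSystem
import org.apache.pekko.actor.typed.scaladsl.Behaviors

object WithSourceExample extends App {
  val testKit = ActorTestKit()
  implicit val system: ActorSystem[_] = testKit.system

  // An actor that logs every message it receives.
  val behavior = Behaviors.receive[String] { (ctx, msg) =>
    ctx.log.info("Got message {}", msg)
    Behaviors.same
  }
  val ref = testKit.spawn(behavior, "greeter")

  // withSource matches on the "pekkoSource" MDC entry, i.e. the actor path.
  LoggingTestKit.info("Got message").withSource(ref.path.toString).expect {
    ref ! "ping"
  }

  testKit.shutdownTestKit()
}
```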

View file

@@ -64,7 +64,7 @@ class LoggingTestKitSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike
timeStamp = System.currentTimeMillis(),
marker = None,
throwable = None,
- mdc = Map("akkaSource" -> source))
+ mdc = Map("pekkoSource" -> source))
"The LoggingEventFilter.error" must {
"filter errors without cause" in {

View file

@@ -278,11 +278,11 @@ class ActorLoggingSpec extends ScalaTestWithActorTestKit("""
}
}
val actor =
- LoggingTestKit.info("Starting up").withMdc(Map(ActorMdc.AkkaTagsKey -> "tag1,tag2")).expect {
+ LoggingTestKit.info("Starting up").withMdc(Map(ActorMdc.PekkoTagsKey -> "tag1,tag2")).expect {
spawn(behavior, ActorTags("tag1", "tag2"))
}
- LoggingTestKit.info("Got message").withMdc(Map(ActorMdc.AkkaTagsKey -> "tag1,tag2")).expect {
+ LoggingTestKit.info("Got message").withMdc(Map(ActorMdc.PekkoTagsKey -> "tag1,tag2")).expect {
actor ! "ping"
}
}
@@ -357,10 +357,10 @@ class ActorLoggingSpec extends ScalaTestWithActorTestKit("""
// mdc on defer is empty
val ref = LoggingTestKit
.info("Starting")
- // not counting for example "akkaSource", but it shouldn't have any other entries
+ // not counting for example "pekkoSource", but it shouldn't have any other entries
.withCustom(logEvent =>
logEvent.mdc.keysIterator.forall(entry =>
- entry.startsWith("akka") || entry == "sourceActorSystem" || entry == "static") &&
+ entry.startsWith("pekko") || entry == "sourceActorSystem" || entry == "static") &&
logEvent.mdc("static") == "1")
.expect {
spawn(behaviors)
@@ -499,8 +499,8 @@ class ActorLoggingSpec extends ScalaTestWithActorTestKit("""
try {
event.mdc should contain allElementsOf (
Map(
- ActorMdc.AkkaAddressKey -> system.classicSystem.asInstanceOf[ExtendedActorSystem].provider.addressString,
- ActorMdc.AkkaSourceKey -> actorPath.get.toString,
+ ActorMdc.PekkoAddressKey -> system.classicSystem.asInstanceOf[ExtendedActorSystem].provider.addressString,
+ ActorMdc.PekkoSourceKey -> actorPath.get.toString,
ActorMdc.SourceActorSystemKey -> system.name))
true
} catch {

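A hedged sketch of the tag scenario exercised above, using the literal key "pekkoTags" since the `ActorMdc` constants are internal API; the object and tag names are illustrative:

```scala
import org.apache.pekko.actor.testkit.typed.scaladsl.{ ActorTestKit, LoggingTestKit }
import org.apache.pekko.actor.typed.{ ActorSystem, ActorTags }
import org.apache.pekko.actor.typed.scaladsl.Behaviors

object TaggedLoggingExample extends App {
  val testKit = ActorTestKit()
  implicit val system: ActorSystem[_] = testKit.system

  val behavior = Behaviors.setup[String] { ctx =>
    ctx.log.info("Starting up")
    Behaviors.receiveMessage { msg =>
      ctx.log.info("Got message {}", msg)
      Behaviors.same
    }
  }

  // Tags given at spawn time surface in the "pekkoTags" MDC entry (comma separated).
  LoggingTestKit.info("Starting up").withMdc(Map("pekkoTags" -> "tag1,tag2")).expect {
    testKit.spawn(behavior, ActorTags("tag1", "tag2"))
  }

  testKit.shutdownTestKit()
}
```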
View file

@@ -55,9 +55,9 @@ import scala.util.Success
// on each log entry or message, so do that up front here
tags.mkString(",")
- val akkaSource = ctx.self.path.toString
+ val pekkoSource = ctx.self.path.toString
- val akkaAddress =
+ val pekkoAddress =
ctx.system match {
case adapter: ActorSystemAdapter[_] => adapter.provider.addressString
case _ => Address("akka", ctx.system.name).toString
@@ -65,16 +65,16 @@ import scala.util.Success
val sourceActorSystem = ctx.system.name
- new LoggingContext(logger, tagsString, akkaSource, sourceActorSystem, akkaAddress, hasCustomName = false)
+ new LoggingContext(logger, tagsString, pekkoSource, sourceActorSystem, pekkoAddress, hasCustomName = false)
}
}
final case class LoggingContext(
logger: Logger,
tagsString: String,
- akkaSource: String,
+ pekkoSource: String,
sourceActorSystem: String,
- akkaAddress: String,
+ pekkoAddress: String,
hasCustomName: Boolean) {
// toggled once per message if logging is used to avoid having to
// touch the mdc thread local for cleanup in the end

View file

@@ -22,19 +22,19 @@ import org.apache.pekko.annotation.InternalApi
*/
@InternalApi private[pekko] object ActorMdc {
val SourceActorSystemKey = "sourceActorSystem"
- val AkkaSourceKey = "akkaSource"
- val AkkaTagsKey = "akkaTags"
- val AkkaAddressKey = "akkaAddress"
+ val PekkoSourceKey = "pekkoSource"
+ val PekkoTagsKey = "pekkoTags"
+ val PekkoAddressKey = "pekkoAddress"
def setMdc(context: ActorContextImpl.LoggingContext): Unit = {
// avoid access to MDC ThreadLocal if not needed, see details in LoggingContext
context.mdcUsed = true
- MDC.put(AkkaSourceKey, context.akkaSource)
+ MDC.put(PekkoSourceKey, context.pekkoSource)
MDC.put(SourceActorSystemKey, context.sourceActorSystem)
- MDC.put(AkkaAddressKey, context.akkaAddress)
+ MDC.put(PekkoAddressKey, context.pekkoAddress)
// empty string for no tags, a single tag or a comma separated list of tags
if (context.tagsString.nonEmpty)
- MDC.put(AkkaTagsKey, context.tagsString)
+ MDC.put(PekkoTagsKey, context.tagsString)
}
// MDC is cleared (if used) from aroundReceive in ActorAdapter after processing each message,

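A small sketch of how downstream code, for instance a custom logback converter or a structured-log encoder, might read the renamed MDC entries; the helper object is hypothetical and the keys are taken from the diff above:

```scala
import org.slf4j.MDC

object PekkoMdcReader {
  // Reads the Pekko-provided MDC entries for the current thread, empty string if unset.
  def summary(): String = {
    def get(key: String): String = Option(MDC.get(key)).getOrElse("")
    s"[${get("pekkoAddress")}] [${get("pekkoSource")}] [${get("sourceActorSystem")}] [${get("pekkoTags")}]"
  }
}
```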
View file

@@ -89,7 +89,7 @@ trait ActorContext[T] extends TypedActorContext[T] with ClassicActorContextProvi
/**
* Replace the current logger (or initialize a new logger if the logger was not touched before) with one that
- * has the given name as logger name. Logger source MDC entry "akkaSource" will be the actor path.
+ * has the given name as logger name. Logger source MDC entry "pekkoSource" will be the actor path.
*
* *Warning*: This method is not thread-safe and must not be accessed from threads other
* than the ordinary actor message processing thread, such as [[java.util.concurrent.CompletionStage]] callbacks.
@@ -98,7 +98,7 @@ trait ActorContext[T] extends TypedActorContext[T] with ClassicActorContextProvi
/**
* Replace the current logger (or initialize a new logger if the logger was not touched before) with one that
- * has the given class name as logger name. Logger source MDC entry "akkaSource" will be the actor path.
+ * has the given class name as logger name. Logger source MDC entry "pekkoSource" will be the actor path.
*
* *Warning*: This method is not thread-safe and must not be accessed from threads other
* than the ordinary actor message processing thread, such as [[java.util.concurrent.CompletionStage]] callbacks.

View file

@@ -89,7 +89,7 @@ trait ActorContext[T] extends TypedActorContext[T] with ClassicActorContextProvi
/**
* Replace the current logger (or initialize a new logger if the logger was not touched before) with one that
- * has the given name as logger name. Logger source MDC entry "akkaSource" will be the actor path.
+ * has the given name as logger name. Logger source MDC entry "pekkoSource" will be the actor path.
*
* *Warning*: This method is not thread-safe and must not be accessed from threads other
* than the ordinary actor message processing thread, such as [[java.util.concurrent.CompletionStage]] callbacks.
@@ -98,7 +98,7 @@ trait ActorContext[T] extends TypedActorContext[T] with ClassicActorContextProvi
/**
* Replace the current logger (or initialize a new logger if the logger was not touched before) with one that
- * has the given class name as logger name. Logger source MDC entry "akkaSource" will be the actor path.
+ * has the given class name as logger name. Logger source MDC entry "pekkoSource" will be the actor path.
*
* *Warning*: This method is not thread-safe and must not be accessed from threads other
* than the ordinary actor message processing thread, such as [[scala.concurrent.Future]] callbacks.

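A sketch of the method these comments document (`setLoggerName` in the Akka lineage, assumed unchanged here apart from the MDC key); the logger name is purely illustrative:

```scala
import org.apache.pekko.actor.typed.scaladsl.Behaviors

object LoggerNameExample {
  // Rename the actor's logger; the "pekkoSource" MDC entry still carries the
  // actor path regardless of the logger name.
  val behavior = Behaviors.setup[String] { ctx =>
    ctx.setLoggerName("com.example.MyComponent")
    ctx.log.info("logger renamed")
    Behaviors.receiveMessage { msg =>
      ctx.log.info("Got {}", msg)
      Behaviors.same
    }
  }
}
```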
View file

@@ -31,30 +31,30 @@ object ShardingLogMarker {
* INTERNAL API
*/
@InternalApi private[pekko] object Properties {
- val ShardTypeName = "akkaShardTypeName"
- val ShardId = "akkaShardId"
+ val ShardTypeName = "pekkoShardTypeName"
+ val ShardId = "pekkoShardId"
}
/**
- * Marker "akkaShardAllocated" of log event when `ShardCoordinator` allocates a shard to a region.
- * @param shardTypeName The `typeName` of the shard. Included as property "akkaShardTypeName".
- * @param shardId The id of the shard. Included as property "akkaShardId".
- * @param node The address of the node where the shard is allocated. Included as property "akkaRemoteAddress".
+ * Marker "pekkoShardAllocated" of log event when `ShardCoordinator` allocates a shard to a region.
+ * @param shardTypeName The `typeName` of the shard. Included as property "pekkoShardTypeName".
+ * @param shardId The id of the shard. Included as property "pekkoShardId".
+ * @param node The address of the node where the shard is allocated. Included as property "pekkoRemoteAddress".
*/
def shardAllocated(shardTypeName: String, shardId: String, node: Address): LogMarker =
LogMarker(
- "akkaShardAllocated",
+ "pekkoShardAllocated",
Map(
Properties.ShardTypeName -> shardTypeName,
Properties.ShardId -> shardId,
LogMarker.Properties.RemoteAddress -> node))
/**
- * Marker "akkaShardStarted" of log event when `ShardRegion` starts a shard.
- * @param shardTypeName The `typeName` of the shard. Included as property "akkaShardTypeName".
- * @param shardId The id of the shard. Included as property "akkaShardId".
+ * Marker "pekkoShardStarted" of log event when `ShardRegion` starts a shard.
+ * @param shardTypeName The `typeName` of the shard. Included as property "pekkoShardTypeName".
+ * @param shardId The id of the shard. Included as property "pekkoShardId".
*/
def shardStarted(shardTypeName: String, shardId: String): LogMarker =
- LogMarker("akkaShardStarted", Map(Properties.ShardTypeName -> shardTypeName, Properties.ShardId -> shardId))
+ LogMarker("pekkoShardStarted", Map(Properties.ShardTypeName -> shardTypeName, Properties.ShardId -> shardId))
}

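A hedged sketch of matching the renamed marker in a test, assuming `LoggingTestKit.custom` and the `marker` field of `LoggingEvent` shown earlier in this diff; what triggers the event (shard allocation, rebalancing) is application specific:

```scala
import org.apache.pekko.actor.testkit.typed.scaladsl.LoggingTestKit

object ShardMarkerFilterExample {
  // Matches log events carrying the renamed "pekkoShardAllocated" marker.
  val shardAllocated =
    LoggingTestKit.custom(event => event.marker.exists(_.getName == "pekkoShardAllocated"))
}
```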
View file

@@ -55,7 +55,7 @@ class ClusterActorLoggingSpec
LoggingTestKit
.info("Starting")
.withCustom { event =>
- event.mdc(ActorMdc.AkkaAddressKey) == addressString
+ event.mdc(ActorMdc.PekkoAddressKey) == addressString
}
.expect {
spawn(behavior)

View file

@@ -32,124 +32,124 @@ object ClusterLogMarker {
* INTERNAL API
*/
@InternalApi private[pekko] object Properties {
- val MemberStatus = "akkaMemberStatus"
- val SbrDecision = "akkaSbrDecision"
+ val MemberStatus = "pekkoMemberStatus"
+ val SbrDecision = "pekkoSbrDecision"
}
/**
- * Marker "akkaUnreachable" of log event when a node is marked as unreachable based on failure detector observation.
- * @param node The address of the node that is marked as unreachable. Included as property "akkaRemoteAddress".
+ * Marker "pekkoUnreachable" of log event when a node is marked as unreachable based on failure detector observation.
+ * @param node The address of the node that is marked as unreachable. Included as property "pekkoRemoteAddress".
*/
def unreachable(node: Address): LogMarker =
- LogMarker("akkaUnreachable", Map(LogMarker.Properties.RemoteAddress -> node))
+ LogMarker("pekkoUnreachable", Map(LogMarker.Properties.RemoteAddress -> node))
/**
- * Marker "akkaReachable" of log event when a node is marked as reachable again based on failure detector observation.
- * @param node The address of the node that is marked as reachable. Included as property "akkaRemoteAddress".
+ * Marker "pekkoReachable" of log event when a node is marked as reachable again based on failure detector observation.
+ * @param node The address of the node that is marked as reachable. Included as property "pekkoRemoteAddress".
*/
def reachable(node: Address): LogMarker =
- LogMarker("akkaReachable", Map(LogMarker.Properties.RemoteAddress -> node))
+ LogMarker("pekkoReachable", Map(LogMarker.Properties.RemoteAddress -> node))
/**
- * Marker "akkaHeartbeatStarvation" of log event when scheduled heartbeat was delayed.
+ * Marker "pekkoHeartbeatStarvation" of log event when scheduled heartbeat was delayed.
*/
val heartbeatStarvation: LogMarker =
- LogMarker("akkaHeartbeatStarvation")
+ LogMarker("pekkoHeartbeatStarvation")
/**
- * Marker "akkaClusterLeaderIncapacitated" of log event when leader can't perform its duties.
+ * Marker "pekkoClusterLeaderIncapacitated" of log event when leader can't perform its duties.
* Typically because there are unreachable nodes that have not been downed.
*/
val leaderIncapacitated: LogMarker =
- LogMarker("akkaClusterLeaderIncapacitated")
+ LogMarker("pekkoClusterLeaderIncapacitated")
/**
- * Marker "akkaClusterLeaderRestored" of log event when leader can perform its duties again.
+ * Marker "pekkoClusterLeaderRestored" of log event when leader can perform its duties again.
*/
val leaderRestored: LogMarker =
- LogMarker("akkaClusterLeaderRestored")
+ LogMarker("pekkoClusterLeaderRestored")
/**
- * Marker "akkaJoinFailed" of log event when node couldn't join seed nodes.
+ * Marker "pekkoJoinFailed" of log event when node couldn't join seed nodes.
*/
val joinFailed: LogMarker =
- LogMarker("akkaJoinFailed")
+ LogMarker("pekkoJoinFailed")
/**
- * Marker "akkaMemberChanged" of log event when a member's status is changed by the leader.
- * @param node The address of the node that is changed. Included as property "akkaRemoteAddress"
- * and "akkaRemoteAddressUid".
- * @param status New member status. Included as property "akkaMemberStatus".
+ * Marker "pekkoMemberChanged" of log event when a member's status is changed by the leader.
+ * @param node The address of the node that is changed. Included as property "pekkoRemoteAddress"
+ * and "pekkoRemoteAddressUid".
+ * @param status New member status. Included as property "pekkoMemberStatus".
*/
def memberChanged(node: UniqueAddress, status: MemberStatus): LogMarker =
LogMarker(
- "akkaMemberChanged",
+ "pekkoMemberChanged",
Map(
LogMarker.Properties.RemoteAddress -> node.address,
LogMarker.Properties.RemoteAddressUid -> node.longUid,
Properties.MemberStatus -> status))
/**
- * Marker "akkaClusterSingletonStarted" of log event when Cluster Singleton
+ * Marker "pekkoClusterSingletonStarted" of log event when Cluster Singleton
* instance has started.
*/
val singletonStarted: LogMarker =
- LogMarker("akkaClusterSingletonStarted")
+ LogMarker("pekkoClusterSingletonStarted")
/**
- * Marker "akkaClusterSingletonTerminated" of log event when Cluster Singleton
+ * Marker "pekkoClusterSingletonTerminated" of log event when Cluster Singleton
* instance has terminated.
*/
val singletonTerminated: LogMarker =
- LogMarker("akkaClusterSingletonTerminated")
+ LogMarker("pekkoClusterSingletonTerminated")
/**
- * Marker "akkaSbrDowning" of log event when Split Brain Resolver has made a downing decision. Followed
+ * Marker "pekkoSbrDowning" of log event when Split Brain Resolver has made a downing decision. Followed
* by [[ClusterLogMarker.sbrDowningNode]] for each node that is downed.
- * @param decision The downing decision. Included as property "akkaSbrDecision".
+ * @param decision The downing decision. Included as property "pekkoSbrDecision".
*/
def sbrDowning(decision: DowningStrategy.Decision): LogMarker =
- LogMarker("akkaSbrDowning", Map(Properties.SbrDecision -> decision))
+ LogMarker("pekkoSbrDowning", Map(Properties.SbrDecision -> decision))
/**
- * Marker "akkaSbrDowningNode" of log event when a member is downed by Split Brain Resolver.
- * @param node The address of the node that is downed. Included as property "akkaRemoteAddress"
- * and "akkaRemoteAddressUid".
- * @param decision The downing decision. Included as property "akkaSbrDecision".
+ * Marker "pekkoSbrDowningNode" of log event when a member is downed by Split Brain Resolver.
+ * @param node The address of the node that is downed. Included as property "pekkoRemoteAddress"
+ * and "pekkoRemoteAddressUid".
+ * @param decision The downing decision. Included as property "pekkoSbrDecision".
*/
def sbrDowningNode(node: UniqueAddress, decision: DowningStrategy.Decision): LogMarker =
LogMarker(
- "akkaSbrDowningNode",
+ "pekkoSbrDowningNode",
Map(
LogMarker.Properties.RemoteAddress -> node.address,
LogMarker.Properties.RemoteAddressUid -> node.longUid,
Properties.SbrDecision -> decision))
/**
- * Marker "akkaSbrInstability" of log event when Split Brain Resolver has detected too much instability
+ * Marker "pekkoSbrInstability" of log event when Split Brain Resolver has detected too much instability
* and will down all nodes.
*/
val sbrInstability: LogMarker =
- LogMarker("akkaSbrInstability")
+ LogMarker("pekkoSbrInstability")
/**
- * Marker "akkaSbrLeaseAcquired" of log event when Split Brain Resolver has acquired the lease.
- * @param decision The downing decision. Included as property "akkaSbrDecision".
+ * Marker "pekkoSbrLeaseAcquired" of log event when Split Brain Resolver has acquired the lease.
+ * @param decision The downing decision. Included as property "pekkoSbrDecision".
*/
def sbrLeaseAcquired(decision: DowningStrategy.Decision): LogMarker =
- LogMarker("akkaSbrLeaseAcquired", Map(Properties.SbrDecision -> decision))
+ LogMarker("pekkoSbrLeaseAcquired", Map(Properties.SbrDecision -> decision))
/**
- * Marker "akkaSbrLeaseDenied" of log event when Split Brain Resolver has been denied the lease.
- * @param reverseDecision The (reverse) downing decision. Included as property "akkaSbrDecision".
+ * Marker "pekkoSbrLeaseDenied" of log event when Split Brain Resolver has been denied the lease.
+ * @param reverseDecision The (reverse) downing decision. Included as property "pekkoSbrDecision".
*/
def sbrLeaseDenied(reverseDecision: DowningStrategy.Decision): LogMarker =
- LogMarker("akkaSbrLeaseDenied", Map(Properties.SbrDecision -> reverseDecision))
+ LogMarker("pekkoSbrLeaseDenied", Map(Properties.SbrDecision -> reverseDecision))
/**
- * Marker "akkaSbrLeaseReleased" of log event when Split Brain Resolver has released the lease.
+ * Marker "pekkoSbrLeaseReleased" of log event when Split Brain Resolver has released the lease.
*/
val sbrLeaseReleased: LogMarker =
- LogMarker("akkaSbrLeaseReleased")
+ LogMarker("pekkoSbrLeaseReleased")
}

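On the logging-backend side, a sketch of a logback filter keyed on the renamed `pekko*` markers; this assumes the logback 1.2-style `getMarker` accessor, and the class name is hypothetical (it would be referenced from `logback.xml`):

```scala
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.core.filter.Filter
import ch.qos.logback.core.spi.FilterReply

class PekkoMarkerFilter extends Filter[ILoggingEvent] {
  // Accept events that carry one of the renamed "pekko*" markers, leave the rest untouched.
  override def decide(event: ILoggingEvent): FilterReply = {
    val markerName = Option(event.getMarker).map(_.getName).getOrElse("")
    if (markerName.startsWith("pekko")) FilterReply.ACCEPT else FilterReply.NEUTRAL
  }
}
```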
View file

@@ -5,7 +5,7 @@
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
- <pattern>%date{ISO8601} [%-5level] [%logger] [%marker] [%X{akkaSource}] [%X{persistencePhase}] [%X{persistenceId}] - %msg %n</pattern>
+ <pattern>%date{ISO8601} [%-5level] [%logger] [%marker] [%X{pekkoSource}] [%X{persistencePhase}] [%X{persistenceId}] - %msg %n</pattern>
</encoder>
</appender>

View file

@@ -132,7 +132,7 @@ public class LoggerSourceTest extends JUnitSuite {
.withCustom(
event -> {
return event.loggerName().equals(LoggingBehavior.class.getName())
- && event.getMdc().get("akkaSource").equals(ref.path().toString());
+ && event.getMdc().get("pekkoSource").equals(ref.path().toString());
})
.expect(
testKit.system(),
@@ -150,7 +150,7 @@ public class LoggerSourceTest extends JUnitSuite {
.withCustom(
event -> {
return event.loggerName().equals(LoggingBehavior.class.getName())
- && event.getMdc().get("akkaSource").equals(ref.path().toString());
+ && event.getMdc().get("pekkoSource").equals(ref.path().toString());
})
.expect(
testKit.system(),

View file

@@ -5,7 +5,7 @@
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
- <pattern>%date{ISO8601} [%-5level] [%logger] [%marker] [%X{akkaSource}] [%X{persistencePhase}] [%X{persistenceId}] - %msg %n</pattern>
+ <pattern>%date{ISO8601} [%-5level] [%logger] [%marker] [%X{pekkoSource}] [%X{persistencePhase}] [%X{persistenceId}] - %msg %n</pattern>
</encoder>
</appender>

View file

@@ -67,13 +67,13 @@ class Slf4jLogger extends Actor with SLF4JLogging with RequiresMessageQueue[Logg
val mdcThreadAttributeName = "sourceThread"
val mdcActorSystemAttributeName = "sourceActorSystem"
- val mdcAkkaSourceAttributeName = "akkaSource"
- val mdcAkkaTimestamp = "akkaTimestamp"
- val mdcAkkaAddressAttributeName = "akkaAddress"
- val mdcAkkaUidAttributeName = "akkaUid"
+ val mdcPekkoSourceAttributeName = "pekkoSource"
+ val mdcPekkoTimestamp = "pekkoTimestamp"
+ val mdcPekkoAddressAttributeName = "pekkoAddress"
+ val mdcPekkoUidAttributeName = "pekkoUid"
- private def akkaAddress = context.system.asInstanceOf[ExtendedActorSystem].provider.addressString
- private val akkaUid: String = context.system.asInstanceOf[ExtendedActorSystem].uid.toString
+ private def pekkoAddress = context.system.asInstanceOf[ExtendedActorSystem].provider.addressString
+ private val pekkoUid: String = context.system.asInstanceOf[ExtendedActorSystem].uid.toString
def receive = {
@@ -129,12 +129,12 @@ class Slf4jLogger extends Actor with SLF4JLogging with RequiresMessageQueue[Logg
case _ =>
}
- MDC.put(mdcAkkaSourceAttributeName, logSource)
+ MDC.put(mdcPekkoSourceAttributeName, logSource)
MDC.put(mdcThreadAttributeName, logEvent.thread.getName)
- MDC.put(mdcAkkaTimestamp, formatTimestamp(logEvent.timestamp))
+ MDC.put(mdcPekkoTimestamp, formatTimestamp(logEvent.timestamp))
MDC.put(mdcActorSystemAttributeName, context.system.name)
- MDC.put(mdcAkkaAddressAttributeName, akkaAddress)
- MDC.put(mdcAkkaUidAttributeName, akkaUid)
+ MDC.put(mdcPekkoAddressAttributeName, pekkoAddress)
+ MDC.put(mdcPekkoUidAttributeName, pekkoUid)
logEvent.mdc.foreach { case (k, v) => MDC.put(k, String.valueOf(v)) }
try logStatement

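A minimal sketch of classic logging with the renamed MDC attributes, assuming `pekko-slf4j` plus a logback backend on the classpath and the `pekko.loggers` setting; the system, logger and message names are illustrative:

```scala
import com.typesafe.config.ConfigFactory
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.event.Logging

object ClassicSlf4jExample extends App {
  val config = ConfigFactory.parseString("""pekko.loggers = ["org.apache.pekko.event.slf4j.Slf4jLogger"]""")
  val system = ActorSystem("example", config)

  val log = Logging(system, "com.example.ClassicSlf4jExample")
  // Emitted with pekkoSource, pekkoAddress, pekkoUid, sourceThread and
  // sourceActorSystem populated in the MDC by the Slf4jLogger.
  log.info("hello from classic logging")

  system.terminate()
}
```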
View file

@@ -2,7 +2,7 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
- <pattern>%date{ISO8601} %-5level %logger %X{akkaSource} %X{sourceThread} - %msg%n</pattern>
+ <pattern>%date{ISO8601} %-5level %logger %X{pekkoSource} %X{sourceThread} - %msg%n</pattern>
</encoder>
</appender>
<appender name="TEST" class="org.apache.pekko.event.slf4j.Slf4jLoggerSpec$TestAppender">

View file

@@ -101,9 +101,9 @@ class Slf4jLoggerSpec extends PekkoSpec(Slf4jLoggerSpec.config) with BeforeAndAf
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=akka://Slf4jLoggerSpec/user/logProducer")
- s should include("akkaAddress=akka://Slf4jLoggerSpec")
- s should include("akkaUid=")
+ s should include("pekkoSource=akka://Slf4jLoggerSpec/user/logProducer")
+ s should include("pekkoAddress=akka://Slf4jLoggerSpec")
+ s should include("pekkoUid=")
s should include("level=[ERROR]")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$LogProducer]")
(s should include).regex(sourceThreadRegex)
@@ -117,8 +117,8 @@ class Slf4jLoggerSpec extends PekkoSpec(Slf4jLoggerSpec.config) with BeforeAndAf
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=akka://Slf4jLoggerSpec/user/logProducer")
- s should include("akkaAddress=akka://Slf4jLoggerSpec")
+ s should include("pekkoSource=akka://Slf4jLoggerSpec/user/logProducer")
+ s should include("pekkoAddress=akka://Slf4jLoggerSpec")
s should include("level=[INFO]")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$LogProducer]")
(s should include).regex(sourceThreadRegex)
@@ -177,7 +177,7 @@ class Slf4jLoggerSpec extends PekkoSpec(Slf4jLoggerSpec.config) with BeforeAndAf
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=akka://Slf4jLoggerSpec/user/logProducer")
+ s should include("pekkoSource=akka://Slf4jLoggerSpec/user/logProducer")
s should include("level=[INFO]")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$LogProducer]")
(s should include).regex(sourceThreadRegex)
@@ -198,7 +198,7 @@ class Slf4jLoggerSpec extends PekkoSpec(Slf4jLoggerSpec.config) with BeforeAndAf
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=akka://Slf4jLoggerSpec/user/logProducer")
+ s should include("pekkoSource=akka://Slf4jLoggerSpec/user/logProducer")
s should include("level=[INFO]")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$LogProducer]")
println(s)
@@ -207,39 +207,39 @@ class Slf4jLoggerSpec extends PekkoSpec(Slf4jLoggerSpec.config) with BeforeAndAf
s should include("msg=[Message with null custom MDC values]")
}
- "include system info in akkaSource when creating Logging with system" in {
+ "include system info in pekkoSource when creating Logging with system" in {
val log = Logging(system, "org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource")
log.info("test")
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource(akka://Slf4jLoggerSpec)")
+ s should include("pekkoSource=org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource(akka://Slf4jLoggerSpec)")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource(akka://Slf4jLoggerSpec)]")
}
- "not include system info in akkaSource when creating Logging with system.eventStream" in {
+ "not include system info in pekkoSource when creating Logging with system.eventStream" in {
val log = Logging(system.eventStream, "org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource")
log.info("test")
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource")
+ s should include("pekkoSource=org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec.MyLogSource]")
}
- "use short class name and include system info in akkaSource when creating Logging with system and class" in {
+ "use short class name and include system info in pekkoSource when creating Logging with system and class" in {
val log = Logging(system, classOf[MyLogSource])
log.info("test")
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=Slf4jLoggerSpec$MyLogSource(akka://Slf4jLoggerSpec)")
+ s should include("pekkoSource=Slf4jLoggerSpec$MyLogSource(akka://Slf4jLoggerSpec)")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$MyLogSource]")
}
- "use short class name in akkaSource when creating Logging with system.eventStream and class" in {
+ "use short class name in pekkoSource when creating Logging with system.eventStream and class" in {
val log = Logging(system.eventStream, classOf[MyLogSource])
log.info("test")
awaitCond(outputString.contains("----"), 5 seconds)
val s = outputString
- s should include("akkaSource=Slf4jLoggerSpec$MyLogSource")
+ s should include("pekkoSource=Slf4jLoggerSpec$MyLogSource")
s should include("logger=[org.apache.pekko.event.slf4j.Slf4jLoggerSpec$MyLogSource]")
}