some minor wording updates (#31326)

Arnout Engelen 2022-04-11 16:13:47 +02:00 committed by GitHub
parent d823b8838e
commit 34df60a22f
7 changed files with 8 additions and 8 deletions

@@ -86,7 +86,7 @@ class ActorPathSpec extends AnyWordSpec with Matchers {
 RootActorPath(Address("akka", "mysys"), "/user/boom/*") // illegally pass in a path where name is expected
 }.getMessage should include("is a path separator")
-// sanity check that creating such path still works
+// check that creating such path still works
 ActorPath.fromString("akka://mysys/user/boom/*")
 }

@@ -31,7 +31,7 @@ class EmptySourceBenchmark {
 Await.result(setup.run(), Duration.Inf)
 /*
-(not serious benchmark, just sanity check: run on macbook 15, late 2013)
+(not serious benchmark, just ballpark check: run on macbook 15, late 2013)
 While it was a PublisherSource:
 [info] EmptySourceBenchmark.empty thrpt 10 11.219 ± 6.498 ops/ms

@@ -332,7 +332,7 @@ changes in the message format.
 ### Remove event class and ignore events
 **Situation:**
-While investigating app performance you notice that insane amounts of `CustomerBlinked` events are being stored
+While investigating app performance you notice that unreasonable amounts of `CustomerBlinked` events are being stored
 for every customer each time he/she blinks. Upon investigation, you decide that the event does not add any value
 and should be deleted. You still have to be able to replay from a journal which contains those old CustomerBlinked events though.
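For readers landing on this hunk without the surrounding documentation: one way to handle this situation in Akka persistence is a read-side event adapter that silently drops the obsolete event during replay. A minimal sketch, assuming a hypothetical CustomerBlinked case class and the ReadEventAdapter/EventSeq API:

import akka.persistence.journal.{ EventSeq, ReadEventAdapter }

// Hypothetical event type standing in for the one discussed above.
final case class CustomerBlinked(customerId: String)

// Sketch: drop CustomerBlinked while reading from the journal, so old journals
// that still contain the event can be replayed without ever emitting it.
class SkipCustomerBlinkedAdapter extends ReadEventAdapter {
  override def fromJournal(event: Any, manifest: String): EventSeq = event match {
    case _: CustomerBlinked => EventSeq.empty         // ignore the obsolete event
    case other              => EventSeq.single(other) // pass everything else through
  }
}

The adapter only takes effect once it is registered for the journal plugin in configuration (event-adapters plus event-adapter-bindings).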

@@ -1247,7 +1247,7 @@ abstract class PersistentActorSpec(config: Config) extends PersistenceSpec(confi
 acks should equal(expectedAcks)
 }
 "reply to the original sender() of a command, even when using persistAsync" in {
-// sanity check, the setting of sender() for PersistentRepl is handled by PersistentActor currently
+// the setting of sender() for PersistentRepl is handled by PersistentActor currently
 // but as we want to remove it soon, keeping the explicit test here.
 val persistentActor = asyncPersistThreeTimesPersistentActor
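A reminder of what the test above exercises, as a minimal sketch rather than the spec's own asyncPersistThreeTimesPersistentActor: in the classic PersistentActor API, a persistAsync callback still sees the sender() of the original command, so a reply issued from inside the callback reaches the right actor. AckingActor and its persistenceId below are made up for illustration.

import akka.persistence.PersistentActor

class AckingActor extends PersistentActor {
  override def persistenceId: String = "acking-actor" // hypothetical id for this sketch

  override def receiveCommand: Receive = {
    case cmd: String =>
      persistAsync(s"evt-$cmd") { evt =>
        // sender() here still refers to the actor that sent `cmd`, even though
        // the callback runs after the event has been written asynchronously.
        sender() ! s"ack-$evt"
      }
  }

  override def receiveRecover: Receive = {
    case _ => // recovery is not relevant for this sketch
  }
}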

@@ -57,7 +57,7 @@ class InmemEventAdaptersSpec extends AkkaSpec {
 "pick the most specific adapter available" in {
 val adapters = EventAdapters(extendedActorSystem, inmemConfig)
-// sanity check; precise case, matching non-user classes
+// precise case, matching non-user classes
 adapters.get(classOf[java.lang.String]).getClass should ===(classOf[ExampleEventAdapter])
 // pick adapter by implemented marker interface

@@ -33,7 +33,7 @@ private[remote] final class CompressionTable[T](
 def invert: DecompressionTable[T] =
 if (_dictionary.isEmpty) DecompressionTable.empty[T].copy(originUid = originUid, version = version)
 else {
-// TODO: these are some expensive sanity checks, about the numbers being consecutive, without gaps
+// TODO: these are some expensive checks, about the numbers being consecutive, without gaps
 // TODO: we can remove them, make them re-map (not needed I believe though)
 val expectedGaplessSum = Integer.valueOf((_dictionary.size * (_dictionary.size + 1)) / 2) /* Dirichlet */
 import akka.util.ccompat.JavaConverters._
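The expectedGaplessSum line above leans on the arithmetic-series formula: 1 + 2 + ... + n = n(n+1)/2, so n distinct positive ids can only hit that sum if they are exactly the consecutive run 1..n. A standalone Scala sketch of that check, not tied to CompressionTable's actual value layout:

// Illustration only (not Akka's actual check): n distinct positive ids have a
// minimum possible sum of 1 + 2 + ... + n = n * (n + 1) / 2, so matching that
// sum exactly means the ids are the gapless run 1..n.
object GaplessCheck {
  def isGapless(ids: Set[Int]): Boolean = {
    require(ids.forall(_ > 0), "ids are assumed to be positive")
    val n = ids.size
    ids.sum == n * (n + 1) / 2
  }

  def main(args: Array[String]): Unit = {
    println(isGapless(Set(1, 2, 3, 4))) // true: consecutive run starting at 1
    println(isGapless(Set(1, 2, 4, 5))) // false: 3 is missing
  }
}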

@@ -43,7 +43,7 @@ class InterpreterStressSpec extends StreamSpec with GraphInterpreterSpecKit {
 lastEvents() should be(Set(OnComplete))
 val time = (System.nanoTime() - tstamp) / (1000.0 * 1000.0 * 1000.0)
-// Not a real benchmark, just for sanity check
+// Not a real benchmark, just for ballpark check
 info(s"Chain finished in $time seconds ${(chainLength * repetition) / (time * 1000 * 1000)} million maps/s")
 }
@@ -73,7 +73,7 @@ class InterpreterStressSpec extends StreamSpec with GraphInterpreterSpecKit {
 Set(Cancel(SubscriptionWithCancelException.StageWasCompleted), OnComplete, OnNext(0 + chainLength)))
 val time = (System.nanoTime() - tstamp) / (1000.0 * 1000.0 * 1000.0)
-// Not a real benchmark, just for sanity check
+// Not a real benchmark, just for ballpark check
 info(s"Chain finished in $time seconds ${(chainLength * repetition) / (time * 1000 * 1000)} million maps/s")
 }