Renames in doc #98 (#100)

* Rename some base_url values in Paradox.scala
* Rename Akka to Pekko in documentation, also adapt/remove small blocks of text that were no longer consistent or adequate
* Rename Akka to Pekko in some related files outside the doc directory
* Rename akka-bom to pekko-bom
* Rename artifact prefixes from akka to pekko
* Add links to Apache documents in licenses.md
* Add links to Akka migration guides for earlier versions
This commit is contained in:
Jonas Chapuis 2023-01-18 08:13:01 +01:00 committed by GitHub
parent 6594ccd3d3
commit 3d93c29737
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
224 changed files with 1469 additions and 2871 deletions

View file

@ -43,7 +43,7 @@ import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.pekko.pattern.Patterns.ask;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.AKKA;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.PEKKO;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.tweets;
import static junit.framework.TestCase.assertTrue;
@ -444,7 +444,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
// #tweet-authors
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
// #tweet-authors
@ -461,7 +461,8 @@ public class IntegrationDocTest extends AbstractJavaTest {
final RunnableGraph<NotUsed> sendEmails =
emailAddresses
.mapAsync(
4, address -> emailServer.send(new Email(address, "Akka", "I like your tweet")))
4,
address -> emailServer.send(new Email(address, "PEKKO", "I like your tweet")))
.to(Sink.ignore());
sendEmails.run(system);
@ -473,7 +474,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
probe.expectMsg("drewhk@somewhere.com");
probe.expectMsg("ktosopl@somewhere.com");
probe.expectMsg("mmartynas@somewhere.com");
probe.expectMsg("akkateam@somewhere.com");
probe.expectMsg("pekkoteam@somewhere.com");
}
};
}
@ -486,7 +487,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
// #email-addresses-mapAsync-supervision
final Attributes resumeAttrib =
@ -512,7 +513,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
// #external-service-mapAsyncUnordered
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
final Source<String, NotUsed> emailAddresses =
authors
@ -523,7 +524,8 @@ public class IntegrationDocTest extends AbstractJavaTest {
final RunnableGraph<NotUsed> sendEmails =
emailAddresses
.mapAsyncUnordered(
4, address -> emailServer.send(new Email(address, "Akka", "I like your tweet")))
4,
address -> emailServer.send(new Email(address, "Pekko", "I like your tweet")))
.to(Sink.ignore());
sendEmails.run(system);
@ -541,7 +543,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
final Source<String, NotUsed> phoneNumbers =
authors
@ -574,7 +576,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
assertTrue(set.contains(String.valueOf("drewhk".hashCode())));
assertTrue(set.contains(String.valueOf("ktosopl".hashCode())));
assertTrue(set.contains(String.valueOf("mmartynas".hashCode())));
assertTrue(set.contains(String.valueOf("akkateam".hashCode())));
assertTrue(set.contains(String.valueOf("pekkoteam".hashCode())));
}
};
}
@ -589,7 +591,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
final Source<String, NotUsed> phoneNumbers =
authors
@ -613,7 +615,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
probe.expectMsg(String.valueOf("drewhk".hashCode()));
probe.expectMsg(String.valueOf("ktosopl".hashCode()));
probe.expectMsg(String.valueOf("mmartynas".hashCode()));
probe.expectMsg(String.valueOf("akkateam".hashCode()));
probe.expectMsg(String.valueOf("pekkoteam".hashCode()));
}
};
}
@ -629,10 +631,10 @@ public class IntegrationDocTest extends AbstractJavaTest {
{
// #save-tweets
final Source<Tweet, NotUsed> akkaTweets = tweets.filter(t -> t.hashtags().contains(AKKA));
final Source<Tweet, NotUsed> pekkoTweets = tweets.filter(t -> t.hashtags().contains(PEKKO));
final RunnableGraph<NotUsed> saveTweets =
akkaTweets
pekkoTweets
.mapAsync(4, tweet -> ask(database, new Save(tweet), Duration.ofMillis(300L)))
.to(Sink.ignore());
// #save-tweets
@ -645,7 +647,7 @@ public class IntegrationDocTest extends AbstractJavaTest {
probe.expectMsg("drewhk");
probe.expectMsg("ktosopl");
probe.expectMsg("mmartynas");
probe.expectMsg("akkateam");
probe.expectMsg("pekkoteam");
}
};
}

View file

@ -37,7 +37,7 @@ import org.reactivestreams.Subscription;
import java.lang.Exception;
import static jdocs.stream.ReactiveStreamsDocTest.Fixture.Data.authors;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.AKKA;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.PEKKO;
public class ReactiveStreamsDocTest extends AbstractJavaTest {
@ -66,7 +66,7 @@ public class ReactiveStreamsDocTest extends AbstractJavaTest {
static // #authors
final Flow<Tweet, Author, NotUsed> authors =
Flow.of(Tweet.class).filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
Flow.of(Tweet.class).filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
// #authors
}
@ -95,7 +95,7 @@ public class ReactiveStreamsDocTest extends AbstractJavaTest {
}
/**
* This is a minimal version of SubscriberProbe, which lives in akka-stream-testkit (test
* This is a minimal version of SubscriberProbe, which lives in pekko-stream-testkit (test
* scope) and for now wanted to avoid setting up (test -> compile) dependency for Maven).
*
* <p>TODO: Once SubscriberProbe is easily used here replace this MPS with it.
@ -259,7 +259,7 @@ public class ReactiveStreamsDocTest extends AbstractJavaTest {
storageProbe.expectMsg(new Author("drewhk"));
storageProbe.expectMsg(new Author("ktosopl"));
storageProbe.expectMsg(new Author("mmartynas"));
storageProbe.expectMsg(new Author("akkateam"));
storageProbe.expectMsg(new Author("pekkoteam"));
storageProbe.expectMsg("complete");
}
}

View file

@ -33,7 +33,7 @@ public class RestartDocTest {
static ActorSystem system;
static Materializer materializer;
// Mocking akka-http
// Mocking pekko-http
public static class Http {
public static Http get(ActorSystem system) {
return new Http();

View file

@ -40,7 +40,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.AKKA;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.PEKKO;
import static jdocs.stream.TwitterStreamQuickstartDocTest.Model.tweets;
@SuppressWarnings("unused")
@ -173,21 +173,21 @@ public class TwitterStreamQuickstartDocTest extends AbstractJavaTest {
// #model
public static final Hashtag AKKA = new Hashtag("#akka");
public static final Hashtag PEKKO = new Hashtag("#pekko");
// #model
public static final Source<Tweet, NotUsed> tweets =
Source.from(
Arrays.asList(
new Tweet[] {
new Tweet(new Author("rolandkuhn"), System.currentTimeMillis(), "#akka rocks!"),
new Tweet(new Author("patriknw"), System.currentTimeMillis(), "#akka !"),
new Tweet(new Author("bantonsson"), System.currentTimeMillis(), "#akka !"),
new Tweet(new Author("drewhk"), System.currentTimeMillis(), "#akka !"),
new Tweet(new Author("rolandkuhn"), System.currentTimeMillis(), "#pekko rocks!"),
new Tweet(new Author("patriknw"), System.currentTimeMillis(), "#pekko !"),
new Tweet(new Author("bantonsson"), System.currentTimeMillis(), "#pekko !"),
new Tweet(new Author("drewhk"), System.currentTimeMillis(), "#pekko !"),
new Tweet(
new Author("ktosopl"), System.currentTimeMillis(), "#akka on the rocks!"),
new Tweet(new Author("mmartynas"), System.currentTimeMillis(), "wow #akka !"),
new Tweet(new Author("akkateam"), System.currentTimeMillis(), "#akka rocks!"),
new Author("ktosopl"), System.currentTimeMillis(), "#pekko on the rocks!"),
new Tweet(new Author("mmartynas"), System.currentTimeMillis(), "wow #pekko !"),
new Tweet(new Author("pekkoteam"), System.currentTimeMillis(), "#pekko rocks!"),
new Tweet(new Author("bananaman"), System.currentTimeMillis(), "#bananas rock!"),
new Tweet(new Author("appleman"), System.currentTimeMillis(), "#apples rock!"),
new Tweet(
@ -241,7 +241,7 @@ public class TwitterStreamQuickstartDocTest extends AbstractJavaTest {
// #authors-filter-map
final Source<Author, NotUsed> authors =
tweets.filter(t -> t.hashtags().contains(AKKA)).map(t -> t.author);
tweets.filter(t -> t.hashtags().contains(PEKKO)).map(t -> t.author);
// #first-sample
// #authors-filter-map
@ -250,7 +250,7 @@ public class TwitterStreamQuickstartDocTest extends AbstractJavaTest {
JavaPartialFunction<Tweet, Author> collectFunction =
new JavaPartialFunction<Tweet, Author>() {
public Author apply(Tweet t, boolean isCheck) {
if (t.hashtags().contains(AKKA)) {
if (t.hashtags().contains(PEKKO)) {
if (isCheck) return null; // to spare the expensive or side-effecting code
return t.author;
} else {
@ -368,7 +368,7 @@ public class TwitterStreamQuickstartDocTest extends AbstractJavaTest {
Sink.<Integer, Integer>fold(0, (acc, elem) -> acc + elem);
final RunnableGraph<CompletionStage<Integer>> counterRunnableGraph =
tweetsInMinuteFromNow
.filter(t -> t.hashtags().contains(AKKA))
.filter(t -> t.hashtags().contains(PEKKO))
.map(t -> 1)
.toMat(sumSink, Keep.right());

View file

@ -104,7 +104,7 @@ public class StreamFileDocTest extends AbstractJavaTest {
try {
// #file-sink
Sink<ByteString, CompletionStage<IOResult>> fileSink = FileIO.toPath(file);
Source<String, NotUsed> textSource = Source.single("Hello Akka Stream!");
Source<String, NotUsed> textSource = Source.single("Hello Pekko Stream!");
CompletionStage<IOResult> ioResult =
textSource.map(ByteString::fromString).runWith(fileSink, system);

View file

@ -51,7 +51,7 @@ public class RecipeParseLines extends RecipeTest {
ByteString.fromString("Hello World"),
ByteString.fromString("\r"),
ByteString.fromString("!\r"),
ByteString.fromString("\nHello Akka!\r\nHello Streams!"),
ByteString.fromString("\nHello Pekko!\r\nHello Streams!"),
ByteString.fromString("\r\n\r\n")));
// #parse-lines

View file

@ -54,7 +54,7 @@ public class RecipeReduceByKeyTest extends RecipeTest {
new TestKit(system) {
{
final Source<String, NotUsed> words =
Source.from(Arrays.asList("hello", "world", "and", "hello", "akka"));
Source.from(Arrays.asList("hello", "world", "and", "hello", "pekko"));
// #word-count
final int MAXIMUM_DISTINCT_WORDS = 1000;
@ -79,7 +79,7 @@ public class RecipeReduceByKeyTest extends RecipeTest {
expected.add(new Pair<>("hello", 2));
expected.add(new Pair<>("world", 1));
expected.add(new Pair<>("and", 1));
expected.add(new Pair<>("akka", 1));
expected.add(new Pair<>("pekko", 1));
Assert.assertEquals(expected, result);
}
};
@ -106,7 +106,7 @@ public class RecipeReduceByKeyTest extends RecipeTest {
new TestKit(system) {
{
final Source<String, NotUsed> words =
Source.from(Arrays.asList("hello", "world", "and", "hello", "akka"));
Source.from(Arrays.asList("hello", "world", "and", "hello", "pekko"));
// #reduce-by-key-general2
final int MAXIMUM_DISTINCT_WORDS = 1000;
@ -128,7 +128,7 @@ public class RecipeReduceByKeyTest extends RecipeTest {
expected.add(new Pair<>("hello", 2));
expected.add(new Pair<>("world", 1));
expected.add(new Pair<>("and", 1));
expected.add(new Pair<>("akka", 1));
expected.add(new Pair<>("pekko", 1));
Assert.assertEquals(expected, result);
}
};

View file

@ -37,7 +37,7 @@ public class RecipeSourceFromFunction extends RecipeTest {
ActorSystem.create(
"RecipeSourceFromFunction",
ConfigFactory.parseString(
"pekko.loglevel=DEBUG\nakka.loggers = [org.apache.pekko.testkit.TestEventListener]"));
"pekko.loglevel=DEBUG\npekko.loggers = [org.apache.pekko.testkit.TestEventListener]"));
}
@AfterClass

View file

@ -56,7 +56,7 @@ public class Restart {
// [INFO] [12/10/2019 13:51:58.302] [default-pekko.test.stream-dispatcher-7]
// [pekko.actor.ActorSystemImpl(default)] 3
// [WARN] [12/10/2019 13:51:58.310] [default-pekko.test.stream-dispatcher-7]
// [RestartWithBackoffSource(akka://default)] Restarting graph due to failure. stack_trace:
// [RestartWithBackoffSource(pekko://default)] Restarting graph due to failure. stack_trace:
// (RuntimeException: darn)
// --> 1 second gap
// [INFO] [12/10/2019 13:51:59.379] [default-pekko.test.stream-dispatcher-8]
@ -66,7 +66,7 @@ public class Restart {
// [INFO] [12/10/2019 13:51:59.383] [default-pekko.test.stream-dispatcher-8]
// [pekko.actor.ActorSystemImpl(default)] 3
// [WARN] [12/10/2019 13:51:59.386] [default-pekko.test.stream-dispatcher-8]
// [RestartWithBackoffSource(akka://default)] Restarting graph due to failure. stack_trace:
// [RestartWithBackoffSource(pekko://default)] Restarting graph due to failure. stack_trace:
// (RuntimeException: darn)
// --> 2 second gap
// [INFO] [12/10/2019 13:52:01.594] [default-pekko.test.stream-dispatcher-8]
@ -76,7 +76,7 @@ public class Restart {
// [INFO] [12/10/2019 13:52:01.595] [default-pekko.test.stream-dispatcher-8]
// [pekko.actor.ActorSystemImpl(default)] 3
// [WARN] [12/10/2019 13:52:01.596] [default-pekko.test.stream-dispatcher-8]
// [RestartWithBackoffSource(akka://default)] Restarting graph due to failure. stack_trace:
// [RestartWithBackoffSource(pekko://default)] Restarting graph due to failure. stack_trace:
// (RuntimeException: darn)
// #restart-failure-inner-failure