Posted to commits@pekko.apache.org by fa...@apache.org on 2023/02/16 09:39:23 UTC

[incubator-pekko] branch main updated: update some random akka refs (#196)

This is an automated email from the ASF dual-hosted git repository.

fanningpj pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-pekko.git


The following commit(s) were added to refs/heads/main by this push:
     new 3ee028f915 update some random akka refs (#196)
3ee028f915 is described below

commit 3ee028f9153096456ea1d75d11ee01047a94c383
Author: PJ Fanning <pj...@users.noreply.github.com>
AuthorDate: Thu Feb 16 10:39:18 2023 +0100

    update some random akka refs (#196)
---
 .../PrimitivesSerializationSpec.scala              | 10 +--
 .../apache/pekko/serialization/SerializeSpec.scala |  2 +-
 .../pekko/util/BoundedBlockingQueueSpec.scala      | 22 +++---
 .../jdocs/org/apache/pekko/typed/IntroTest.java    |  2 +-
 .../apache/pekko/typed/SpawnProtocolDocTest.java   |  2 +-
 .../docs/org/apache/pekko/typed/IntroSpec.scala    |  4 +-
 .../apache/pekko/typed/SpawnProtocolDocSpec.scala  |  2 +-
 .../pekko/actor/typed/internal/jfr/Events.scala    | 44 ++++++------
 .../actor/typed/delivery/ProducerController.scala  |  2 +-
 .../delivery/WorkPullingProducerController.scala   |  2 +-
 .../actor/typed/internal/pubsub/TopicImpl.scala    |  2 +-
 .../scala/org/apache/pekko/actor/ActorSystem.scala |  4 +-
 .../scala/org/apache/pekko/event/Logging.scala     |  2 +-
 .../apache/pekko/serialization/Serialization.scala | 18 ++---
 .../pekko/util/ByteString_apply_Benchmark.scala    |  2 +-
 .../pekko/util/ByteString_grouped_Benchmark.scala  |  2 +-
 .../pekko/util/ByteString_toArray_Benchmark.scala  |  2 +-
 .../delivery/ShardingProducerController.scala      |  2 +-
 .../sharding/typed/javadsl/ClusterSharding.scala   |  6 +-
 .../sharding/typed/scaladsl/ClusterSharding.scala  |  6 +-
 .../cluster/sharding/internal/jfr/Events.scala     | 10 +--
 .../pekko/cluster/sharding/ClusterSharding.scala   |  6 +-
 .../pekko/cluster/client/ClusterClient.scala       | 26 +++----
 .../cluster/sbr/SplitBrainResolverSettings.scala   |  2 +-
 .../pekko/cluster/sbr/LeaseMajority5NodeSpec.scala |  2 +-
 .../pekko/cluster/sbr/LeaseMajoritySpec.scala      | 11 +--
 .../pekko/cluster/sbr/SplitBrainResolverSpec.scala |  2 +-
 docs/src/main/paradox/persistence-query.md         |  2 +-
 docs/src/main/paradox/typed/from-classic.md        |  4 +-
 .../apache/pekko/osgi/OsgiActorSystemFactory.scala |  6 +-
 .../org/apache/pekko/osgi/PojoSRTestSupport.scala  |  2 +-
 .../org/apache/pekko/persistence/PluginSpec.scala  |  2 +-
 .../japi/snapshot/JavaSnapshotStoreSpec.scala      |  2 +-
 .../pekko/persistence/journal/JournalSpec.scala    |  2 +-
 .../persistence/snapshot/SnapshotStoreSpec.scala   |  2 +-
 project/JavaFormatter.scala                        |  2 +-
 project/OSGi.scala                                 |  2 +-
 project/PekkoDisciplinePlugin.scala                |  6 +-
 project/plugins.sbt                                |  2 +-
 .../apache/pekko/remote/artery/jfr/Events.scala    | 80 +++++++++++-----------
 .../apache/pekko/remote/MessageSerializer.scala    |  4 +-
 .../artery/tcp/ssl/PemManagersProviderSpec.scala   |  2 +-
 .../PrimitivesSerializationSpec.scala              |  8 +--
 .../serialization/jackson/JacksonModule.scala      |  2 +-
 .../jackson/JacksonObjectMapperProvider.scala      |  4 +-
 .../serialization/jackson/JacksonSerializer.scala  |  4 +-
 .../jackson/JacksonSerializerSpec.scala            |  6 +-
 ...IterablePublisherViaJavaFlowPublisherTest.scala |  4 +-
 .../tck/PekkoIdentityProcessorVerification.scala   |  2 +-
 49 files changed, 175 insertions(+), 172 deletions(-)

diff --git a/actor-tests/src/test/scala/org/apache/pekko/serialization/PrimitivesSerializationSpec.scala b/actor-tests/src/test/scala/org/apache/pekko/serialization/PrimitivesSerializationSpec.scala
index bf6cbe7562..c3e6a05a6f 100644
--- a/actor-tests/src/test/scala/org/apache/pekko/serialization/PrimitivesSerializationSpec.scala
+++ b/actor-tests/src/test/scala/org/apache/pekko/serialization/PrimitivesSerializationSpec.scala
@@ -79,7 +79,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
       }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serialization.serializerFor(1L.asInstanceOf[AnyRef].getClass).identifier === 18
     }
 
@@ -102,7 +102,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
     }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serialization.serializerFor(1L.asInstanceOf[AnyRef].getClass).identifier === 19
     }
   }
@@ -124,7 +124,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
     }
 
     "have right serializer id  ($i)" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serialization.serializerFor(true.asInstanceOf[AnyRef].getClass).identifier === 35
     }
   }
@@ -148,7 +148,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
       }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serialization.serializerFor(1L.asInstanceOf[AnyRef].getClass).identifier === 20
     }
 
@@ -178,7 +178,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
     }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serialization.serializerFor(1L.asInstanceOf[AnyRef].getClass).identifier === 21
     }
 
diff --git a/actor-tests/src/test/scala/org/apache/pekko/serialization/SerializeSpec.scala b/actor-tests/src/test/scala/org/apache/pekko/serialization/SerializeSpec.scala
index f71f065087..3d0104f097 100644
--- a/actor-tests/src/test/scala/org/apache/pekko/serialization/SerializeSpec.scala
+++ b/actor-tests/src/test/scala/org/apache/pekko/serialization/SerializeSpec.scala
@@ -276,7 +276,7 @@ class SerializeSpec extends PekkoSpec(SerializationTests.serializeConf) {
     }
 
     "log warning if non-Pekko serializer is configured for Pekko message" in {
-      EventFilter.warning(pattern = ".*not implemented by Pekko.*", occurrences = 1).intercept {
+      EventFilter.warning(pattern = ".*not implemented by Apache Pekko.*", occurrences = 1).intercept {
         ser.serialize(new Other).get
       }
     }
diff --git a/actor-tests/src/test/scala/org/apache/pekko/util/BoundedBlockingQueueSpec.scala b/actor-tests/src/test/scala/org/apache/pekko/util/BoundedBlockingQueueSpec.scala
index a09ac3bd7b..ae55370b46 100644
--- a/actor-tests/src/test/scala/org/apache/pekko/util/BoundedBlockingQueueSpec.scala
+++ b/actor-tests/src/test/scala/org/apache/pekko/util/BoundedBlockingQueueSpec.scala
@@ -576,7 +576,7 @@ class BoundedBlockingQueueSpec
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(2)
       val elems = List("Hello", "World")
       elems.foreach(queue.put)
-      queue.containsAll(("Akka" :: elems).asJava) should equal(false)
+      queue.containsAll(("Pekko" :: elems).asJava) should equal(false)
     }
 
     "returns true if the queue contains exactly all elements" in {
@@ -590,7 +590,7 @@ class BoundedBlockingQueueSpec
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(3)
       val elems = List("Hello", "World")
       elems.foreach(queue.put)
-      queue.put("Akka")
+      queue.put("Pekko")
       queue.containsAll(elems.asJava) should equal(true)
     }
   }
@@ -600,20 +600,20 @@ class BoundedBlockingQueueSpec
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(3)
       val elems = List("Hello", "World")
       elems.foreach(queue.put)
-      queue.put("Akka")
+      queue.put("Pekko")
 
       queue.removeAll(elems.asJava) should equal(true)
       queue.remainingCapacity() should equal(2)
-      queue.poll() should equal("Akka")
+      queue.poll() should equal("Pekko")
     }
 
     "return false if no elements were removed" in {
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(1)
       val elems = List("Hello", "World")
-      queue.put("Akka")
+      queue.put("Pekko")
 
       queue.removeAll(elems.asJava) should equal(false)
-      queue.poll() should equal("Akka")
+      queue.poll() should equal("Pekko")
     }
   }
 
@@ -622,17 +622,17 @@ class BoundedBlockingQueueSpec
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(3)
       val elems = List("Hello", "World")
       elems.foreach(queue.put)
-      queue.put("Akka")
+      queue.put("Pekko")
 
       queue.retainAll(elems.asJava) should equal(true)
       queue.remainingCapacity() should equal(1)
-      queue.toArray() shouldNot contain("Akka")
+      queue.toArray() shouldNot contain("Pekko")
       queue.toArray() should contain theSameElementsAs elems
     }
 
     "return false if no elements were removed" in {
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(2)
-      val elems = List("Hello", "World", "Akka")
+      val elems = List("Hello", "World", "Pekko")
       queue.put("Hello")
       queue.put("World")
 
@@ -650,7 +650,7 @@ class BoundedBlockingQueueSpec
     "disallow remove() before calling next()" in {
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(2)
       queue.put("Hello")
-      queue.put("Akka")
+      queue.put("Pekko")
 
       val iter = queue.iterator()
       intercept[IllegalStateException] {
@@ -661,7 +661,7 @@ class BoundedBlockingQueueSpec
     "disallow two subsequent remove()s" in {
       val TestContext(queue, _, _, _, _, _) = newBoundedBlockingQueue(2)
       queue.put("Hello")
-      queue.put("Akka")
+      queue.put("Pekko")
 
       val iter = queue.iterator()
       iter.next()
diff --git a/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/IntroTest.java b/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/IntroTest.java
index 8661ad3dd8..cef8b4dfa7 100644
--- a/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/IntroTest.java
+++ b/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/IntroTest.java
@@ -201,7 +201,7 @@ public interface IntroTest {
         ActorSystem.create(HelloWorldMain.create(), "hello");
 
     system.tell(new HelloWorldMain.SayHello("World"));
-    system.tell(new HelloWorldMain.SayHello("Akka"));
+    system.tell(new HelloWorldMain.SayHello("Pekko"));
     // #hello-world
 
     Thread.sleep(3000);
diff --git a/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/SpawnProtocolDocTest.java b/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/SpawnProtocolDocTest.java
index 40eed715f7..c6bbd5dacc 100644
--- a/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/SpawnProtocolDocTest.java
+++ b/actor-typed-tests/src/test/java/jdocs/org/apache/pekko/typed/SpawnProtocolDocTest.java
@@ -85,7 +85,7 @@ public interface SpawnProtocolDocTest {
             greetedReplyTo.whenComplete(
                 (greetedReplyToRef, exc2) -> {
                   if (exc2 == null) {
-                    greeterRef.tell(new HelloWorld.Greet("Akka", greetedReplyToRef));
+                    greeterRef.tell(new HelloWorld.Greet("Pekko", greetedReplyToRef));
                   }
                 });
           }
diff --git a/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/IntroSpec.scala b/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/IntroSpec.scala
index 284abdffe2..441e70acbf 100644
--- a/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/IntroSpec.scala
+++ b/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/IntroSpec.scala
@@ -103,7 +103,7 @@ object IntroSpec {
         ActorSystem(HelloWorldMain(), "hello")
 
       system ! HelloWorldMain.SayHello("World")
-      system ! HelloWorldMain.SayHello("Akka")
+      system ! HelloWorldMain.SayHello("Pekko")
     }
     //#hello-world-main
   }
@@ -260,7 +260,7 @@ class IntroSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with LogC
         ActorSystem(HelloWorldMain(), "hello")
 
       system ! HelloWorldMain.SayHello("World")
-      system ! HelloWorldMain.SayHello("Akka")
+      system ! HelloWorldMain.SayHello("Pekko")
 
       // #hello-world
 
diff --git a/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/SpawnProtocolDocSpec.scala b/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/SpawnProtocolDocSpec.scala
index aac676a134..141b836da9 100644
--- a/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/SpawnProtocolDocSpec.scala
+++ b/actor-typed-tests/src/test/scala/docs/org/apache/pekko/typed/SpawnProtocolDocSpec.scala
@@ -87,7 +87,7 @@ class SpawnProtocolDocSpec extends ScalaTestWithActorTestKit with AnyWordSpecLik
         system.ask(SpawnProtocol.Spawn(greetedBehavior, name = "", props = Props.empty, _))
 
       for (greeterRef <- greeter; replyToRef <- greetedReplyTo) {
-        greeterRef ! HelloWorld.Greet("Akka", replyToRef)
+        greeterRef ! HelloWorld.Greet("Pekko", replyToRef)
       }
 
       // #system-spawn
diff --git a/actor-typed/src/main/scala-jdk-9/org/apache/pekko/actor/typed/internal/jfr/Events.scala b/actor-typed/src/main/scala-jdk-9/org/apache/pekko/actor/typed/internal/jfr/Events.scala
index 67a35bfc87..d0ef7fc0a8 100644
--- a/actor-typed/src/main/scala-jdk-9/org/apache/pekko/actor/typed/internal/jfr/Events.scala
+++ b/actor-typed/src/main/scala-jdk-9/org/apache/pekko/actor/typed/internal/jfr/Events.scala
@@ -28,21 +28,21 @@ import org.apache.pekko.annotation.InternalApi
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController created")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController created")
 final class DeliveryProducerCreated(val producerId: String, val actorPath: String) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController started")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController started")
 final class DeliveryProducerStarted(val producerId: String, val actorPath: String) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController sent RequestNext")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController sent RequestNext")
 final class DeliveryProducerRequestNext(val producerId: String, val currentSeqNr: Long, val confirmedSeqNr: Long)
     extends Event
 
@@ -50,21 +50,21 @@ final class DeliveryProducerRequestNext(val producerId: String, val currentSeqNr
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController sent SequencedMessage")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController sent SequencedMessage")
 final class DeliveryProducerSent(val producerId: String, val seqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController waiting for demand")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController waiting for demand")
 final class DeliveryProducerWaitingForRequest(val producerId: String, val currentSeqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController resent unconfirmed")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController resent unconfirmed")
 final class DeliveryProducerResentUnconfirmed(val producerId: String, val fromSeqNr: Long, val toSeqNr: Long)
     extends Event
 
@@ -72,14 +72,14 @@ final class DeliveryProducerResentUnconfirmed(val producerId: String, val fromSe
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController resent first")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController resent first")
 final class DeliveryProducerResentFirst(val producerId: String, val firstSeqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label(
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label(
   "Delivery ProducerController resent first unconfirmed")
 final class DeliveryProducerResentFirstUnconfirmed(val producerId: String, val seqNr: Long) extends Event
 
@@ -87,14 +87,15 @@ final class DeliveryProducerResentFirstUnconfirmed(val producerId: String, val s
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController received message")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label("Delivery ProducerController received message")
 final class DeliveryProducerReceived(val producerId: String, val currentSeqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController received demand request")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label(
+  "Delivery ProducerController received demand request")
 final class DeliveryProducerReceivedRequest(val producerId: String, val requestedSeqNr: Long, confirmedSeqNr: Long)
     extends Event
 
@@ -102,35 +103,36 @@ final class DeliveryProducerReceivedRequest(val producerId: String, val requeste
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ProducerController")) @Label("Delivery ProducerController received resend request")
+@Category(Array("Pekko", "Delivery", "ProducerController")) @Label(
+  "Delivery ProducerController received resend request")
 final class DeliveryProducerReceivedResend(val producerId: String, val fromSeqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController created")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController created")
 final class DeliveryConsumerCreated(val actorPath: String) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController started")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController started")
 final class DeliveryConsumerStarted(val actorPath: String) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received")
 final class DeliveryConsumerReceived(val producerId: String, val seqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label(
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label(
   "Delivery ConsumerController received, previous in progress")
 final class DeliveryConsumerReceivedPreviousInProgress(val producerId: String, val seqNr: Long, val stashed: Int)
     extends Event
@@ -139,21 +141,21 @@ final class DeliveryConsumerReceivedPreviousInProgress(val producerId: String, v
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received duplicate")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received duplicate")
 final class DeliveryConsumerDuplicate(val producerId: String, val expectedSeqNr: Long, val seqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received missing")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController received missing")
 final class DeliveryConsumerMissing(val producerId: String, val expectedSeqNr: Long, val seqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label(
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label(
   "Delivery ConsumerController received expected resend")
 final class DeliveryConsumerReceivedResend(val seqNr: Long) extends Event
 
@@ -161,19 +163,19 @@ final class DeliveryConsumerReceivedResend(val seqNr: Long) extends Event
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController sent demand Request")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController sent demand Request")
 final class DeliveryConsumerSentRequest(val producerId: String, val requestedSeqNr: Long) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController producer changed")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController producer changed")
 final class DeliveryConsumerChangedProducer(val producerId: String) extends Event
 
 /** INTERNAL API */
 @InternalApi
 @Enabled(true)
 @StackTrace(false)
-@Category(Array("Akka", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController stash is full")
+@Category(Array("Pekko", "Delivery", "ConsumerController")) @Label("Delivery ConsumerController stash is full")
 final class DeliveryConsumerStashFull(val producerId: String, val seqNr: Long) extends Event
diff --git a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/ProducerController.scala b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/ProducerController.scala
index ed314f27f3..442e54fe32 100644
--- a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/ProducerController.scala
+++ b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/ProducerController.scala
@@ -68,7 +68,7 @@ import pekko.util.JavaDurationConverters._
  * To make sure the messages can be delivered also in that scenario the `ProducerController` can be
  * used with a [[DurableProducerQueue]]. Then the unconfirmed messages are stored in a durable way so
  * that they can be redelivered when the producer is started again. An implementation of the
- * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `akka-persistence-typed`.
+ * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `pekko-persistence-typed`.
  *
  * Instead of using `tell` with the `sendNextTo` in the `RequestNext` the producer can use `context.ask`
  * with the `askNextTo` in the `RequestNext`. The difference is that a reply is sent back when the
diff --git a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/WorkPullingProducerController.scala b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/WorkPullingProducerController.scala
index ad71b6c8d3..4a86e3d7c9 100644
--- a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/WorkPullingProducerController.scala
+++ b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/delivery/WorkPullingProducerController.scala
@@ -83,7 +83,7 @@ import pekko.util.JavaDurationConverters._
  * To make sure the messages can be delivered also in that scenario the `WorkPullingProducerController` can be
  * used with a [[DurableProducerQueue]]. Then the unconfirmed messages are stored in a durable way so
  * that they can be redelivered when the producer is started again. An implementation of the
- * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `akka-persistence-typed`.
+ * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `pekko-persistence-typed`.
  *
  * Instead of using `tell` with the `sendNextTo` in the `RequestNext` the producer can use `context.ask`
  * with the `askNextTo` in the `RequestNext`. The difference is that a reply is sent back when the
diff --git a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/internal/pubsub/TopicImpl.scala b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/internal/pubsub/TopicImpl.scala
index 90ae6b5e05..fde1365797 100644
--- a/actor-typed/src/main/scala/org/apache/pekko/actor/typed/internal/pubsub/TopicImpl.scala
+++ b/actor-typed/src/main/scala/org/apache/pekko/actor/typed/internal/pubsub/TopicImpl.scala
@@ -45,7 +45,7 @@ private[pekko] object TopicImpl {
   final case class Subscribe[T](subscriber: ActorRef[T]) extends Topic.Command[T]
   final case class Unsubscribe[T](subscriber: ActorRef[T]) extends Topic.Command[T]
 
-  // internal messages, note that the protobuf serializer for those sent remotely is defined in akka-cluster-typed
+  // internal messages, note that the protobuf serializer for those sent remotely is defined in pekko-cluster-typed
   final case class GetTopicStats[T](replyTo: ActorRef[TopicStats]) extends Topic.Command[T]
   final case class TopicStats(localSubscriberCount: Int, topicInstanceCount: Int) extends Topic.TopicStats
   final case class TopicInstancesUpdated[T](topics: Set[ActorRef[TopicImpl.Command[T]]]) extends Command[T]
diff --git a/actor/src/main/scala/org/apache/pekko/actor/ActorSystem.scala b/actor/src/main/scala/org/apache/pekko/actor/ActorSystem.scala
index cd77bc9766..52aa29087f 100644
--- a/actor/src/main/scala/org/apache/pekko/actor/ActorSystem.scala
+++ b/actor/src/main/scala/org/apache/pekko/actor/ActorSystem.scala
@@ -334,8 +334,8 @@ object ActorSystem {
      * INTERNAL API
      *
      * When using Akka Typed the Slf4jLogger should be used by default.
-     * Looking for config property `pekko.use-slf4j` (defined in akka-actor-typed) and
-     * that `Slf4jLogger` (akka-slf4j) is in  classpath.
+     * Looking for config property `pekko.use-slf4j` (defined in pekko-actor-typed) and
+     * that `Slf4jLogger` (pekko-slf4j) is in  classpath.
      * Then adds `Slf4jLogger` to configured loggers and removes `DefaultLogger`.
      */
     @InternalApi private[pekko] def amendSlf4jConfig(config: Config, dynamicAccess: DynamicAccess): Config = {
diff --git a/actor/src/main/scala/org/apache/pekko/event/Logging.scala b/actor/src/main/scala/org/apache/pekko/event/Logging.scala
index 5eeb249328..94212d880b 100644
--- a/actor/src/main/scala/org/apache/pekko/event/Logging.scala
+++ b/actor/src/main/scala/org/apache/pekko/event/Logging.scala
@@ -1683,7 +1683,7 @@ trait DiagnosticLoggingAdapter extends LoggingAdapter {
   def clearMDC(): Unit = mdc(emptyMDC)
 }
 
-/** DO NOT INHERIT: Class is open only for use by akka-slf4j */
+/** DO NOT INHERIT: Class is open only for use by pekko-slf4j */
 @DoNotInherit
 class LogMarker(val name: String, val properties: Map[String, Any]) {
 
diff --git a/actor/src/main/scala/org/apache/pekko/serialization/Serialization.scala b/actor/src/main/scala/org/apache/pekko/serialization/Serialization.scala
index c2e9c47067..f09d3b8f9c 100644
--- a/actor/src/main/scala/org/apache/pekko/serialization/Serialization.scala
+++ b/actor/src/main/scala/org/apache/pekko/serialization/Serialization.scala
@@ -369,7 +369,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
                 clazz.getName)
             }
 
-            if (!warnUnexpectedNonAkkaSerializer(clazz, ser))
+            if (!warnUnexpectedNonPekkoSerializer(clazz, ser))
               log.debug("Using serializer [{}] for message [{}]", ser.getClass.getName, clazz.getName)
 
             ser
@@ -447,7 +447,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
   private[pekko] val bindings: immutable.Seq[ClassSerializer] = {
     val fromConfig = for {
       (className: String, alias: String) <- settings.SerializationBindings
-      if alias != "none" && checkGoogleProtobuf(className) && checkAkkaProtobuf(className)
+      if alias != "none" && checkGoogleProtobuf(className) && checkPekkoProtobuf(className)
     } yield (system.dynamicAccess.getClassFor[Any](className).get, serializers(alias))
 
     val fromSettings = serializerDetails.flatMap { detail =>
@@ -456,7 +456,7 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
 
     val result = sort(fromConfig ++ fromSettings)
     ensureOnlyAllowedSerializers(result.iterator.map { case (_, ser) => ser })
-    result.foreach { case (clazz, ser) => warnUnexpectedNonAkkaSerializer(clazz, ser) }
+    result.foreach { case (clazz, ser) => warnUnexpectedNonPekkoSerializer(clazz, ser) }
     result
   }
 
@@ -466,12 +466,12 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
     }
   }
 
-  private def warnUnexpectedNonAkkaSerializer(clazz: Class[_], ser: Serializer): Boolean = {
+  private def warnUnexpectedNonPekkoSerializer(clazz: Class[_], ser: Serializer): Boolean = {
     if (clazz.getName.startsWith("org.apache.pekko.") && !ser.getClass.getName.startsWith("org.apache.pekko.")) {
       log.warning(
         "Using serializer [{}] for message [{}]. Note that this serializer " +
-        "is not implemented by Pekko. It's not recommended to replace serializers for messages " +
-        "provided by Pekko.",
+        "is not implemented by Apache Pekko. It's not recommended to replace serializers for messages " +
+        "provided by Apache Pekko.",
         ser.getClass.getName,
         clazz.getName)
       true
@@ -484,9 +484,9 @@ class Serialization(val system: ExtendedActorSystem) extends Extension {
   // include "com.google.protobuf.GeneratedMessage" = proto in configured serialization-bindings.
   private def checkGoogleProtobuf(className: String): Boolean = checkClass("com.google.protobuf", className)
 
-  // akka-protobuf is now not a dependency of remote so only load if user has explicitly added it
-  // remove in 2.7
-  private def checkAkkaProtobuf(className: String): Boolean = checkClass("org.apache.pekko.protobuf", className)
+  // pekko-protobuf is now not a dependency of remote so only load if user has explicitly added it
+  // remove in v1.1
+  private def checkPekkoProtobuf(className: String): Boolean = checkClass("org.apache.pekko.protobuf", className)
 
   private def checkClass(prefix: String, className: String): Boolean =
     !className.startsWith(prefix) || system.dynamicAccess.getClassFor[Any](className).isSuccess
diff --git a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_apply_Benchmark.scala b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_apply_Benchmark.scala
index 7e0fb73e48..f2c2c56f1b 100644
--- a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_apply_Benchmark.scala
+++ b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_apply_Benchmark.scala
@@ -26,7 +26,7 @@ class ByteString_apply_Benchmark {
   val bss = ByteStrings(Vector.fill(1024)(ByteString1(Array(0.toByte))))
 
   /*
-    akka-bench-jmh/jmh:run -f 1 -wi 3 -i 3 .*ByteString_apply_Benchmark.*
+    bench-jmh/jmh:run -f 1 -wi 3 -i 3 .*ByteString_apply_Benchmark.*
 
     2.12 original
     ByteString_apply_Benchmark.bss_apply_best_case   thrpt    3  204261596.303 ± 94507102.894  ops/s
diff --git a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_grouped_Benchmark.scala b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_grouped_Benchmark.scala
index e030f74b56..0282c6fea3 100644
--- a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_grouped_Benchmark.scala
+++ b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_grouped_Benchmark.scala
@@ -28,7 +28,7 @@ class ByteString_grouped_Benchmark {
   private val bsLarge = ByteString(Array.ofDim[Byte](1000 * 1000))
 
   /*
-    > akka-bench-jmh/jmh:run -f1 .*ByteString_grouped_Benchmark
+    > bench-jmh/jmh:run -f1 .*ByteString_grouped_Benchmark
     [info] Benchmark                             Mode  Cnt      Score      Error  Units
     [info] ByteString_grouped_Benchmark.grouped  avgt   10  59386.328 ± 1466.045  ns/op
    */
diff --git a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_toArray_Benchmark.scala b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_toArray_Benchmark.scala
index a84d6cd503..10d78997a9 100644
--- a/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_toArray_Benchmark.scala
+++ b/bench-jmh/src/main/scala/org/apache/pekko/util/ByteString_toArray_Benchmark.scala
@@ -30,7 +30,7 @@ class ByteString_toArray_Benchmark {
   var kb = 0
 
   /*
-    akka-bench-jmh/jmh:run -f 1 -wi 5 -i 5 .*ByteString_toArray_Benchmark.*
+    bench-jmh/jmh:run -f 1 -wi 5 -i 5 .*ByteString_toArray_Benchmark.*
 
 
     Benchmark             (kb)   Mode  Cnt        Score       Error  Units
diff --git a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/delivery/ShardingProducerController.scala b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/delivery/ShardingProducerController.scala
index 43c216d67f..e4a5d78ed0 100644
--- a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/delivery/ShardingProducerController.scala
+++ b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/delivery/ShardingProducerController.scala
@@ -78,7 +78,7 @@ import pekko.util.JavaDurationConverters._
  * To make sure the messages can be delivered also in that scenario the `ShardingProducerController` can be
  * used with a [[DurableProducerQueue]]. Then the unconfirmed messages are stored in a durable way so
  * that they can be redelivered when the producer is started again. An implementation of the
- * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `akka-persistence-typed`.
+ * `DurableProducerQueue` is provided by `EventSourcedProducerQueue` in `pekko-persistence-typed`.
  *
  * Instead of using `tell` with the `sendNextTo` in the `RequestNext` the producer can use `context.ask`
  * with the `askNextTo` in the `RequestNext`. The difference is that a reply is sent back when the
diff --git a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/javadsl/ClusterSharding.scala b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/javadsl/ClusterSharding.scala
index fa1b2a4093..f7c1278a03 100644
--- a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/javadsl/ClusterSharding.scala
+++ b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/javadsl/ClusterSharding.scala
@@ -125,7 +125,7 @@ object ClusterSharding {
  * the shard and thereby allocate a new home for the shard and then buffered messages in the
  * `ShardRegion` actors are delivered to the new location. This means that the state of the entities
  * are not transferred or migrated. If the state of the entities are of importance it should be
- * persistent (durable), e.g. with `akka-persistence`, so that it can be recovered at the new
+ * persistent (durable), e.g. with `pekko-persistence`, so that it can be recovered at the new
  * location.
  *
  * The logic that decides which shards to rebalance is defined in a plugable shard
@@ -136,7 +136,7 @@ object ClusterSharding {
  * implementation.
  *
  * The state of shard locations in the `ShardCoordinator` is stored with `akka-distributed-data` or
- * `akka-persistence` to survive failures. When a crashed or unreachable coordinator
+ * `pekko-persistence` to survive failures. When a crashed or unreachable coordinator
  * node has been removed (via down) from the cluster a new `ShardCoordinator` singleton
  * actor will take over and the state is recovered. During such a failure period shards
  * with known location are still available, while messages for new (unknown) shards
@@ -146,7 +146,7 @@ object ClusterSharding {
  * actor the order of the messages is preserved. As long as the buffer limit is not reached
  * messages are delivered on a best effort basis, with at-most once delivery semantics,
  * in the same way as ordinary message sending. Reliable end-to-end messaging, with
- * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `akka-persistence`.
+ * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `pekko-persistence`.
  *
  * Some additional latency is introduced for messages targeted to new or previously
  * unused shards due to the round-trip to the coordinator. Rebalancing of shards may
diff --git a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/scaladsl/ClusterSharding.scala b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/scaladsl/ClusterSharding.scala
index 2d93d8c4be..023fc397e0 100644
--- a/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/scaladsl/ClusterSharding.scala
+++ b/cluster-sharding-typed/src/main/scala/org/apache/pekko/cluster/sharding/typed/scaladsl/ClusterSharding.scala
@@ -125,7 +125,7 @@ object ClusterSharding extends ExtensionId[ClusterSharding] {
  * the shard and thereby allocate a new home for the shard and then buffered messages in the
  * `ShardRegion` actors are delivered to the new location. This means that the state of the entities
  * are not transferred or migrated. If the state of the entities are of importance it should be
- * persistent (durable), e.g. with `akka-persistence`, so that it can be recovered at the new
+ * persistent (durable), e.g. with `pekko-persistence`, so that it can be recovered at the new
  * location.
  *
  * The logic that decides which shards to rebalance is defined in a plugable shard
@@ -137,7 +137,7 @@ object ClusterSharding extends ExtensionId[ClusterSharding] {
  * implementation.
  *
  * The state of shard locations in the `ShardCoordinator` is stored with `akka-distributed-data` or
- * `akka-persistence` to survive failures. When a crashed or unreachable coordinator
+ * `pekko-persistence` to survive failures. When a crashed or unreachable coordinator
  * node has been removed (via down) from the cluster a new `ShardCoordinator` singleton
  * actor will take over and the state is recovered. During such a failure period shards
  * with known location are still available, while messages for new (unknown) shards
@@ -147,7 +147,7 @@ object ClusterSharding extends ExtensionId[ClusterSharding] {
  * actor the order of the messages is preserved. As long as the buffer limit is not reached
  * messages are delivered on a best effort basis, with at-most once delivery semantics,
  * in the same way as ordinary message sending. Reliable end-to-end messaging, with
- * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `akka-persistence`.
+ * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `pekko-persistence`.
  *
  * Some additional latency is introduced for messages targeted to new or previously
  * unused shards due to the round-trip to the coordinator. Rebalancing of shards may
diff --git a/cluster-sharding/src/main/scala-jdk-9/org/apache/pekko/cluster/sharding/internal/jfr/Events.scala b/cluster-sharding/src/main/scala-jdk-9/org/apache/pekko/cluster/sharding/internal/jfr/Events.scala
index 5e466ebbef..3342e81500 100644
--- a/cluster-sharding/src/main/scala-jdk-9/org/apache/pekko/cluster/sharding/internal/jfr/Events.scala
+++ b/cluster-sharding/src/main/scala-jdk-9/org/apache/pekko/cluster/sharding/internal/jfr/Events.scala
@@ -22,25 +22,25 @@ import jdk.jfr.{ Category, Enabled, Event, Label, StackTrace, Timespan }
 
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Sharding", "Shard")) @Label("Remember Entity Operation")
+@Category(Array("Pekko", "Sharding", "Shard")) @Label("Remember Entity Operation")
 final class RememberEntityWrite(@Timespan(Timespan.NANOSECONDS) val timeTaken: Long) extends Event
 
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Sharding", "Shard")) @Label("Remember Entity Add")
+@Category(Array("Pekko", "Sharding", "Shard")) @Label("Remember Entity Add")
 final class RememberEntityAdd(val entityId: String) extends Event
 
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Sharding", "Shard")) @Label("Remember Entity Remove")
+@Category(Array("Pekko", "Sharding", "Shard")) @Label("Remember Entity Remove")
 final class RememberEntityRemove(val entityId: String) extends Event
 
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Sharding", "Shard")) @Label("Passivate")
+@Category(Array("Pekko", "Sharding", "Shard")) @Label("Passivate")
 final class Passivate(val entityId: String) extends Event
 
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Sharding", "Shard")) @Label("Passivate Restart")
+@Category(Array("Pekko", "Sharding", "Shard")) @Label("Passivate Restart")
 final class PassivateRestart(val entityId: String) extends Event
diff --git a/cluster-sharding/src/main/scala/org/apache/pekko/cluster/sharding/ClusterSharding.scala b/cluster-sharding/src/main/scala/org/apache/pekko/cluster/sharding/ClusterSharding.scala
index f20e1127c4..21ff205ea9 100755
--- a/cluster-sharding/src/main/scala/org/apache/pekko/cluster/sharding/ClusterSharding.scala
+++ b/cluster-sharding/src/main/scala/org/apache/pekko/cluster/sharding/ClusterSharding.scala
@@ -118,7 +118,7 @@ import pekko.util.ccompat.JavaConverters._
  * the shard, allocate a new home for the shard and then buffered messages in the
  * `ShardRegion` actors are delivered to the new location. This means that the state of the entities
  * are not transferred or migrated. If the state of the entities are of importance it should be
- * persistent (durable), e.g. with `akka-persistence` so that it can be recovered at the new
+ * persistent (durable), e.g. with `pekko-persistence` so that it can be recovered at the new
  * location.
  *
  * '''Shard Allocation''':
@@ -131,7 +131,7 @@ import pekko.util.ccompat.JavaConverters._
  *
  * '''Recovery''':
  * The state of shard locations in the `ShardCoordinator` is stored with `akka-distributed-data` or
- * `akka-persistence` to survive failures. When a crashed or unreachable coordinator
+ * `pekko-persistence` to survive failures. When a crashed or unreachable coordinator
  * node has been removed (via down) from the cluster a new `ShardCoordinator` singleton
  * actor will take over and the state is recovered. During such a failure period shards
  * with known location are still available, while messages for new (unknown) shards
@@ -142,7 +142,7 @@ import pekko.util.ccompat.JavaConverters._
  * actor the order of the messages is preserved. As long as the buffer limit is not reached
  * messages are delivered on a best effort basis, with at-most once delivery semantics,
  * in the same way as ordinary message sending. Reliable end-to-end messaging, with
- * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `akka-persistence`.
+ * at-least-once semantics can be added by using `AtLeastOnceDelivery` in `pekko-persistence`.
  *
  * Some additional latency is introduced for messages targeted to new or previously
  * unused shards due to the round-trip to the coordinator. Rebalancing of shards may
diff --git a/cluster-tools/src/main/scala/org/apache/pekko/cluster/client/ClusterClient.scala b/cluster-tools/src/main/scala/org/apache/pekko/cluster/client/ClusterClient.scala
index fceeaaf909..9ec44c4ccf 100644
--- a/cluster-tools/src/main/scala/org/apache/pekko/cluster/client/ClusterClient.scala
+++ b/cluster-tools/src/main/scala/org/apache/pekko/cluster/client/ClusterClient.scala
@@ -127,7 +127,7 @@ object ClusterClientSettings {
  *   external service registry
  */
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 final class ClusterClientSettings(
     val initialContacts: Set[ActorPath],
@@ -292,7 +292,7 @@ final case class ContactPoints(contactPoints: Set[ActorPath]) {
 }
 
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 object ClusterClient {
 
@@ -300,14 +300,14 @@ object ClusterClient {
    * Scala API: Factory method for `ClusterClient` [[pekko.actor.Props]].
    */
   @deprecated(
-    "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+    "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
     since = "Akka 2.6.0")
   def props(settings: ClusterClientSettings): Props =
     Props(new ClusterClient(settings)).withDeploy(Deploy.local)
 
   @SerialVersionUID(1L)
   @deprecated(
-    "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+    "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
     since = "Akka 2.6.0")
   final case class Send(path: String, msg: Any, localAffinity: Boolean) {
 
@@ -318,13 +318,13 @@ object ClusterClient {
   }
   @SerialVersionUID(1L)
   @deprecated(
-    "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+    "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
     since = "Akka 2.6.0")
   final case class SendToAll(path: String, msg: Any)
 
   @SerialVersionUID(1L)
   @deprecated(
-    "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+    "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
     since = "Akka 2.6.0")
   final case class Publish(topic: String, msg: Any)
 
@@ -381,7 +381,7 @@ object ClusterClient {
  * nature of the actors involved.
  */
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 final class ClusterClient(settings: ClusterClientSettings) extends Actor with ActorLogging {
 
@@ -565,7 +565,7 @@ final class ClusterClient(settings: ClusterClientSettings) extends Actor with Ac
 }
 
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 object ClusterClientReceptionist extends ExtensionId[ClusterClientReceptionist] with ExtensionIdProvider {
   override def get(system: ActorSystem): ClusterClientReceptionist = super.get(system)
@@ -583,7 +583,7 @@ object ClusterClientReceptionist extends ExtensionId[ClusterClientReceptionist]
  * The [[pekko.cluster.pubsub.DistributedPubSubMediator]] is started by the [[pekko.cluster.pubsub.DistributedPubSub]] extension.
  */
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 final class ClusterClientReceptionist(system: ExtendedActorSystem) extends Extension {
 
@@ -661,7 +661,7 @@ final class ClusterClientReceptionist(system: ExtendedActorSystem) extends Exten
 }
 
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 object ClusterReceptionistSettings {
 
@@ -713,7 +713,7 @@ object ClusterReceptionistSettings {
  *   client will be stopped after this time of inactivity.
  */
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 final class ClusterReceptionistSettings(
     val role: Option[String],
@@ -875,7 +875,7 @@ final case class ClusterClients(clusterClients: Set[ActorRef]) {
 }
 
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 object ClusterReceptionist {
 
@@ -952,7 +952,7 @@ object ClusterReceptionist {
  * the client is supposed to communicate directly to the actor in the cluster.
  */
 @deprecated(
-  "Use Akka gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-akka-grpc",
+  "Use Apache Pekko gRPC instead, see https://pekko.apache.org/docs/pekko/current/cluster-client.html#migration-to-pekko-grpc",
   since = "Akka 2.6.0")
 final class ClusterReceptionist(pubSubMediator: ActorRef, settings: ClusterReceptionistSettings)
     extends Actor
diff --git a/cluster/src/main/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSettings.scala b/cluster/src/main/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSettings.scala
index f7edc8596f..8b9b31cef4 100644
--- a/cluster/src/main/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSettings.scala
+++ b/cluster/src/main/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSettings.scala
@@ -146,5 +146,5 @@ import pekko.util.Helpers.Requiring
     releaseAfter: FiniteDuration,
     role: Option[String],
     leaseName: Option[String]) {
-  def safeLeaseName(systemName: String) = leaseName.getOrElse(s"$systemName-akka-sbr")
+  def safeLeaseName(systemName: String) = leaseName.getOrElse(s"$systemName-pekko-sbr")
 }
diff --git a/cluster/src/multi-jvm/scala/org/apache/pekko/cluster/sbr/LeaseMajority5NodeSpec.scala b/cluster/src/multi-jvm/scala/org/apache/pekko/cluster/sbr/LeaseMajority5NodeSpec.scala
index 4562def329..4eb70c5ef0 100644
--- a/cluster/src/multi-jvm/scala/org/apache/pekko/cluster/sbr/LeaseMajority5NodeSpec.scala
+++ b/cluster/src/multi-jvm/scala/org/apache/pekko/cluster/sbr/LeaseMajority5NodeSpec.scala
@@ -82,7 +82,7 @@ class LeaseMajority5NodeSpecMultiJvmNode5 extends LeaseMajority5NodeSpec
 class LeaseMajority5NodeSpec extends MultiNodeClusterSpec(LeaseMajority5NodeSpec) {
   import LeaseMajority5NodeSpec._
 
-  private val testLeaseName = "LeaseMajority5NodeSpec-akka-sbr"
+  private val testLeaseName = "LeaseMajority5NodeSpec-pekko-sbr"
 
   def sortByAddress(roles: RoleName*): List[RoleName] = {
 
diff --git a/cluster/src/test/scala/org/apache/pekko/cluster/sbr/LeaseMajoritySpec.scala b/cluster/src/test/scala/org/apache/pekko/cluster/sbr/LeaseMajoritySpec.scala
index a55af5038b..f7038b6f3e 100644
--- a/cluster/src/test/scala/org/apache/pekko/cluster/sbr/LeaseMajoritySpec.scala
+++ b/cluster/src/test/scala/org/apache/pekko/cluster/sbr/LeaseMajoritySpec.scala
@@ -31,7 +31,7 @@ class LeaseMajoritySpec extends PekkoSpec() with Eventually {
     }""").withFallback(default)
   val named = ConfigFactory.parseString("""
      pekko.cluster.split-brain-resolver.lease-majority {
-       lease-name = "shopping-cart-akka-sbr"
+       lease-name = "shopping-cart-pekko-sbr"
      }""").withFallback(default)
 
   "Split Brain Resolver Lease Majority provider" must {
@@ -39,14 +39,15 @@ class LeaseMajoritySpec extends PekkoSpec() with Eventually {
     "read the configured name" in {
       new SplitBrainResolverSettings(default).leaseMajoritySettings.leaseName shouldBe None
       new SplitBrainResolverSettings(blank).leaseMajoritySettings.leaseName shouldBe None
-      new SplitBrainResolverSettings(named).leaseMajoritySettings.leaseName shouldBe Some("shopping-cart-akka-sbr")
+      new SplitBrainResolverSettings(named).leaseMajoritySettings.leaseName shouldBe Some("shopping-cart-pekko-sbr")
     }
 
     "use a safe name" in {
-      new SplitBrainResolverSettings(default).leaseMajoritySettings.safeLeaseName("sysName") shouldBe "sysName-akka-sbr"
-      new SplitBrainResolverSettings(blank).leaseMajoritySettings.safeLeaseName("sysName") shouldBe "sysName-akka-sbr"
+      new SplitBrainResolverSettings(default).leaseMajoritySettings.safeLeaseName(
+        "sysName") shouldBe "sysName-pekko-sbr"
+      new SplitBrainResolverSettings(blank).leaseMajoritySettings.safeLeaseName("sysName") shouldBe "sysName-pekko-sbr"
       new SplitBrainResolverSettings(named).leaseMajoritySettings
-        .safeLeaseName("sysName") shouldBe "shopping-cart-akka-sbr"
+        .safeLeaseName("sysName") shouldBe "shopping-cart-pekko-sbr"
     }
 
   }
diff --git a/cluster/src/test/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSpec.scala b/cluster/src/test/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSpec.scala
index 6f96494b55..cc9f538ec3 100644
--- a/cluster/src/test/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSpec.scala
+++ b/cluster/src/test/scala/org/apache/pekko/cluster/sbr/SplitBrainResolverSpec.scala
@@ -126,7 +126,7 @@ class SplitBrainResolverSpec
   private lazy val selfUniqueAddress = Cluster(system).selfUniqueAddress
 
   private val testLeaseSettings =
-    new LeaseSettings("akka-sbr", "test", new TimeoutSettings(1.second, 2.minutes, 3.seconds), ConfigFactory.empty)
+    new LeaseSettings("pekko-sbr", "test", new TimeoutSettings(1.second, 2.minutes, 3.seconds), ConfigFactory.empty)
 
   def createReachability(unreachability: Seq[(Member, Member)]): Reachability = {
     Reachability(
diff --git a/docs/src/main/paradox/persistence-query.md b/docs/src/main/paradox/persistence-query.md
index f3d65a552f..fd6585bb90 100644
--- a/docs/src/main/paradox/persistence-query.md
+++ b/docs/src/main/paradox/persistence-query.md
@@ -274,7 +274,7 @@ used the next time this projection is started. This pattern is implemented in th
 ## Query plugins
 
 Query plugins are various (mostly community driven) @apidoc[query.*.ReadJournal] implementations for all kinds
-of available datastores. The complete list of available plugins is maintained on the Pekko Persistence Query [Community Plugins](https://akka.io/community/#plugins-to-akka-persistence-query) page.
+of available datastores.
 
 This section aims to provide tips and guide plugin developers through implementing a custom query plugin.
 Most users will not need to implement journals themselves, except if targeting a not yet supported datastore.
diff --git a/docs/src/main/paradox/typed/from-classic.md b/docs/src/main/paradox/typed/from-classic.md
index 21ab456fe5..b64dbec57a 100644
--- a/docs/src/main/paradox/typed/from-classic.md
+++ b/docs/src/main/paradox/typed/from-classic.md
@@ -43,9 +43,9 @@ Artifact names:
 | pekko-distributed-data | pekko-cluster-typed          |
 | pekko-persistence      | pekko-persistence-typed      |
 | pekko-stream           | pekko-stream-typed           |
-| pekko-testkit          | akka-actor-testkit-typed     |
+| pekko-testkit          | pekko-actor-testkit-typed    |
 
-Cluster Singleton and Distributed Data are included in `akka-cluster-typed`.
+Cluster Singleton and Distributed Data are included in `pekko-cluster-typed`.
 
 Artifacts not listed in above table don't have a specific API for Pekko Typed.
 
diff --git a/osgi/src/main/scala/org/apache/pekko/osgi/OsgiActorSystemFactory.scala b/osgi/src/main/scala/org/apache/pekko/osgi/OsgiActorSystemFactory.scala
index e89e16028f..3bdf6d82c6 100644
--- a/osgi/src/main/scala/org/apache/pekko/osgi/OsgiActorSystemFactory.scala
+++ b/osgi/src/main/scala/org/apache/pekko/osgi/OsgiActorSystemFactory.scala
@@ -49,8 +49,8 @@ class OsgiActorSystemFactory(
 
   /**
    * Strategy method to create the Config for the ActorSystem
-   * ensuring that the default/reference configuration is loaded from the akka-actor bundle.
-   * Configuration files found in akka-actor bundle
+   * ensuring that the default/reference configuration is loaded from the pekko-actor bundle.
+   * Configuration files found in pekko-actor bundle
    */
   def actorSystemConfig(@unused context: BundleContext): Config = {
     config.withFallback(
@@ -71,7 +71,7 @@ class OsgiActorSystemFactory(
 object OsgiActorSystemFactory {
 
   /**
-   * Class loader of akka-actor bundle.
+   * Class loader of pekko-actor bundle.
    */
   def akkaActorClassLoader = classOf[ActorSystemActivator].getClassLoader
 
diff --git a/osgi/src/test/scala/org/apache/pekko/osgi/PojoSRTestSupport.scala b/osgi/src/test/scala/org/apache/pekko/osgi/PojoSRTestSupport.scala
index d97c401666..4a8cafea59 100644
--- a/osgi/src/test/scala/org/apache/pekko/osgi/PojoSRTestSupport.scala
+++ b/osgi/src/test/scala/org/apache/pekko/osgi/PojoSRTestSupport.scala
@@ -31,7 +31,7 @@ import org.scalatest.{ BeforeAndAfterAll, Suite }
 import org.apache.pekko.util.ccompat.JavaConverters._
 
 /**
- * Trait that provides support for building akka-osgi tests using PojoSR
+ * Trait that provides support for building pekko-osgi tests using PojoSR
  */
 trait PojoSRTestSupport extends Suite with BeforeAndAfterAll {
 
diff --git a/persistence-tck/src/main/scala/org/apache/pekko/persistence/PluginSpec.scala b/persistence-tck/src/main/scala/org/apache/pekko/persistence/PluginSpec.scala
index d72f7d0557..edd5e4b99f 100644
--- a/persistence-tck/src/main/scala/org/apache/pekko/persistence/PluginSpec.scala
+++ b/persistence-tck/src/main/scala/org/apache/pekko/persistence/PluginSpec.scala
@@ -40,7 +40,7 @@ abstract class PluginSpec(val config: Config)
   private var _writerUuid: String = _
 
   // used to avoid messages be delivered to a restarted actor,
-  // this is akka-persistence internals and journals themselves don't really care
+  // this is pekko-persistence internals and journals themselves don't really care
   protected val actorInstanceId = 1
 
   override protected def beforeEach(): Unit = {
diff --git a/persistence-tck/src/main/scala/org/apache/pekko/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala b/persistence-tck/src/main/scala/org/apache/pekko/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala
index 41f948217f..077335ebce 100644
--- a/persistence-tck/src/main/scala/org/apache/pekko/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala
+++ b/persistence-tck/src/main/scala/org/apache/pekko/persistence/japi/snapshot/JavaSnapshotStoreSpec.scala
@@ -24,7 +24,7 @@ import org.scalatest.{ Args, ConfigMap, Filter, Status, Suite, TestData }
 /**
  * JAVA API
  *
- * This spec aims to verify custom akka-persistence [[pekko.persistence.snapshot.SnapshotStore]] implementations.
+ * This spec aims to verify custom pekko-persistence [[pekko.persistence.snapshot.SnapshotStore]] implementations.
  * Plugin authors are highly encouraged to include it in their plugin's test suites.
  *
  * In case your snapshot-store plugin needs some kind of setup or teardown, override the `beforeAll` or `afterAll`
diff --git a/persistence-tck/src/main/scala/org/apache/pekko/persistence/journal/JournalSpec.scala b/persistence-tck/src/main/scala/org/apache/pekko/persistence/journal/JournalSpec.scala
index 0867d3af77..e1bc18a748 100644
--- a/persistence-tck/src/main/scala/org/apache/pekko/persistence/journal/JournalSpec.scala
+++ b/persistence-tck/src/main/scala/org/apache/pekko/persistence/journal/JournalSpec.scala
@@ -40,7 +40,7 @@ object JournalSpec {
 }
 
 /**
- * This spec aims to verify custom akka-persistence Journal implementations.
+ * This spec aims to verify custom pekko-persistence Journal implementations.
  * Plugin authors are highly encouraged to include it in their plugin's test suites.
  *
  * In case your journal plugin needs some kind of setup or teardown, override the `beforeAll` or `afterAll`
diff --git a/persistence-tck/src/main/scala/org/apache/pekko/persistence/snapshot/SnapshotStoreSpec.scala b/persistence-tck/src/main/scala/org/apache/pekko/persistence/snapshot/SnapshotStoreSpec.scala
index 7e1b699879..a06633cb5a 100644
--- a/persistence-tck/src/main/scala/org/apache/pekko/persistence/snapshot/SnapshotStoreSpec.scala
+++ b/persistence-tck/src/main/scala/org/apache/pekko/persistence/snapshot/SnapshotStoreSpec.scala
@@ -40,7 +40,7 @@ object SnapshotStoreSpec {
 }
 
 /**
- * This spec aims to verify custom akka-persistence [[SnapshotStore]] implementations.
+ * This spec aims to verify custom pekko-persistence [[SnapshotStore]] implementations.
  * Plugin authors are highly encouraged to include it in their plugin's test suites.
  *
  * In case your journal plugin needs some kind of setup or teardown, override the `beforeAll` or `afterAll`
diff --git a/project/JavaFormatter.scala b/project/JavaFormatter.scala
index 76ae9ccba9..4dcc109d2e 100644
--- a/project/JavaFormatter.scala
+++ b/project/JavaFormatter.scala
@@ -24,7 +24,7 @@ object JavaFormatter extends AutoPlugin {
   private val ignoreConfigFileName: String = ".sbt-java-formatter.conf"
   private val descriptor: String = "sbt-java-formatter"
 
-  private val formatOnCompile = !sys.props.contains("akka.no.discipline")
+  private val formatOnCompile = !sys.props.contains("pekko.no.discipline")
 
   import JavaFormatterPlugin.autoImport._
   import sbt.Keys._
diff --git a/project/OSGi.scala b/project/OSGi.scala
index cc0a0fd28c..4c5f1edc98 100644
--- a/project/OSGi.scala
+++ b/project/OSGi.scala
@@ -47,7 +47,7 @@ object OSGi {
   val actor = osgiSettings ++ Seq(
     OsgiKeys.exportPackage := Seq("org.apache.pekko*"),
     OsgiKeys.privatePackage := Seq("org.apache.pekko.osgi.impl"),
-    // akka-actor packages are not imported, as contained in the CP
+    // pekko-actor packages are not imported, as contained in the CP
     OsgiKeys.importPackage := (osgiOptionalImports.map(optionalResolution)) ++ Seq(
       "!sun.misc",
       scalaJava8CompatImport(),
diff --git a/project/PekkoDisciplinePlugin.scala b/project/PekkoDisciplinePlugin.scala
index 57930f17eb..16d24f6b2a 100644
--- a/project/PekkoDisciplinePlugin.scala
+++ b/project/PekkoDisciplinePlugin.scala
@@ -23,8 +23,8 @@ object PekkoDisciplinePlugin extends AutoPlugin {
   override def requires: Plugins = JvmPlugin
   override lazy val projectSettings = disciplineSettings
 
-  // allow toggling for pocs/exploration of ideas without discpline
-  val enabled = !sys.props.contains("akka.no.discipline")
+  // allow toggling for pocs/exploration of ideas without discipline
+  val enabled = !sys.props.contains("pekko.no.discipline")
 
   // We allow warnings in docs to get the 'snippets' right
   val nonFatalJavaWarningsFor = Set(
@@ -140,7 +140,7 @@ object PekkoDisciplinePlugin extends AutoPlugin {
            else Seq.empty),
         // Discipline is not needed for the docs compilation run (which uses
         // different compiler phases from the regular run), and in particular
-        // '-Ywarn-unused:explicits' breaks 'sbt ++2.13.0-M5 akka-actor/doc'
+        // '-Ywarn-unused:explicits' breaks 'sbt ++2.13.0-M5 actor/doc'
         // https://github.com/akka/akka/issues/26119
         Compile / doc / scalacOptions --= disciplineScalacOptions.toSeq :+ "-Xfatal-warnings",
         // having discipline warnings in console is just an annoyance
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 62d4406057..2aff7b73da 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -5,7 +5,7 @@ addSbtPlugin("com.lightbend.sbt" % "sbt-bill-of-materials" % "1.0.2")
 addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
 addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.33")
 // sbt-osgi 0.9.5 is available but breaks including jdk9-only classes
-// sbt-osgi 0.9.6 is available but breaks populating akka-protobuf-v3
+// sbt-osgi 0.9.6 is available but breaks populating pekko-protobuf-v3
 addSbtPlugin("com.typesafe.sbt" % "sbt-osgi" % "0.9.4")
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
 addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")
diff --git a/remote/src/main/scala-jdk-9/org/apache/pekko/remote/artery/jfr/Events.scala b/remote/src/main/scala-jdk-9/org/apache/pekko/remote/artery/jfr/Events.scala
index f1d7a02c71..e1087cff7f 100644
--- a/remote/src/main/scala-jdk-9/org/apache/pekko/remote/artery/jfr/Events.scala
+++ b/remote/src/main/scala-jdk-9/org/apache/pekko/remote/artery/jfr/Events.scala
@@ -49,7 +49,7 @@ private[pekko] object JFREventUtils {
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron")) @Label("Media driver started")
+@Category(Array("Pekko", "Remoting", "Aeron")) @Label("Media driver started")
 final class TransportMediaDriverStarted(val directoryName: String) extends Event
 
 /**
@@ -57,7 +57,7 @@ final class TransportMediaDriverStarted(val directoryName: String) extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Transport started")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Transport started")
 final class TransportStarted() extends Event
 
 /**
@@ -65,7 +65,7 @@ final class TransportStarted() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron")) @Label("Aeron error log started")
+@Category(Array("Pekko", "Remoting", "Aeron")) @Label("Aeron error log started")
 final class TransportAeronErrorLogStarted() extends Event
 
 /**
@@ -73,7 +73,7 @@ final class TransportAeronErrorLogStarted() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Task runner started")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Task runner started")
 final class TransportTaskRunnerStarted() extends Event
 
 /**
@@ -81,7 +81,7 @@ final class TransportTaskRunnerStarted() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Unique address set")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Unique address set")
 final class TransportUniqueAddressSet(_uniqueAddress: UniqueAddress) extends Event {
   val uniqueAddress = _uniqueAddress.toString()
 }
@@ -91,7 +91,7 @@ final class TransportUniqueAddressSet(_uniqueAddress: UniqueAddress) extends Eve
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Materializer started")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Materializer started")
 final class TransportMaterializerStarted() extends Event
 
 /**
@@ -99,7 +99,7 @@ final class TransportMaterializerStarted() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Startup finished")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Startup finished")
 final class TransportStartupFinished() extends Event
 
 /**
@@ -107,7 +107,7 @@ final class TransportStartupFinished() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Kill switch pulled")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Kill switch pulled")
 final class TransportKillSwitchPulled() extends Event
 
 /**
@@ -115,7 +115,7 @@ final class TransportKillSwitchPulled() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Stopped")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Stopped")
 final class TransportStopped() extends Event
 
 /**
@@ -123,7 +123,7 @@ final class TransportStopped() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron")) @Label("Aeron log task stopped")
+@Category(Array("Pekko", "Remoting", "Aeron")) @Label("Aeron log task stopped")
 final class TransportAeronErrorLogTaskStopped() extends Event
 
 /**
@@ -131,7 +131,7 @@ final class TransportAeronErrorLogTaskStopped() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Media file deleted")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Media file deleted")
 final class TransportMediaFileDeleted() extends Event
 
 /**
@@ -139,7 +139,7 @@ final class TransportMediaFileDeleted() extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Send queue overflow")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Send queue overflow")
 final class TransportSendQueueOverflow(val queueIndex: Int) extends Event
 
 /**
@@ -147,7 +147,7 @@ final class TransportSendQueueOverflow(val queueIndex: Int) extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Stop idle outbound")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Stop idle outbound")
 final class TransportStopIdleOutbound(_remoteAddress: Address, val queueIndex: Int) extends Event {
   val remoteAddress = _remoteAddress.toString
 }
@@ -157,7 +157,7 @@ final class TransportStopIdleOutbound(_remoteAddress: Address, val queueIndex: I
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Quarantined")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Quarantined")
 final class TransportQuarantined(_remoteAddress: Address, val uid: Long) extends Event {
   val remoteAddress = _remoteAddress.toString
 }
@@ -167,7 +167,7 @@ final class TransportQuarantined(_remoteAddress: Address, val uid: Long) extends
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Remove quarantined")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Remove quarantined")
 final class TransportRemoveQuarantined(_remoteAddress: Address) extends Event {
   val remoteAddress = _remoteAddress.toString
 }
@@ -177,7 +177,7 @@ final class TransportRemoveQuarantined(_remoteAddress: Address) extends Event {
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Restart outbound")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Restart outbound")
 final class TransportRestartOutbound(_remoteAddress: Address, val streamName: String) extends Event {
   val remoteAddress = _remoteAddress.toString
 }
@@ -187,7 +187,7 @@ final class TransportRestartOutbound(_remoteAddress: Address, val streamName: St
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Transport")) @Label("Restart inbound")
+@Category(Array("Pekko", "Remoting", "Transport")) @Label("Restart inbound")
 final class TransportRestartInbound(_remoteAddress: UniqueAddress, val streamName: String) extends Event {
   val remoteAddress = _remoteAddress.toString()
 }
@@ -199,7 +199,7 @@ final class TransportRestartInbound(_remoteAddress: UniqueAddress, val streamNam
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Started")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Started")
 final class AeronSinkStarted(val channel: String, val streamId: Int) extends Event {}
 
 /**
@@ -207,7 +207,7 @@ final class AeronSinkStarted(val channel: String, val streamId: Int) extends Eve
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Task runner removed")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Task runner removed")
 final class AeronSinkTaskRunnerRemoved(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -215,7 +215,7 @@ final class AeronSinkTaskRunnerRemoved(val channel: String, val streamId: Int) e
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Publication closed")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Publication closed")
 final class AeronSinkPublicationClosed(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -223,7 +223,7 @@ final class AeronSinkPublicationClosed(val channel: String, val streamId: Int) e
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Publication closed unexpectedly")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Publication closed unexpectedly")
 final class AeronSinkPublicationClosedUnexpectedly(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -231,7 +231,7 @@ final class AeronSinkPublicationClosedUnexpectedly(val channel: String, val stre
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Stopped")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Stopped")
 final class AeronSinkStopped(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -240,7 +240,7 @@ final class AeronSinkStopped(val channel: String, val streamId: Int) extends Eve
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Envelope grabbed")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Envelope grabbed")
 final class AeronSinkEnvelopeGrabbed(@DataAmount() val lastMessageSize: Int) extends Event
 
 /**
@@ -249,7 +249,7 @@ final class AeronSinkEnvelopeGrabbed(@DataAmount() val lastMessageSize: Int) ext
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Envelope offered")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Envelope offered")
 final class AeronSinkEnvelopeOffered(@DataAmount() val lastMessageSize: Int) extends Event
 
 /**
@@ -257,7 +257,7 @@ final class AeronSinkEnvelopeOffered(@DataAmount() val lastMessageSize: Int) ext
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Gave up envelope")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Gave up envelope")
 final class AeronSinkGaveUpEnvelope(val cause: String) extends Event
 
 /**
@@ -266,7 +266,7 @@ final class AeronSinkGaveUpEnvelope(val cause: String) extends Event
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Delegate to task runner")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Delegate to task runner")
 final class AeronSinkDelegateToTaskRunner(val countBeforeDelegate: Long) extends Event
 
 /**
@@ -275,7 +275,7 @@ final class AeronSinkDelegateToTaskRunner(val countBeforeDelegate: Long) extends
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Sink")) @Label("Return from task runner")
+@Category(Array("Pekko", "Remoting", "Aeron", "Sink")) @Label("Return from task runner")
 final class AeronSinkReturnFromTaskRunner(@Timespan(Timespan.NANOSECONDS) val nanosSinceTaskStartTime: Long)
     extends Event
 
@@ -286,7 +286,7 @@ final class AeronSinkReturnFromTaskRunner(@Timespan(Timespan.NANOSECONDS) val na
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Source")) @Label("Started")
+@Category(Array("Pekko", "Remoting", "Aeron", "Source")) @Label("Started")
 final class AeronSourceStarted(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -294,7 +294,7 @@ final class AeronSourceStarted(val channel: String, val streamId: Int) extends E
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Source")) @Label("Stopped")
+@Category(Array("Pekko", "Remoting", "Aeron", "Source")) @Label("Stopped")
 final class AeronSourceStopped(val channel: String, val streamId: Int) extends Event
 
 /**
@@ -303,7 +303,7 @@ final class AeronSourceStopped(val channel: String, val streamId: Int) extends E
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Source")) @Label("Received")
+@Category(Array("Pekko", "Remoting", "Aeron", "Source")) @Label("Received")
 final class AeronSourceReceived(@DataAmount() val size: Int) extends Event
 
 /**
@@ -312,7 +312,7 @@ final class AeronSourceReceived(@DataAmount() val size: Int) extends Event
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Source")) @Label("Delegate to task runner")
+@Category(Array("Pekko", "Remoting", "Aeron", "Source")) @Label("Delegate to task runner")
 final class AeronSourceDelegateToTaskRunner(val countBeforeDelegate: Long) extends Event
 
 /**
@@ -321,7 +321,7 @@ final class AeronSourceDelegateToTaskRunner(val countBeforeDelegate: Long) exten
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Aeron", "Source")) @Label("Return from task runner")
+@Category(Array("Pekko", "Remoting", "Aeron", "Source")) @Label("Return from task runner")
 final class AeronSourceReturnFromTaskRunner(@Timespan(Timespan.NANOSECONDS) val nanosSinceTaskStartTime: Long)
     extends Event
 
@@ -332,7 +332,7 @@ final class AeronSourceReturnFromTaskRunner(@Timespan(Timespan.NANOSECONDS) val
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Compression")) @Label("ActorRef advertisement")
+@Category(Array("Pekko", "Remoting", "Compression")) @Label("ActorRef advertisement")
 final class CompressionActorRefAdvertisement(val uid: Long) extends Event
 
 /**
@@ -340,7 +340,7 @@ final class CompressionActorRefAdvertisement(val uid: Long) extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Compression")) @Label("ClassManifest advertisement")
+@Category(Array("Pekko", "Remoting", "Compression")) @Label("ClassManifest advertisement")
 final class CompressionClassManifestAdvertisement(val uid: Long) extends Event
 
 // tcp outbound events
@@ -350,7 +350,7 @@ final class CompressionClassManifestAdvertisement(val uid: Long) extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Outbound")) @Label("Connected")
+@Category(Array("Pekko", "Remoting", "Tcp", "Outbound")) @Label("Connected")
 final class TcpOutboundConnected(_remoteAddress: Address, val streamName: String) extends Event {
   val remoteAddress = _remoteAddress.toString
 }
@@ -361,7 +361,7 @@ final class TcpOutboundConnected(_remoteAddress: Address, val streamName: String
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Outbound")) @Label("Sent")
+@Category(Array("Pekko", "Remoting", "Tcp", "Outbound")) @Label("Sent")
 final class TcpOutboundSent(@DataAmount() val size: Int) extends Event
 
 // tcp inbound events
@@ -371,7 +371,7 @@ final class TcpOutboundSent(@DataAmount() val size: Int) extends Event
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Inbound")) @Label("Bound")
+@Category(Array("Pekko", "Remoting", "Tcp", "Inbound")) @Label("Bound")
 final class TcpInboundBound(val bindHost: String, _address: InetSocketAddress) extends Event {
   val address = JFREventUtils.stringOf(_address)
 }
@@ -381,7 +381,7 @@ final class TcpInboundBound(val bindHost: String, _address: InetSocketAddress) e
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Inbound")) @Label("Unbound")
+@Category(Array("Pekko", "Remoting", "Tcp", "Inbound")) @Label("Unbound")
 final class TcpInboundUnbound(_localAddress: UniqueAddress) extends Event {
   val localAddress = _localAddress.toString()
 }
@@ -391,7 +391,7 @@ final class TcpInboundUnbound(_localAddress: UniqueAddress) extends Event {
  */
 @InternalApi
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Inbound")) @Label("Connected")
+@Category(Array("Pekko", "Remoting", "Tcp", "Inbound")) @Label("Connected")
 final class TcpInboundConnected(_remoteAddress: InetSocketAddress) extends Event {
   val remoteAddress = JFREventUtils.stringOf(_remoteAddress)
 }
@@ -402,5 +402,5 @@ final class TcpInboundConnected(_remoteAddress: InetSocketAddress) extends Event
 @InternalApi
 @Enabled(false) // hi frequency event
 @StackTrace(false)
-@Category(Array("Akka", "Remoting", "Tcp", "Inbound")) @Label("Received")
+@Category(Array("Pekko", "Remoting", "Tcp", "Inbound")) @Label("Received")
 final class TcpInboundReceived(@DataAmount() val size: Int) extends Event
diff --git a/remote/src/main/scala/org/apache/pekko/remote/MessageSerializer.scala b/remote/src/main/scala/org/apache/pekko/remote/MessageSerializer.scala
index 40d187831e..3fe1fbb180 100644
--- a/remote/src/main/scala/org/apache/pekko/remote/MessageSerializer.scala
+++ b/remote/src/main/scala/org/apache/pekko/remote/MessageSerializer.scala
@@ -34,7 +34,7 @@ private[pekko] object MessageSerializer {
   class SerializationException(msg: String, cause: Throwable) extends RuntimeException(msg, cause)
 
   /**
-   * Uses Akka Serialization for the specified ActorSystem to transform the given MessageProtocol to a message
+   * Uses Pekko Serialization for the specified ActorSystem to transform the given MessageProtocol to a message
    */
   def deserialize(system: ExtendedActorSystem, messageProtocol: SerializedMessage): AnyRef = {
     SerializationExtension(system)
@@ -46,7 +46,7 @@ private[pekko] object MessageSerializer {
   }
 
   /**
-   * Uses Akka Serialization for the specified ActorSystem to transform the given message to a MessageProtocol
+   * Uses Pekko Serialization for the specified ActorSystem to transform the given message to a MessageProtocol
    * Throws `NotSerializableException` if serializer was not configured for the message type.
    * Throws `MessageSerializer.SerializationException` if exception was thrown from `toBinary` of the
    * serializer.
diff --git a/remote/src/test/scala/org/apache/pekko/remote/artery/tcp/ssl/PemManagersProviderSpec.scala b/remote/src/test/scala/org/apache/pekko/remote/artery/tcp/ssl/PemManagersProviderSpec.scala
index 7770dc46f4..2ca909d73c 100644
--- a/remote/src/test/scala/org/apache/pekko/remote/artery/tcp/ssl/PemManagersProviderSpec.scala
+++ b/remote/src/test/scala/org/apache/pekko/remote/artery/tcp/ssl/PemManagersProviderSpec.scala
@@ -26,7 +26,7 @@ class PemManagersProviderSpec extends AnyWordSpec with Matchers {
 
   "A PemManagersProvider" must {
 
-    "load stores reading files setup in config (akka-pki samples)" in {
+    "load stores reading files setup in config (pekko-pki samples)" in {
       // These set of certificates are valid PEMs but are invalid for pekko-remote
       // use. Either the key length, certificate usage limitations (via the UsageKeyExtensions),
       // or the fact that the key's certificate is self-signed cause one of the following
diff --git a/remote/src/test/scala/org/apache/pekko/remote/serialization/PrimitivesSerializationSpec.scala b/remote/src/test/scala/org/apache/pekko/remote/serialization/PrimitivesSerializationSpec.scala
index 52774408a4..2bee82ad3b 100644
--- a/remote/src/test/scala/org/apache/pekko/remote/serialization/PrimitivesSerializationSpec.scala
+++ b/remote/src/test/scala/org/apache/pekko/remote/serialization/PrimitivesSerializationSpec.scala
@@ -97,7 +97,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
       }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serializerFor(1L.asInstanceOf[AnyRef]).identifier === 18
     }
 
@@ -120,7 +120,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
     }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serializerFor(1L.asInstanceOf[AnyRef]).identifier === 19
     }
   }
@@ -144,7 +144,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
       }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serializerFor(1L.asInstanceOf[AnyRef]).identifier === 20
     }
 
@@ -173,7 +173,7 @@ class PrimitivesSerializationSpec extends PekkoSpec(PrimitivesSerializationSpec.
     }
 
     "have right serializer id" in {
-      // checking because moved to akka-actor
+      // checking because moved to pekko-actor
       serializerFor(1L.asInstanceOf[AnyRef]).identifier === 21
     }
 
diff --git a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonModule.scala b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonModule.scala
index 7d5afeff0c..d09339535d 100644
--- a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonModule.scala
+++ b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonModule.scala
@@ -37,7 +37,7 @@ import pekko.annotation.InternalApi
 @InternalApi private[pekko] object JacksonModule {
 
   lazy val version: Version = {
-    val groupId = "com.typesafe.akka"
+    val groupId = "org.apache.pekko"
     val artifactId = "serialization-jackson"
     val version = pekko.Version.current
     VersionUtil.parseVersion(version, groupId, artifactId)
diff --git a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonObjectMapperProvider.scala b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonObjectMapperProvider.scala
index 2d8ef45eb6..6b9376c198 100644
--- a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonObjectMapperProvider.scala
+++ b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonObjectMapperProvider.scala
@@ -281,10 +281,10 @@ object JacksonObjectMapperProvider extends ExtensionId[JacksonObjectMapperProvid
   private def isModuleEnabled(fqcn: String, dynamicAccess: DynamicAccess): Boolean =
     fqcn match {
       case "org.apache.pekko.serialization.jackson.PekkoTypedJacksonModule" =>
-        // akka-actor-typed dependency is "provided" and may not be included
+        // pekko-actor-typed dependency is "provided" and may not be included
         dynamicAccess.classIsOnClasspath("org.apache.pekko.actor.typed.ActorRef")
       case "org.apache.pekko.serialization.jackson.PekkoStreamJacksonModule" =>
-        // akka-stream dependency is "provided" and may not be included
+        // pekko-stream dependency is "provided" and may not be included
         dynamicAccess.classIsOnClasspath("org.apache.pekko.stream.Graph")
       case _ => true
     }
diff --git a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonSerializer.scala b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonSerializer.scala
index 84dc2e8a24..6d1d1e722b 100644
--- a/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonSerializer.scala
+++ b/serialization-jackson/src/main/scala/org/apache/pekko/serialization/jackson/JacksonSerializer.scala
@@ -142,7 +142,7 @@ import pekko.util.OptionVal
 /**
  * INTERNAL API: only public by configuration
  *
- * Akka serializer for Jackson with JSON.
+ * Pekko serializer for Jackson with JSON.
  */
 @InternalApi private[pekko] final class JacksonJsonSerializer(system: ExtendedActorSystem, bindingName: String)
     extends JacksonSerializer(
@@ -153,7 +153,7 @@ import pekko.util.OptionVal
 /**
  * INTERNAL API: only public by configuration
  *
- * Akka serializer for Jackson with CBOR.
+ * Pekko serializer for Jackson with CBOR.
  */
 @InternalApi private[pekko] final class JacksonCborSerializer(system: ExtendedActorSystem, bindingName: String)
     extends JacksonSerializer(
diff --git a/serialization-jackson/src/test/scala/org/apache/pekko/serialization/jackson/JacksonSerializerSpec.scala b/serialization-jackson/src/test/scala/org/apache/pekko/serialization/jackson/JacksonSerializerSpec.scala
index 71bd942e32..eab692c2a2 100644
--- a/serialization-jackson/src/test/scala/org/apache/pekko/serialization/jackson/JacksonSerializerSpec.scala
+++ b/serialization-jackson/src/test/scala/org/apache/pekko/serialization/jackson/JacksonSerializerSpec.scala
@@ -129,7 +129,7 @@ object ScalaTestMessages {
       extends TestMessage
   // #jackson-scala-enumeration
 
-  // delegate to AkkaSerialization
+  // delegate to PekkoSerialization
   object HasPekkoSerializer {
     def apply(description: String): HasPekkoSerializer = new HasPekkoSerializer(description)
   }
@@ -163,7 +163,7 @@ object ScalaTestMessages {
   final case class WithPekkoSerializer(
       @JsonDeserialize(`using` = classOf[PekkoSerializationDeserializer])
       @JsonSerialize(`using` = classOf[PekkoSerializationSerializer])
-      akkaSerializer: HasPekkoSerializer)
+      pekkoSerializer: HasPekkoSerializer)
       extends TestMessage
 }
 
@@ -1278,7 +1278,7 @@ abstract class JacksonSerializerSpec(serializerName: String)
       }
     }
 
-    "delegate to akka serialization" in {
+    "delegate to pekko serialization" in {
       checkSerialization(WithPekkoSerializer(HasPekkoSerializer("cat")))
     }
 
diff --git a/stream-tests-tck/src/test/scala-jdk9-only/org/apache/pekko/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala b/stream-tests-tck/src/test/scala-jdk9-only/org/apache/pekko/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala
index 97fc96dde4..03f504141e 100644
--- a/stream-tests-tck/src/test/scala-jdk9-only/org/apache/pekko/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala
+++ b/stream-tests-tck/src/test/scala-jdk9-only/org/apache/pekko/stream/tck/IterablePublisherViaJavaFlowPublisherTest.scala
@@ -26,10 +26,10 @@ class IterablePublisherViaJavaFlowPublisherTest extends PekkoPublisherVerificati
     val sourceViaJavaFlowPublisher: JavaFlow.Publisher[Int] = Source(iterable(elements))
       .runWith(JavaFlowSupport.Sink.asPublisher(fanout = false))
 
-    val javaFlowPublisherIntoAkkaSource: Source[Int, NotUsed] =
+    val javaFlowPublisherIntoPekkoSource: Source[Int, NotUsed] =
       JavaFlowSupport.Source.fromPublisher(sourceViaJavaFlowPublisher)
 
-    javaFlowPublisherIntoAkkaSource
+    javaFlowPublisherIntoPekkoSource
       .runWith(Sink.asPublisher(false)) // back as RS Publisher
   }
 
diff --git a/stream-tests-tck/src/test/scala/org/apache/pekko/stream/tck/PekkoIdentityProcessorVerification.scala b/stream-tests-tck/src/test/scala/org/apache/pekko/stream/tck/PekkoIdentityProcessorVerification.scala
index fa159b7d28..ebc5e5ed93 100644
--- a/stream-tests-tck/src/test/scala/org/apache/pekko/stream/tck/PekkoIdentityProcessorVerification.scala
+++ b/stream-tests-tck/src/test/scala/org/apache/pekko/stream/tck/PekkoIdentityProcessorVerification.scala
@@ -50,7 +50,7 @@ abstract class PekkoIdentityProcessorVerification[T](env: TestEnvironment, publi
     }
   }
 
-  /** By default Akka Publishers do not support Fanout! */
+  /** By default, Pekko Publishers do not support Fanout! */
   override def maxSupportedSubscribers: Long = 1L
 
   override lazy val publisherExecutorService: ExecutorService =

