Posted to commits@pekko.apache.org by fa...@apache.org on 2023/02/05 09:42:38 UTC

[incubator-pekko-connectors-kafka] branch main updated: Pekko migration (#17)

This is an automated email from the ASF dual-hosted git repository.

fanningpj pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-pekko-connectors-kafka.git


The following commit(s) were added to refs/heads/main by this push:
     new a98a7bd2 Pekko migration (#17)
a98a7bd2 is described below

commit a98a7bd23175fb9dc7666d207f77560f317f0739
Author: Seeta Ramayya <35...@users.noreply.github.com>
AuthorDate: Sun Feb 5 10:42:33 2023 +0100

    Pekko migration (#17)
    
    * first migration
    
    * temp fix in build.sbt
    
    * migrated pekko kafka
    
    * package folders adjusted for core module (main)
    
    * package folders adjusted for tests/test
    
    * package folders adjusted for tests/it
    
    * package folders adjusted for tests/main
    
    * package folders adjusted for testkit (main)
    
    * package folders adjusted for benchmarks (it)
    
    * package folders adjusted for benchmarks (main)
    
    * package folders adjusted for cluster-sharding (main)
    
    * package folders adjusted for testkit Java classes

    * Some more akka import adjustments in Scala classes

    * Some more akka import adjustments in Java classes
    
    * - Finally, replaced the word akka with org.apache.pekko everywhere in .scala files (see the import sketch after this list)
    - Added a temporary copy-and-paste class from Alpakka (until the migration completes)
    
    * Finally, replaced the word akka with org.apache.pekko everywhere in .java files
    
    * adjusted logback as well
    
    * Addressed PR comments
    
    * SCM and contributor data adjusted
    
    * formatted files
    
    * fixed the documentation build
    
    * fixed pekko discovery check
    
    * Fixed another unit test
    
    * one more test fix
    
    * Renamed package names in logback
    
    * Fixed all unit tests (build should be green)
    
    * Renamed Alpakka... -> PekkoConnectors...
    
    * Renamed Alpakka... -> PekkoConnectors...
    
    * Removed unnecessary TODO
    
    * Removed the dependency on Lightbend's akka-stream-alpakka-csv
    
    * - Using a package configuration mapping instead of the default scaladoc.base_url configuration
    - Adjusted Pekko extref links
    
    * All doc links in build.sbt are adjusted
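
For reviewers: at the source level the whole migration amounts to the package-prefix
change visible throughout the diff below. A minimal, hypothetical consumer written
against the migrated API (bootstrap server, group id and topic name are placeholder
values, not part of this commit):

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.{ ConsumerSettings, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.Consumer
    import org.apache.kafka.common.serialization.StringDeserializer

    object MigrationSketch extends App {
      implicit val system: ActorSystem = ActorSystem()

      // Before the migration these settings lived under akka.kafka;
      // only the package prefix changes, the API surface stays the same.
      val settings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
        .withBootstrapServers("localhost:9092")
        .withGroupId("group1")

      // Materialization uses the materializer provided by the implicit system.
      Consumer
        .plainSource(settings, Subscriptions.topics("topic1"))
        .runForeach(record => println(record.value()))
    }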
---
 .scalafmt.conf                                     |   1 +
 .../pekko}/kafka/benchmarks/BatchedConsumer.scala  |  10 +-
 .../pekko}/kafka/benchmarks/Benchmarks.scala       |  24 +-
 .../kafka/benchmarks/NoCommitBackpressure.scala    |   8 +-
 .../PekkoConnectorsCommittableProducer.scala}      |  26 +-
 .../apache/pekko}/kafka/benchmarks/Producer.scala  |  15 +-
 .../apache/pekko}/kafka/benchmarks/SpecBase.scala  |   4 +-
 .../pekko}/kafka/benchmarks/Transactions.scala     |  10 +-
 benchmarks/src/main/resources/logback.xml          |   4 +-
 .../pekko/kafka/benchmarks/CsvFormatter.scala      | 126 ++++
 .../pekko}/kafka/benchmarks/FixtureGen.scala       |   4 +-
 .../pekko}/kafka/benchmarks/InflightMetrics.scala  |  12 +-
 .../kafka/benchmarks/KafkaConsumerBenchmarks.scala |   2 +-
 .../kafka/benchmarks/KafkaConsumerFixtureGen.scala |   4 +-
 .../kafka/benchmarks/KafkaProducerBenchmarks.scala |   2 +-
 .../kafka/benchmarks/KafkaProducerFixtureGen.scala |   6 +-
 .../benchmarks/KafkaTransactionBenchmarks.scala    |   4 +-
 .../benchmarks/KafkaTransactionFixtureGen.scala    |   4 +-
 .../PekkoConnectorsCommittableSinkFixtures.scala}  |  42 +-
 .../kafka/benchmarks/PerfFixtureHelpers.scala      |   2 +-
 .../ReactiveKafkaConsumerBenchmarks.scala          |  20 +-
 .../benchmarks/ReactiveKafkaConsumerFixtures.scala |  18 +-
 .../ReactiveKafkaProducerBenchmarks.scala          |  16 +-
 .../benchmarks/ReactiveKafkaProducerFixtures.scala |  18 +-
 .../ReactiveKafkaTransactionBenchmarks.scala       |  12 +-
 .../ReactiveKafkaTransactionFixtures.scala         |  20 +-
 .../apache/pekko}/kafka/benchmarks/Timed.scala     |  42 +-
 .../kafka/benchmarks/app/RunTestCommand.scala      |   4 +-
 build.sbt                                          | 100 +--
 .../cluster/sharding/KafkaClusterSharding.scala    |  76 +--
 .../pekko}/kafka/CommitTimeoutException.scala      |   2 +-
 .../apache/pekko}/kafka/CommitterSettings.scala    |  16 +-
 .../pekko}/kafka/ConnectionCheckerSettings.scala   |   4 +-
 .../apache/pekko}/kafka/ConsumerFailed.scala       |   2 +-
 .../apache/pekko}/kafka/ConsumerMessage.scala      |   8 +-
 .../apache/pekko}/kafka/ConsumerSettings.scala     |  32 +-
 .../pekko}/kafka/KafkaConnectionFailed.scala       |   2 +-
 .../apache/pekko}/kafka/KafkaConsumerActor.scala   |  10 +-
 .../apache/pekko}/kafka/Metadata.scala             |   4 +-
 .../kafka/OffsetResetProtectionSettings.scala      |   6 +-
 .../apache/pekko}/kafka/ProducerMessage.scala      |   4 +-
 .../apache/pekko}/kafka/ProducerSettings.scala     |  26 +-
 .../apache/pekko}/kafka/RestrictedConsumer.scala   |   6 +-
 .../apache/pekko}/kafka/Subscriptions.scala        |  28 +-
 .../kafka/internal/BaseSingleSourceLogic.scala     |  18 +-
 .../kafka/internal/CommitCollectorStage.scala      |  16 +-
 .../kafka/internal/CommitObservationLogic.scala    |  15 +-
 .../pekko}/kafka/internal/CommitTrigger.scala      |   4 +-
 .../pekko}/kafka/internal/CommittableSources.scala |  40 +-
 .../internal/CommittingProducerSinkStage.scala     |  20 +-
 .../pekko}/kafka/internal/ConfigSettings.scala     |   6 +-
 .../pekko}/kafka/internal/ConnectionChecker.scala  |  10 +-
 .../kafka/internal/ConsumerProgressTracking.scala  |   4 +-
 .../kafka/internal/ConsumerResetProtection.scala   |  12 +-
 .../kafka/internal/ControlImplementations.scala    |  22 +-
 .../kafka/internal/DefaultProducerStage.scala      |  22 +-
 .../pekko}/kafka/internal/DeferredProducer.scala   |  12 +-
 .../kafka/internal/ExternalSingleSourceLogic.scala |  10 +-
 .../pekko}/kafka/internal/KafkaConsumerActor.scala |  24 +-
 .../pekko}/kafka/internal/KafkaSourceStage.scala   |  10 +-
 .../pekko}/kafka/internal/LoggingWithId.scala      |   8 +-
 .../pekko}/kafka/internal/MessageBuilder.scala     |  10 +-
 .../internal/PartitionAssignmentHelpers.scala      |  16 +-
 .../pekko}/kafka/internal/PlainSources.scala       |  20 +-
 .../pekko}/kafka/internal/ProducerStage.scala      |  10 +-
 .../pekko}/kafka/internal/SingleSourceLogic.scala  |  18 +-
 .../pekko}/kafka/internal/SourceLogicBuffer.scala  |   6 +-
 .../kafka/internal/SourceLogicSubscription.scala   |  16 +-
 .../pekko}/kafka/internal/SubSourceLogic.scala     |  38 +-
 .../internal/TransactionalProducerStage.scala      |  22 +-
 .../kafka/internal/TransactionalSources.scala      |  38 +-
 .../apache/pekko}/kafka/javadsl/Committer.scala    |  18 +-
 .../apache/pekko}/kafka/javadsl/Consumer.scala     |  24 +-
 .../pekko}/kafka/javadsl/DiscoverySupport.scala    |   8 +-
 .../pekko}/kafka/javadsl/MetadataClient.scala      |  16 +-
 .../kafka/javadsl/PartitionAssignmentHandler.scala |  14 +-
 .../apache/pekko}/kafka/javadsl/Producer.scala     |  70 +-
 .../apache/pekko}/kafka/javadsl/SendProducer.scala |  16 +-
 .../pekko}/kafka/javadsl/Transactional.scala       |  27 +-
 .../apache/pekko}/kafka/scaladsl/Committer.scala   |  18 +-
 .../apache/pekko}/kafka/scaladsl/Consumer.scala    |  20 +-
 .../pekko}/kafka/scaladsl/DiscoverySupport.scala   |  18 +-
 .../pekko}/kafka/scaladsl/MetadataClient.scala     |  14 +-
 .../scaladsl/PartitionAssignmentHandler.scala      |  14 +-
 .../apache/pekko}/kafka/scaladsl/Producer.scala    |  70 +-
 .../pekko}/kafka/scaladsl/SendProducer.scala       |  18 +-
 .../pekko}/kafka/scaladsl/Transactional.scala      |  20 +-
 docs/src/main/paradox/atleastonce.md               |   2 +-
 docs/src/main/paradox/cluster-sharding.md          |  24 +-
 docs/src/main/paradox/consumer-metadata.md         |  36 +-
 docs/src/main/paradox/consumer-rebalance.md        |   4 +-
 docs/src/main/paradox/consumer.md                  |  48 +-
 docs/src/main/paradox/debugging.md                 |   2 +-
 docs/src/main/paradox/discovery.md                 |  12 +-
 docs/src/main/paradox/errorhandling.md             |   2 +-
 docs/src/main/paradox/home.md                      |  10 +-
 docs/src/main/paradox/index.md                     |   2 +-
 docs/src/main/paradox/producer.md                  |  10 +-
 docs/src/main/paradox/send-producer.md             |   4 +-
 docs/src/main/paradox/testing-testcontainers.md    |  12 +-
 docs/src/main/paradox/transactions.md              |   2 +-
 project/VersionGenerator.scala                     |   2 +-
 .../apache/pekko/kafka/testkit}/KafkaTest.java     |  11 +-
 .../testkit}/TestcontainersKafkaJunit4Test.java    |  13 +-
 .../kafka/testkit}/TestcontainersKafkaTest.java    |  12 +-
 .../testkit/internal/KafkaContainerCluster.java    | 759 +++++++++++----------
 .../internal/PekkoConnectorsKafkaContainer.java}   |  20 +-
 .../testkit/internal/SchemaRegistryContainer.java  |   4 +-
 .../kafka/testkit/javadsl/BaseKafkaTest.java       |  28 +-
 .../kafka/testkit/javadsl/KafkaJunit4Test.java     |  10 +-
 .../kafka/testkit/ConsumerResultFactory.scala      |  14 +-
 .../kafka/testkit/KafkaTestkitSettings.scala       |   4 +-
 .../KafkaTestkitTestcontainersSettings.scala       |  26 +-
 .../kafka/testkit/ProducerResultFactory.scala      |   8 +-
 .../kafka/testkit/internal/KafkaTestKit.scala      |   8 +-
 .../testkit/internal/KafkaTestKitChecks.scala      |   2 +-
 .../testkit/internal/TestFrameworkInterface.scala  |   2 +-
 .../testkit/internal/TestcontainersKafka.scala     |  10 +-
 .../testkit/javadsl/ConsumerControlFactory.scala   |  14 +-
 .../testkit/scaladsl/ConsumerControlFactory.scala  |  14 +-
 .../pekko}/kafka/testkit/scaladsl/KafkaSpec.scala  |  28 +-
 .../testkit/scaladsl/ScalatestKafkaSpec.scala      |   4 +-
 .../testkit/scaladsl/TestcontainersKafkaLike.scala |  12 +-
 .../scaladsl/TestcontainersKafkaPerClassLike.scala |   4 +-
 tests/src/it/resources/logback-test.xml            |  10 +-
 .../apache/pekko}/kafka/IntegrationTests.scala     |   6 +-
 .../kafka/PartitionedSourceFailoverSpec.scala      |  14 +-
 .../pekko}/kafka/PlainSourceFailoverSpec.scala     |  12 +-
 .../kafka/TransactionsPartitionedSourceSpec.scala  |  16 +-
 .../pekko}/kafka/TransactionsSourceSpec.scala      |  18 +-
 .../apache/pekko}/kafka/KafkaPorts.scala           |   2 +-
 .../src/test/java/docs/javadsl/AssignmentTest.java |  26 +-
 .../test/java/docs/javadsl/AtLeastOnceTest.java    |  30 +-
 .../java/docs/javadsl/ClusterShardingExample.java  |  38 +-
 .../java/docs/javadsl/ConsumerExampleTest.java     |  51 +-
 .../java/docs/javadsl/ConsumerSettingsTest.java    |  10 +-
 .../test/java/docs/javadsl/FetchMetadataTest.java  |  18 +-
 .../test/java/docs/javadsl/MetadataClientTest.java |  14 +-
 .../java/docs/javadsl/ProducerSettingsTest.java    |  10 +-
 tests/src/test/java/docs/javadsl/ProducerTest.java |  20 +-
 .../javadsl/SchemaRegistrySerializationTest.java   |  24 +-
 .../test/java/docs/javadsl/SendProducerTest.java   |  14 +-
 .../test/java/docs/javadsl/SerializationTest.java  |  26 +-
 .../test/java/docs/javadsl/TestkitSamplesTest.java |  34 +-
 .../docs/javadsl/TestkitTestcontainersTest.java    |   6 +-
 .../java/docs/javadsl/TransactionsExampleTest.java |  22 +-
 tests/src/test/resources/application.conf          |  25 +-
 tests/src/test/resources/logback-test.xml          |  10 +-
 .../test/scala/docs/scaladsl/AssignmentSpec.scala  |  12 +-
 .../src/test/scala/docs/scaladsl/AtLeastOnce.scala |  18 +-
 .../docs/scaladsl/ClusterShardingExample.scala     |  28 +-
 .../test/scala/docs/scaladsl/ConsumerExample.scala |  34 +-
 .../test/scala/docs/scaladsl/DocsSpecBase.scala    |   8 +-
 .../test/scala/docs/scaladsl/FetchMetadata.scala   |  12 +-
 .../scala/docs/scaladsl/PartitionExamples.scala    |  18 +-
 .../test/scala/docs/scaladsl/ProducerExample.scala |  14 +-
 .../scaladsl/SchemaRegistrySerializationSpec.scala |  16 +-
 .../scala/docs/scaladsl/SendProducerSpec.scala     |  12 +-
 .../scala/docs/scaladsl/SerializationSpec.scala    |  16 +-
 .../scala/docs/scaladsl/TestkitSamplesSpec.scala   |  20 +-
 .../scala/docs/scaladsl/TransactionsExample.scala  |  23 +-
 .../apache/pekko}/kafka/ConfigSettingsSpec.scala   |   6 +-
 .../apache/pekko}/kafka/ConsumerSettingsSpec.scala |  16 +-
 .../apache/pekko}/kafka/ProducerSettingsSpec.scala |  17 +-
 .../apache/pekko}/kafka/Repeated.scala             |   2 +-
 .../apache/pekko}/kafka/TransactionsOps.scala      |  22 +-
 .../kafka/internal/CommitCollectorStageSpec.scala  |  32 +-
 .../internal/CommittingProducerSinkSpec.scala      |  34 +-
 .../kafka/internal/CommittingWithMockSpec.scala    |  26 +-
 .../kafka/internal/ConnectionCheckerSpec.scala     |  16 +-
 .../pekko}/kafka/internal/ConsumerDummy.scala      |   4 +-
 .../pekko}/kafka/internal/ConsumerMock.scala       |   6 +-
 .../internal/ConsumerProgressTrackingSpec.scala    |   4 +-
 .../internal/ConsumerResetProtectionSpec.scala     |  18 +-
 .../pekko}/kafka/internal/ConsumerSpec.scala       |  26 +-
 .../pekko}/kafka/internal/EnhancedConfigSpec.scala |   4 +-
 .../kafka/internal/OffsetAggregationSpec.scala     |   4 +-
 .../kafka/internal/PartitionedSourceSpec.scala     |  22 +-
 .../pekko}/kafka/internal/ProducerSpec.scala       |  26 +-
 .../pekko}/kafka/internal/SubscriptionsSpec.scala  |   8 +-
 .../apache/pekko}/kafka/javadsl/ControlSpec.scala  |  10 +-
 .../kafka/scaladsl/CommittableSinkSpec.scala       |  16 +-
 .../pekko}/kafka/scaladsl/CommittingSpec.scala     |  26 +-
 .../kafka/scaladsl/ConnectionCheckerSpec.scala     |  16 +-
 .../apache/pekko}/kafka/scaladsl/ControlSpec.scala |   8 +-
 .../pekko}/kafka/scaladsl/IntegrationSpec.scala    |  24 +-
 .../pekko}/kafka/scaladsl/MetadataClientSpec.scala |   6 +-
 .../kafka/scaladsl/MisconfiguredConsumerSpec.scala |  16 +-
 .../kafka/scaladsl/MisconfiguredProducerSpec.scala |  14 +-
 .../pekko}/kafka/scaladsl/MultiConsumerSpec.scala  |  10 +-
 .../kafka/scaladsl/PartitionedSourcesSpec.scala    |  22 +-
 .../pekko}/kafka/scaladsl/RebalanceExtSpec.scala   |  22 +-
 .../pekko}/kafka/scaladsl/RebalanceSpec.scala      |  36 +-
 .../pekko}/kafka/scaladsl/ReconnectSpec.scala      |  12 +-
 .../kafka/scaladsl/RetentionPeriodSpec.scala       |  16 +-
 .../apache/pekko}/kafka/scaladsl/SpecBase.scala    |  14 +-
 .../pekko}/kafka/scaladsl/TimestampSpec.scala      |  10 +-
 .../pekko}/kafka/scaladsl/TransactionsSpec.scala   |  18 +-
 .../pekko}/kafka/tests/CapturingAppender.scala     |  14 +-
 .../apache/pekko}/kafka/tests/LogbackUtil.scala    |   6 +-
 .../tests/javadsl/LogCapturingExtension.scala      |   4 +-
 .../kafka/tests/javadsl/LogCapturingJunit4.scala   |   4 +-
 .../pekko}/kafka/tests/scaladsl/LogCapturing.scala |   4 +-
 203 files changed, 2144 insertions(+), 1961 deletions(-)

diff --git a/.scalafmt.conf b/.scalafmt.conf
index 1aae8a44..3714a620 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -35,6 +35,7 @@ optIn.configStyleArguments               = false
 danglingParentheses.preset               = false
 spaces.inImportCurlyBraces               = true
 rewrite.neverInfix.excludeFilters        = [
+  at
   and
   min
   max
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/BatchedConsumer.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/BatchedConsumer.scala
similarity index 86%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/BatchedConsumer.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/BatchedConsumer.scala
index 360ddc16..891a00a8 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/BatchedConsumer.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/BatchedConsumer.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.BenchmarksBase.{ topic_1000_100, topic_1000_5000, topic_1000_5000_8 }
-import akka.kafka.benchmarks.Timed.runPerfTest
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.BenchmarksBase.{ topic_1000_100, topic_1000_5000, topic_1000_5000_8 }
+import org.apache.pekko.kafka.benchmarks.Timed.runPerfTest
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 
 class ApacheKafkaBatchedConsumer extends BenchmarksBase() {
   it should "bench with small messages" in {
@@ -36,7 +36,7 @@ class ApacheKafkaBatchedConsumer extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaBatchedConsumer extends BenchmarksBase() {
+class PekkoConnectorsKafkaBatchedConsumer extends BenchmarksBase() {
 
   it should "bench with small messages" in {
     val cmd = RunTestCommand("alpakka-kafka-batched-consumer", bootstrapServers, topic_1000_100)
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/Benchmarks.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Benchmarks.scala
similarity index 88%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/Benchmarks.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Benchmarks.scala
index 048b0722..a72ddb8e 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/Benchmarks.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Benchmarks.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
-
-import akka.kafka.benchmarks.BenchmarksBase._
-import akka.kafka.benchmarks.InflightMetrics._
-import akka.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
-import akka.kafka.benchmarks.Timed.{ runPerfTest, runPerfTestInflightMetrics }
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.kafka.benchmarks.BenchmarksBase._
+import org.apache.pekko.kafka.benchmarks.InflightMetrics._
+import org.apache.pekko.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
+import org.apache.pekko.kafka.benchmarks.Timed.{ runPerfTest, runPerfTestInflightMetrics }
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
 import com.typesafe.config.Config
 
 object BenchmarksBase {
@@ -61,7 +61,7 @@ class ApacheKafkaConsumerNokafka extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaConsumerNokafka extends BenchmarksBase() {
+class PekkoConnectorsKafkaConsumerNokafka extends BenchmarksBase() {
   it should "bench" in {
     val cmd = RunTestCommand("alpakka-kafka-plain-consumer-nokafka", bootstrapServers, topic_2000_100)
     runPerfTest(cmd,
@@ -83,7 +83,7 @@ class ApacheKafkaPlainConsumer extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaPlainConsumer extends BenchmarksBase() {
+class PekkoConnectorsKafkaPlainConsumer extends BenchmarksBase() {
   it should "bench" in {
     val cmd = RunTestCommand("alpakka-kafka-plain-consumer", bootstrapServers, topic_2000_100)
     runPerfTest(cmd, ReactiveKafkaConsumerFixtures.plainSources(cmd), ReactiveKafkaConsumerBenchmarks.consumePlain)
@@ -127,7 +127,7 @@ class ApacheKafkaAtMostOnceConsumer extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaAtMostOnceConsumer extends BenchmarksBase() {
+class PekkoConnectorsKafkaAtMostOnceConsumer extends BenchmarksBase() {
   it should "bench" in {
     val cmd = RunTestCommand("alpakka-kafka-at-most-once-consumer", bootstrapServers, topic_50_100)
     runPerfTest(cmd,
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/NoCommitBackpressure.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/NoCommitBackpressure.scala
similarity index 91%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/NoCommitBackpressure.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/NoCommitBackpressure.scala
index 786e3cf1..d7e66849 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/NoCommitBackpressure.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/NoCommitBackpressure.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.Timed.runPerfTest
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.Timed.runPerfTest
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 
 import BenchmarksBase._
 
@@ -38,7 +38,7 @@ class RawKafkaCommitEveryPollConsumer extends BenchmarksBase() {
 //  }
 }
 
-class AlpakkaCommitAndForgetConsumer extends BenchmarksBase() {
+class PekkoConnectorsCommitAndForgetConsumer extends BenchmarksBase() {
   val prefix = "alpakka-kafka-commit-and-forget-"
 
   it should "bench with small messages" in {
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/AlpakkaCommittableProducer.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableProducer.scala
similarity index 58%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/AlpakkaCommittableProducer.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableProducer.scala
index 553b74f6..4713f41b 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/AlpakkaCommittableProducer.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableProducer.scala
@@ -3,45 +3,45 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.BenchmarksBase.{ topic_100_100, topic_100_5000 }
-import akka.kafka.benchmarks.Timed.runPerfTest
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.BenchmarksBase.{ topic_100_100, topic_100_5000 }
+import org.apache.pekko.kafka.benchmarks.Timed.runPerfTest
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 
 /**
  * Compares the `CommittingProducerSinkStage` with the composed implementation of `Producer.flexiFlow` and `Committer.sink`.
  */
-class AlpakkaCommittableProducer extends BenchmarksBase() {
+class PekkoConnectorsCommittableProducer extends BenchmarksBase() {
   it should "bench composed sink with 100b messages" in {
     val cmd = RunTestCommand("alpakka-committable-producer-composed", bootstrapServers, topic_100_100)
     runPerfTest(
       cmd,
-      AlpakkaCommittableSinkFixtures.composedSink(cmd),
-      AlpakkaCommittableSinkBenchmarks.run)
+      PekkoConnectorsCommittableSinkFixtures.composedSink(cmd),
+      PekkoConnectorsCommittableSinkBenchmarks.run)
   }
 
   it should "bench composed sink with 5000b messages" in {
     val cmd = RunTestCommand("alpakka-committable-producer-composed-5000b", bootstrapServers, topic_100_5000)
     runPerfTest(
       cmd,
-      AlpakkaCommittableSinkFixtures.composedSink(cmd),
-      AlpakkaCommittableSinkBenchmarks.run)
+      PekkoConnectorsCommittableSinkFixtures.composedSink(cmd),
+      PekkoConnectorsCommittableSinkBenchmarks.run)
   }
 
   it should "bench `Producer.committableSink` with 100b messages" in {
     val cmd = RunTestCommand("alpakka-committable-producer", bootstrapServers, topic_100_100)
     runPerfTest(
       cmd,
-      AlpakkaCommittableSinkFixtures.producerSink(cmd),
-      AlpakkaCommittableSinkBenchmarks.run)
+      PekkoConnectorsCommittableSinkFixtures.producerSink(cmd),
+      PekkoConnectorsCommittableSinkBenchmarks.run)
   }
 
   it should "bench `Producer.committableSink` with 5000b messages" in {
     val cmd = RunTestCommand("alpakka-committable-producer-5000b", bootstrapServers, topic_100_5000)
     runPerfTest(
       cmd,
-      AlpakkaCommittableSinkFixtures.producerSink(cmd),
-      AlpakkaCommittableSinkBenchmarks.run)
+      PekkoConnectorsCommittableSinkFixtures.producerSink(cmd),
+      PekkoConnectorsCommittableSinkBenchmarks.run)
   }
 }
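
For context on what this benchmark measures (per the scaladoc above): the sketch below
contrasts the single-stage `Producer.committableSink` with the composed
`Producer.flexiFlow` plus `Committer.sink` pipeline, under the migrated package names.
Topic names and connection settings are placeholders, not taken from this commit.

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.{ CommitterSettings, ConsumerSettings, ProducerSettings, Subscriptions }
    import org.apache.pekko.kafka.ProducerMessage
    import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer, Producer }
    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.kafka.common.serialization.{ StringDeserializer, StringSerializer }

    object CommittableSinkShapes extends App {
      implicit val system: ActorSystem = ActorSystem()

      val consumerSettings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
        .withBootstrapServers("localhost:9092")
        .withGroupId("bench")
      val producerSettings = ProducerSettings(system, new StringSerializer, new StringSerializer)
        .withBootstrapServers("localhost:9092")
      val committerSettings = CommitterSettings(system)

      // Shared upstream: consume, wrap each record in a producer envelope,
      // carrying the committable offset as pass-through.
      val envelopes = Consumer
        .committableSource(consumerSettings, Subscriptions.topics("source-topic"))
        .map { msg =>
          ProducerMessage.single(
            new ProducerRecord("sink-topic", msg.record.key, msg.record.value),
            msg.committableOffset)
        }

      // Variant 1: one fused stage (CommittingProducerSinkStage) produces and commits.
      envelopes.to(Producer.committableSink(producerSettings, committerSettings)).run()

      // Variant 2: the composed equivalent, producing via flexiFlow and
      // committing the pass-through offsets separately.
      envelopes
        .via(Producer.flexiFlow(producerSettings))
        .map(_.passThrough)
        .to(Committer.sink(committerSettings))
        .run()
    }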
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/Producer.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Producer.scala
similarity index 86%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/Producer.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Producer.scala
index 18598934..d6441081 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/Producer.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Producer.scala
@@ -3,11 +3,16 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.BenchmarksBase.{ topic_2000_100, topic_2000_500, topic_2000_5000, topic_2000_5000_8 }
-import akka.kafka.benchmarks.Timed.runPerfTest
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.BenchmarksBase.{
+  topic_2000_100,
+  topic_2000_500,
+  topic_2000_5000,
+  topic_2000_5000_8
+}
+import org.apache.pekko.kafka.benchmarks.Timed.runPerfTest
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 
 class ApacheKafkaPlainProducer extends BenchmarksBase() {
   private val prefix = "apache-kafka-plain-producer"
@@ -34,7 +39,7 @@ class ApacheKafkaPlainProducer extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaPlainProducer extends BenchmarksBase() {
+class PekkoConnectorsKafkaPlainProducer extends BenchmarksBase() {
   private val prefix = "alpakka-kafka-plain-producer"
 
   it should "bench with small messages" in {
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/SpecBase.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/SpecBase.scala
similarity index 82%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/SpecBase.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/SpecBase.scala
index deab03fe..40583a5d 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/SpecBase.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/SpecBase.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.testkit.scaladsl.ScalatestKafkaSpec
+import org.apache.pekko.kafka.testkit.scaladsl.ScalatestKafkaSpec
 import org.scalatest.concurrent.{ Eventually, ScalaFutures }
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
diff --git a/benchmarks/src/it/scala/akka/kafka/benchmarks/Transactions.scala b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Transactions.scala
similarity index 83%
rename from benchmarks/src/it/scala/akka/kafka/benchmarks/Transactions.scala
rename to benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Transactions.scala
index 5c0cf7ca..a2348a84 100644
--- a/benchmarks/src/it/scala/akka/kafka/benchmarks/Transactions.scala
+++ b/benchmarks/src/it/scala/org/apache/pekko/kafka/benchmarks/Transactions.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.BenchmarksBase.{ topic_100_100, topic_100_5000 }
-import akka.kafka.benchmarks.Timed.runPerfTest
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.BenchmarksBase.{ topic_100_100, topic_100_5000 }
+import org.apache.pekko.kafka.benchmarks.Timed.runPerfTest
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 import scala.concurrent.duration._
 
 class ApacheKafkaTransactions extends BenchmarksBase() {
@@ -26,7 +26,7 @@ class ApacheKafkaTransactions extends BenchmarksBase() {
   }
 }
 
-class AlpakkaKafkaTransactions extends BenchmarksBase() {
+class PekkoConnectorsKafkaTransactions extends BenchmarksBase() {
   it should "bench with small messages" in {
     val cmd = RunTestCommand("alpakka-kafka-transactions", bootstrapServers, topic_100_100)
     runPerfTest(
diff --git a/benchmarks/src/main/resources/logback.xml b/benchmarks/src/main/resources/logback.xml
index 0ad2f6e8..2121742e 100644
--- a/benchmarks/src/main/resources/logback.xml
+++ b/benchmarks/src/main/resources/logback.xml
@@ -13,8 +13,8 @@
     </appender>
 
     <logger name="org.apache" level="WARN"/>
-    <logger name="akka" level="WARN"/>
-    <logger name="akka.kafka.benchmarks" level="INFO"/>
+    <logger name="org.apache.pekko" level="WARN"/>
+    <logger name="org.apache.pekko.kafka.benchmarks" level="INFO"/>
     <logger name="org.apache.kafka.common.utils.AppInfoParser" level="ERROR"/>
     <logger name="org.apache.kafka.clients.NetworkClient" level="ERROR"/>
 
diff --git a/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/CsvFormatter.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/CsvFormatter.scala
new file mode 100644
index 00000000..b711f7d1
--- /dev/null
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/CsvFormatter.scala
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2014 - 2016 Softwaremill <https://softwaremill.com>
+ * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
+ */
+
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.util.ByteString
+
+import java.nio.charset.{ Charset, StandardCharsets }
+import scala.collection.immutable
+
+private[benchmarks] sealed trait CsvQuotingStyle
+
+object CsvQuotingStyle {
+
+  /** Quote only fields requiring quotes */
+  case object Required extends CsvQuotingStyle
+
+  /** Quote all fields */
+  case object Always extends CsvQuotingStyle
+}
+
+// TODO: This needs to be deleted after migrating alpakka to pekko.
+// This is just temporary base to see everything compiles and tests will pass without any issue
+private[benchmarks] class CsvFormatter(delimiter: Char,
+    quoteChar: Char,
+    escapeChar: Char,
+    endOfLine: String,
+    quotingStyle: CsvQuotingStyle,
+    charset: Charset = StandardCharsets.UTF_8) {
+
+  private[this] val charsetName = charset.name()
+
+  private[this] val delimiterBs = ByteString(String.valueOf(delimiter), charsetName)
+  private[this] val quoteBs = ByteString(String.valueOf(quoteChar), charsetName)
+  private[this] val duplicatedQuote = ByteString(String.valueOf(Array(quoteChar, quoteChar)), charsetName)
+  private[this] val duplicatedEscape = ByteString(String.valueOf(Array(escapeChar, escapeChar)), charsetName)
+  private[this] val endOfLineBs = ByteString(endOfLine, charsetName)
+
+  def toCsv(fields: immutable.Iterable[Any]): ByteString =
+    if (fields.nonEmpty) nonEmptyToCsv(fields)
+    else endOfLineBs
+
+  private def nonEmptyToCsv(fields: immutable.Iterable[Any]) = {
+    val builder = ByteString.createBuilder
+
+    def splitAndDuplicateQuotesAndEscapes(field: String, splitAt: Int) = {
+
+      @inline def indexOfQuoteOrEscape(lastIndex: Int) = {
+        var index = lastIndex
+        var found = -1
+        while (index < field.length && found == -1) {
+          val char = field(index)
+          if (char == quoteChar || char == escapeChar) found = index
+          index += 1
+        }
+        found
+      }
+
+      var lastIndex = 0
+      var index = splitAt
+      while (index > -1) {
+        builder ++= ByteString.apply(field.substring(lastIndex, index), charsetName)
+        val char = field.charAt(index)
+        if (char == quoteChar) {
+          builder ++= duplicatedQuote
+        } else {
+          builder ++= duplicatedEscape
+        }
+        lastIndex = index + 1
+        index = indexOfQuoteOrEscape(lastIndex)
+      }
+      if (lastIndex < field.length) {
+        builder ++= ByteString(field.substring(lastIndex), charsetName)
+      }
+    }
+
+    def append(field: String) = {
+      val (quoteIt, splitAt) = requiresQuotesOrSplit(field)
+      if (quoteIt || quotingStyle == CsvQuotingStyle.Always) {
+        builder ++= quoteBs
+        if (splitAt != -1) {
+          splitAndDuplicateQuotesAndEscapes(field, splitAt)
+        } else {
+          builder ++= ByteString(field, charsetName)
+        }
+        builder ++= quoteBs
+      } else {
+        builder ++= ByteString(field, charsetName)
+      }
+    }
+
+    val iterator = fields.iterator
+    var hasNext = iterator.hasNext
+    while (hasNext) {
+      val next = iterator.next()
+      if (next != null) {
+        append(next.toString)
+      }
+      hasNext = iterator.hasNext
+      if (hasNext) {
+        builder ++= delimiterBs
+      }
+    }
+    builder ++= endOfLineBs
+    builder.result()
+  }
+
+  private def requiresQuotesOrSplit(field: String): (Boolean, Int) = {
+    var quotes = CsvQuotingStyle.Always == quotingStyle
+    var split = -1
+    var index = 0
+    while (index < field.length && !(quotes && split != -1)) {
+      val char = field(index)
+      if (char == `quoteChar` || char == `escapeChar`) {
+        quotes = true
+        split = index
+      } else if (char == '\r' || char == '\n' || char == `delimiter`) {
+        quotes = true
+      }
+      index += 1
+    }
+    (quotes, split)
+  }
+}
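
Since the class above is only a temporary copy until the Alpakka migration completes,
a short usage sketch (with made-up field values) may help verify its behaviour. It has
to sit in the org.apache.pekko.kafka.benchmarks package because the formatter is
private[benchmarks], and the constructor arguments here are illustrative, not
necessarily those used by Timed.

    package org.apache.pekko.kafka.benchmarks

    object CsvFormatterSketch extends App {
      // Comma-delimited, double-quote quoting, backslash escape, CRLF line ends,
      // quoting only the fields that need it.
      val formatter = new CsvFormatter(',', '"', '\\', "\r\n", CsvQuotingStyle.Required)

      // Fields containing the delimiter are quoted; embedded quote chars are doubled.
      val line = formatter.toCsv(List("plain", "with,comma", "with\"quote", 42))
      print(line.utf8String) // plain,"with,comma","with""quote",42
    }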
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/FixtureGen.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/FixtureGen.scala
similarity index 68%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/FixtureGen.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/FixtureGen.scala
index d59684d0..7b084713 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/FixtureGen.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/FixtureGen.scala
@@ -3,8 +3,8 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 
 case class FixtureGen[F](command: RunTestCommand, generate: Int => F)
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/InflightMetrics.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/InflightMetrics.scala
similarity index 97%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/InflightMetrics.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/InflightMetrics.scala
index e0b880cd..6ca00b04 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/InflightMetrics.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/InflightMetrics.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import java.lang.management.{ BufferPoolMXBean, ManagementFactory, MemoryType }
 
-import akka.NotUsed
-import akka.actor.Cancellable
-import akka.kafka.scaladsl.Consumer.Control
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.Cancellable
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
 import com.codahale.metrics.{ Histogram, MetricRegistry }
 import javax.management.remote.{ JMXConnectorFactory, JMXServiceURL }
 import javax.management.{ Attribute, MBeanServerConnection, ObjectName }
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerBenchmarks.scala
similarity index 99%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerBenchmarks.scala
index f80970dd..2a8b8d8b 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerBenchmarks.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import java.time.Duration
 import java.util
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerFixtureGen.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerFixtureGen.scala
similarity index 93%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerFixtureGen.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerFixtureGen.scala
index c0575f16..48ddb76f 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaConsumerFixtureGen.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaConsumerFixtureGen.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, KafkaConsumer }
 import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, StringDeserializer }
 
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerBenchmarks.scala
similarity index 97%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerBenchmarks.scala
index 9b6ca58d..7dc72648 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerBenchmarks.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerFixtureGen.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerFixtureGen.scala
similarity index 85%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerFixtureGen.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerFixtureGen.scala
index 09570f60..875e01c9 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaProducerFixtureGen.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaProducerFixtureGen.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 import org.apache.kafka.clients.producer.KafkaProducer
 
 case class KafkaProducerTestFixture(topic: String,
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionBenchmarks.scala
similarity index 96%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionBenchmarks.scala
index 20a925c7..249b7eee 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionBenchmarks.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.benchmarks.KafkaConsumerBenchmarks.pollTimeoutMs
+import org.apache.pekko.kafka.benchmarks.KafkaConsumerBenchmarks.pollTimeoutMs
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.kafka.clients.consumer._
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionFixtureGen.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionFixtureGen.scala
similarity index 96%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionFixtureGen.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionFixtureGen.scala
index ad5c2187..47e21af2 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/KafkaTransactionFixtureGen.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/KafkaTransactionFixtureGen.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import java.util.Locale
 
-import akka.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, KafkaConsumer }
 import org.apache.kafka.clients.producer.{ KafkaProducer, ProducerConfig }
 import org.apache.kafka.common.IsolationLevel
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/AlpakkaCommittableSinkFixtures.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableSinkFixtures.scala
similarity index 74%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/AlpakkaCommittableSinkFixtures.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableSinkFixtures.scala
index 9e7ce55f..c23bd82f 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/AlpakkaCommittableSinkFixtures.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PekkoConnectorsCommittableSinkFixtures.scala
@@ -3,18 +3,18 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
-
-import akka.Done
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.{ Committable, CommittableMessage }
-import akka.kafka.ProducerMessage.Envelope
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.kafka.scaladsl.Consumer.{ Control, DrainingControl }
-import akka.kafka.scaladsl.{ Committer, Consumer, Producer }
-import akka.kafka._
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Keep, Sink, Source }
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableMessage }
+import org.apache.pekko.kafka.ProducerMessage.Envelope
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.scaladsl.Consumer.{ Control, DrainingControl }
+import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer, Producer }
+import org.apache.pekko.kafka._
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.kafka.clients.consumer.ConsumerConfig
@@ -30,13 +30,13 @@ import scala.concurrent.duration._
 import scala.concurrent.{ Await, Future, Promise }
 import scala.util.Success
 
-case class AlpakkaCommittableSinkTestFixture[SOut, FIn](sourceTopic: String,
+case class PekkoConnectorsCommittableSinkTestFixture[SOut, FIn](sourceTopic: String,
     sinkTopic: String,
     msgCount: Int,
     source: Source[SOut, Control],
     sink: Sink[FIn, Future[Done]])
 
-object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
+object PekkoConnectorsCommittableSinkFixtures extends PerfFixtureHelpers {
   type Key = Array[Byte]
   type Val = String
   type Message = CommittableMessage[Key, Val]
@@ -55,7 +55,7 @@ object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
       .withBootstrapServers(kafkaHost)
 
   def producerSink(c: RunTestCommand)(implicit actorSystem: ActorSystem) =
-    FixtureGen[AlpakkaCommittableSinkTestFixture[Message, ProducerMessage]](
+    FixtureGen[PekkoConnectorsCommittableSinkTestFixture[Message, ProducerMessage]](
       c,
       msgCount => {
         fillTopic(c.filledTopic, c.kafkaHost)
@@ -67,7 +67,7 @@ object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
         val sink: Sink[ProducerMessage, Future[Done]] =
           Producer.committableSink(createProducerSettings(c.kafkaHost), CommitterSettings(actorSystem))
 
-        AlpakkaCommittableSinkTestFixture[Message, ProducerMessage](c.filledTopic.topic,
+        PekkoConnectorsCommittableSinkTestFixture[Message, ProducerMessage](c.filledTopic.topic,
           sinkTopic,
           msgCount,
           source,
@@ -75,7 +75,7 @@ object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
       })
 
   def composedSink(c: RunTestCommand)(implicit actorSystem: ActorSystem) =
-    FixtureGen[AlpakkaCommittableSinkTestFixture[Message, ProducerMessage]](
+    FixtureGen[PekkoConnectorsCommittableSinkTestFixture[Message, ProducerMessage]](
       c,
       msgCount => {
         fillTopic(c.filledTopic, c.kafkaHost)
@@ -90,7 +90,7 @@ object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
             .map(_.passThrough)
             .toMat(Committer.sink(CommitterSettings(actorSystem)))(Keep.right)
 
-        AlpakkaCommittableSinkTestFixture[Message, ProducerMessage](c.filledTopic.topic,
+        PekkoConnectorsCommittableSinkTestFixture[Message, ProducerMessage](c.filledTopic.topic,
           sinkTopic,
           msgCount,
           source,
@@ -98,11 +98,11 @@ object AlpakkaCommittableSinkFixtures extends PerfFixtureHelpers {
       })
 }
 
-object AlpakkaCommittableSinkBenchmarks extends LazyLogging {
-  import AlpakkaCommittableSinkFixtures.{ Message, ProducerMessage }
+object PekkoConnectorsCommittableSinkBenchmarks extends LazyLogging {
+  import PekkoConnectorsCommittableSinkFixtures.{ Message, ProducerMessage }
 
   val streamingTimeout: FiniteDuration = 30.minutes
-  type Fixture = AlpakkaCommittableSinkTestFixture[Message, ProducerMessage]
+  type Fixture = PekkoConnectorsCommittableSinkTestFixture[Message, ProducerMessage]
 
   def run(fixture: Fixture, meter: Meter)(implicit mat: Materializer): Unit = {
     logger.debug("Creating and starting a stream")
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/PerfFixtureHelpers.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PerfFixtureHelpers.scala
similarity index 98%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/PerfFixtureHelpers.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PerfFixtureHelpers.scala
index 87003e9b..40de6ada 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/PerfFixtureHelpers.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/PerfFixtureHelpers.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import java.time.Duration
 import java.util
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala
similarity index 90%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala
index 56aa35fc..a974f65d 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerBenchmarks.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.actor.ActorSystem
-import akka.dispatch.ExecutionContexts
-import akka.kafka.ConsumerMessage.CommittableMessage
-import akka.kafka.benchmarks.InflightMetrics.{ BrokerMetricRequest, ConsumerMetricRequest }
-import akka.kafka.scaladsl.Committer
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.{ CommitDelivery, CommitterSettings }
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.ConsumerMessage.CommittableMessage
+import org.apache.pekko.kafka.benchmarks.InflightMetrics.{ BrokerMetricRequest, ConsumerMetricRequest }
+import org.apache.pekko.kafka.scaladsl.Committer
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.{ CommitDelivery, CommitterSettings }
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.kafka.clients.consumer.ConsumerRecord
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala
similarity index 83%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala
index 12671c59..2adf6234 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaConsumerFixtures.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
-
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.CommittableMessage
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.kafka.scaladsl.Consumer
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.{ ConsumerSettings, Subscriptions }
-import akka.stream.scaladsl.Source
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.CommittableMessage
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.{ ConsumerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl.Source
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord }
 import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, StringDeserializer }
 
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala
similarity index 81%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala
index 8c891c3c..9584b1d1 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerBenchmarks.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
-
-import akka.kafka.ConsumerMessage.CommittableMessage
-import akka.kafka.ProducerMessage
-import akka.kafka.ProducerMessage.{ Result, Results }
-import akka.kafka.benchmarks.ReactiveKafkaProducerFixtures.ReactiveKafkaProducerTestFixture
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Sink, Source }
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.kafka.ConsumerMessage.CommittableMessage
+import org.apache.pekko.kafka.ProducerMessage
+import org.apache.pekko.kafka.ProducerMessage.{ Result, Results }
+import org.apache.pekko.kafka.benchmarks.ReactiveKafkaProducerFixtures.ReactiveKafkaProducerTestFixture
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.kafka.clients.producer.ProducerRecord
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala
similarity index 82%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala
index 5d5b1506..f83820e8 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaProducerFixtures.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
-
-import akka.NotUsed
-import akka.actor.ActorSystem
-import akka.kafka.ProducerMessage.{ Envelope, Results }
-import akka.kafka.ProducerSettings
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.kafka.scaladsl.Producer
-import akka.stream.scaladsl.Flow
+package org.apache.pekko.kafka.benchmarks
+
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ProducerMessage.{ Envelope, Results }
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.scaladsl.Producer
+import org.apache.pekko.stream.scaladsl.Flow
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.serialization.{ ByteArraySerializer, StringSerializer }
 
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala
similarity index 86%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala
index 7a5c6de2..9c4ae4ca 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionBenchmarks.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.kafka.ProducerMessage
-import akka.kafka.ProducerMessage.{ Result, Results }
-import akka.kafka.benchmarks.ReactiveKafkaTransactionFixtures._
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Keep, Sink }
+import org.apache.pekko.kafka.ProducerMessage
+import org.apache.pekko.kafka.ProducerMessage.{ Result, Results }
+import org.apache.pekko.kafka.benchmarks.ReactiveKafkaTransactionFixtures._
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink }
 import com.codahale.metrics.Meter
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.kafka.clients.producer.ProducerRecord
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala
similarity index 82%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala
index c7989271..0577c990 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/ReactiveKafkaTransactionFixtures.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import akka.NotUsed
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.TransactionalMessage
-import akka.kafka.ProducerMessage.{ Envelope, Results }
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.Transactional
-import akka.kafka.{ ConsumerMessage, ConsumerSettings, ProducerSettings, Subscriptions }
-import akka.stream.scaladsl.{ Flow, Source }
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.TransactionalMessage
+import org.apache.pekko.kafka.ProducerMessage.{ Envelope, Results }
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.Transactional
+import org.apache.pekko.kafka.{ ConsumerMessage, ConsumerSettings, ProducerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl.{ Flow, Source }
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.common.serialization.{
   ByteArrayDeserializer,
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/Timed.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/Timed.scala
similarity index 73%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/Timed.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/Timed.scala
index 1aac253e..14c98bdd 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/Timed.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/Timed.scala
@@ -3,19 +3,21 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks
+package org.apache.pekko.kafka.benchmarks
 
-import java.nio.file.Paths
-import java.util.concurrent.{ ForkJoinPool, TimeUnit }
-
-import akka.kafka.benchmarks.InflightMetrics.{ BrokerMetricRequest, ConsumerMetricRequest }
-import akka.kafka.benchmarks.app.RunTestCommand
-import akka.stream.Materializer
-import akka.stream.alpakka.csv.scaladsl.CsvFormatting
-import akka.stream.scaladsl.{ FileIO, Sink, Source }
 import com.codahale.metrics._
 import com.typesafe.scalalogging.LazyLogging
+import org.apache.pekko.NotUsed
+import org.apache.pekko.kafka.benchmarks.InflightMetrics.{ BrokerMetricRequest, ConsumerMetricRequest }
+import org.apache.pekko.kafka.benchmarks.app.RunTestCommand
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ FileIO, Flow, Sink, Source }
+import org.apache.pekko.util.ByteString
 
+import java.nio.charset.{ Charset, StandardCharsets }
+import java.nio.file.Paths
+import java.util.concurrent.{ ForkJoinPool, TimeUnit }
+import scala.collection.immutable
 import scala.concurrent.duration._
 import scala.concurrent.{ Await, ExecutionContext, Future }
 
@@ -44,14 +46,31 @@ object Timed extends LazyLogging {
     val metricsReportDetailPath = benchmarkReportBasePath.resolve(Paths.get(s"$testName-inflight-metrics-details.csv"))
     require(inflight.size > 1, "At least 2 records (a header and a data row) are required to make a report.")
     val summary = Source(List(inflight.head, inflight.last))
-      .via(CsvFormatting.format())
+      .via(format())
       .alsoTo(Sink.foreach(bs => logger.info(bs.utf8String)))
       .runWith(FileIO.toPath(metricsReportPath))
-    val details = Source(inflight).via(CsvFormatting.format()).runWith(FileIO.toPath(metricsReportDetailPath))
+    val details = Source(inflight).via(format()).runWith(FileIO.toPath(metricsReportDetailPath))
     implicit val ec: ExecutionContext = mat.executionContext
     Await.result(Future.sequence(List(summary, details)), 10.seconds)
   }
 
+  private def format[T <: immutable.Iterable[String]](
+      delimiter: Char = ',',
+      quoteChar: Char = '"',
+      escapeChar: Char = '\\',
+      endOfLine: String = "\r\n",
+      quotingStyle: CsvQuotingStyle = CsvQuotingStyle.Required,
+      charset: Charset = StandardCharsets.UTF_8,
+      byteOrderMark: Option[ByteString] = None): Flow[T, ByteString, NotUsed] = {
+    val formatter =
+      new CsvFormatter(delimiter, quoteChar, escapeChar, endOfLine, quotingStyle, charset)
+    byteOrderMark.fold {
+      Flow[T].map(formatter.toCsv).named("CsvFormatting")
+    } { bom =>
+      Flow[T].map(formatter.toCsv).named("CsvFormatting").prepend(Source.single(bom))
+    }
+  }
+
   def runPerfTest[F](command: RunTestCommand, fixtureGen: FixtureGen[F], testBody: (F, Meter) => Unit): Unit = {
     val name = command.testName
     val msgCount = command.msgCount
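
Note on the Timed.scala change above: the Alpakka CSV dependency is dropped and replaced by a local format() flow around the copied CsvFormatter. A minimal sketch of the same idea, using a hand-rolled stand-in for CsvFormatter.toCsv (the copied class itself is not shown in this hunk), so the shape of the stream is visible:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.stream.scaladsl.{ Sink, Source }
    import org.apache.pekko.util.ByteString

    object CsvFormatSketch extends App {
      implicit val system: ActorSystem = ActorSystem("csv-sketch")
      // Stand-in for CsvFormatter.toCsv: join fields with ',' and end the row with CRLF.
      // The real formatter additionally handles quoting, escaping and charsets.
      def toCsv(row: List[String]): ByteString = ByteString(row.mkString(",") + "\r\n")

      Source(List(List("time-ms", "msg-per-s"), List("1000", "25000")))
        .map(toCsv) // one ByteString per CSV row
        .runWith(Sink.foreach(bs => print(bs.utf8String)))
    }
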
diff --git a/benchmarks/src/main/scala/akka/kafka/benchmarks/app/RunTestCommand.scala b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/app/RunTestCommand.scala
similarity index 78%
rename from benchmarks/src/main/scala/akka/kafka/benchmarks/app/RunTestCommand.scala
rename to benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/app/RunTestCommand.scala
index 13ed0a38..4e935d98 100644
--- a/benchmarks/src/main/scala/akka/kafka/benchmarks/app/RunTestCommand.scala
+++ b/benchmarks/src/main/scala/org/apache/pekko/kafka/benchmarks/app/RunTestCommand.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.benchmarks.app
+package org.apache.pekko.kafka.benchmarks.app
 
-import akka.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
+import org.apache.pekko.kafka.benchmarks.PerfFixtureHelpers.FilledTopic
 
 case class RunTestCommand(testName: String, kafkaHost: String, filledTopic: FilledTopic) {
 
diff --git a/build.sbt b/build.sbt
index 84318b28..d4821d24 100644
--- a/build.sbt
+++ b/build.sbt
@@ -8,14 +8,14 @@ val Nightly = sys.env.get("EVENT_NAME").contains("schedule")
 // align in release.yml
 val Scala213 = "2.13.8"
 
-val AkkaBinaryVersionForDocs = "2.6"
-val akkaVersion = "2.6.19"
+val pekkoVersionForDocs = "current"
+val pekkoVersion = "0.0.0+26546-767209a8-SNAPSHOT"
 
 // Keep .scala-steward.conf pin in sync
 val kafkaVersion = "3.0.1"
 val KafkaVersionForDocs = "30"
-// This should align with the ScalaTest version used in the Akka 2.6.x testkit
-// https://github.com/akka/akka/blob/main/project/Dependencies.scala#L41
+// This should align with the ScalaTest version used in the Pekko testkit
+// https://github.com/apache/incubator-pekko/blob/main/project/Dependencies.scala#L70
 val scalatestVersion = "3.1.4"
 val testcontainersVersion = "1.16.3"
 val slf4jVersion = "1.7.36"
@@ -31,7 +31,8 @@ val confluentLibsExclusionRules = Seq(
 
 ThisBuild / resolvers ++= Seq(
   // for Jupiter interface (JUnit 5)
-  Resolver.jcenterRepo)
+  Resolver.jcenterRepo,
+  "Apache Snapshot Repo" at "https://repository.apache.org/content/groups/snapshots/")
 
 TaskKey[Unit]("verifyCodeFmt") := {
   javafmtCheckAll.all(ScopeFilter(inAnyProject)).result.value.toEither.left.foreach { _ =>
@@ -44,18 +45,20 @@ addCommandAlias("verifyCodeStyle", "headerCheck; verifyCodeFmt")
 addCommandAlias("verifyDocs", ";+doc ;unidoc ;docs/paradoxBrowse")
 
 val commonSettings = Def.settings(
-  organization := "com.typesafe.akka",
-  organizationName := "Lightbend Inc.",
-  organizationHomepage := Some(url("https://www.lightbend.com/")),
-  homepage := Some(url("https://doc.akka.io/docs/alpakka-kafka/current")),
-  scmInfo := Some(ScmInfo(url("https://github.com/akka/alpakka-kafka"), "git@github.com:akka/alpakka-kafka.git")),
-  developers += Developer("contributors",
-    "Contributors",
-    "",
-    url("https://github.com/akka/alpakka-kafka/graphs/contributors")),
+  organization := "org.apache.pekko",
+  organizationName := "Apache Software Foundation",
+  organizationHomepage := Some(url("https://www.apache.org")),
+  homepage := Some(url("https://pekko.apache.org/docs/pekko-connectors-kafka/current/")),
+  scmInfo := Some(ScmInfo(url("https://github.com/apache/incubator-pekko-connectors-kafka"),
+    "git@github.com:apache/incubator-pekko-connectors-kafka.git")),
+  developers += Developer(
+    "pekko-connectors-kafka",
+    "Apache Pekko Connectors Kafka Contributors",
+    "dev@pekko.apache.org",
+    url("https://github.com/apache/incubator-pekko-connectors-kafka/graphs/contributors")),
   startYear := Some(2014),
   licenses := Seq("Apache-2.0" -> url("https://opensource.org/licenses/Apache-2.0")),
-  description := "Alpakka is a Reactive Enterprise Integration library for Java and Scala, based on Reactive Streams and Akka.",
+  description := "Apache Pekko kafka connector is a Reactive Enterprise Integration library for Java and Scala, based on Reactive Streams and Pekko.",
   crossScalaVersions := Seq(Scala213),
   scalaVersion := Scala213,
   crossVersion := CrossVersion.binary,
@@ -72,7 +75,7 @@ val commonSettings = Def.settings(
   Compile / doc / scalacOptions := scalacOptions.value ++ Seq(
     "-Wconf:cat=scaladoc:i",
     "-doc-title",
-    "Alpakka Kafka",
+    "Apache Pekko Kafka Connector",
     "-doc-version",
     version.value,
     "-sourcepath",
@@ -81,10 +84,10 @@ val commonSettings = Def.settings(
     "akka.pattern:scala", // for some reason Scaladoc creates this
     "-doc-source-url", {
       val branch = if (isSnapshot.value) "master" else s"v${version.value}"
-      s"https://github.com/akka/alpakka-kafka/tree/${branch}€{FILE_PATH_EXT}#L€{FILE_LINE}"
+      s"https://github.com/apache/incubator-pekko-connectors-kafka/tree/${branch}€{FILE_PATH_EXT}#L€{FILE_LINE}"
     },
     "-doc-canonical-base-url",
-    "https://doc.akka.io/api/alpakka-kafka/current/"),
+    "https://pekko.apache.org/api/pekko-connectors-kafka/current/"),
   Compile / doc / scalacOptions -= "-Xfatal-warnings",
   // show full stack traces and test case durations
   testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-oDF"),
@@ -126,8 +129,8 @@ lazy val `pekko-connectors-kafka` =
             |  testkit - framework for testing the connector
             |
             |Other modules:
-            |  docs - the sources for generating https://doc.akka.io/docs/alpakka-kafka/current
-            |  benchmarks - compare direct Kafka API usage with Alpakka Kafka
+            |  docs - the sources for generating https://pekko.apache.org/docs/pekko-connectors-kafka/current/
+            |  benchmarks - compare direct Kafka API usage with Apache Pekko Kafka Connector
             |
             |Useful sbt tasks:
             |
@@ -150,7 +153,7 @@ lazy val `pekko-connectors-kafka` =
             |  tests/testOnly -- -t "A consume-transform-produce cycle must complete in happy-path scenario"
             |    run a single test with an exact name (use -z for partial match)
             |
-            |  benchmarks/IntegrationTest/testOnly *.AlpakkaKafkaPlainConsumer
+            |  benchmarks/IntegrationTest/testOnly *.PekkoConnectorsKafkaPlainConsumer
             |    run a single benchmark backed by Docker containers
           """.stripMargin)
     .aggregate(core, testkit, `cluster-sharding`, tests, benchmarks, docs)
@@ -163,13 +166,13 @@ lazy val core = project
   .settings(MetaInfLicenseNoticeCopy.settings)
   .settings(
     name := "pekko-connectors-kafka",
-    AutomaticModuleName.settings("akka.stream.alpakka.kafka"),
+    AutomaticModuleName.settings("org.apache.pekko.kafka"),
     libraryDependencies ++= Seq(
-      "com.typesafe.akka" %% "akka-stream" % akkaVersion,
-      "com.typesafe.akka" %% "akka-discovery" % akkaVersion % Provided,
+      "org.apache.pekko" %% "pekko-stream" % pekkoVersion,
+      "org.apache.pekko" %% "pekko-discovery" % pekkoVersion % Provided,
       "org.apache.kafka" % "kafka-clients" % kafkaVersion),
     mimaPreviousArtifacts := Set.empty, // temporarily disable mima checks
-    mimaBinaryIssueFilters += ProblemFilters.exclude[Problem]("akka.kafka.internal.*"))
+    mimaBinaryIssueFilters += ProblemFilters.exclude[Problem]("org.apache.pekko.kafka.internal.*"))
 
 lazy val testkit = project
   .dependsOn(core)
@@ -179,16 +182,16 @@ lazy val testkit = project
   .settings(MetaInfLicenseNoticeCopy.settings)
   .settings(
     name := "pekko-connectors-kafka-testkit",
-    AutomaticModuleName.settings("akka.stream.alpakka.kafka.testkit"),
+    AutomaticModuleName.settings("org.apache.pekko.kafka.testkit"),
     JupiterKeys.junitJupiterVersion := "5.8.2",
     libraryDependencies ++= Seq(
-      "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
+      "org.apache.pekko" %% "pekko-stream-testkit" % pekkoVersion,
       "org.testcontainers" % "kafka" % testcontainersVersion % Provided,
       "org.scalatest" %% "scalatest" % scalatestVersion % Provided,
       "junit" % "junit" % "4.13.2" % Provided,
       "org.junit.jupiter" % "junit-jupiter-api" % JupiterKeys.junitJupiterVersion.value % Provided),
     mimaPreviousArtifacts := Set.empty, // temporarily disable mima checks
-    mimaBinaryIssueFilters += ProblemFilters.exclude[Problem]("akka.kafka.testkit.internal.*"))
+    mimaBinaryIssueFilters += ProblemFilters.exclude[Problem]("org.apache.pekko.kafka.testkit.internal.*"))
 
 lazy val `cluster-sharding` = project
   .in(file("./cluster-sharding"))
@@ -199,9 +202,9 @@ lazy val `cluster-sharding` = project
   .settings(MetaInfLicenseNoticeCopy.settings)
   .settings(
     name := "pekko-connectors-kafka-cluster-sharding",
-    AutomaticModuleName.settings("akka.stream.alpakka.kafka.cluster.sharding"),
+    AutomaticModuleName.settings("org.apache.pekko.kafka.cluster.sharding"),
     libraryDependencies ++= Seq(
-      "com.typesafe.akka" %% "akka-cluster-sharding-typed" % akkaVersion),
+      "org.apache.pekko" %% "pekko-cluster-sharding-typed" % pekkoVersion),
     mimaPreviousArtifacts := Set.empty // temporarily disable mima checks
   )
 
@@ -216,7 +219,7 @@ lazy val tests = project
   .settings(
     name := "pekko-connectors-kafka-tests",
     libraryDependencies ++= Seq(
-      "com.typesafe.akka" %% "akka-discovery" % akkaVersion,
+      "org.apache.pekko" %% "pekko-discovery" % pekkoVersion,
       "com.google.protobuf" % "protobuf-java" % "3.19.1", // use the same version as in scalapb
       ("io.confluent" % "kafka-avro-serializer" % confluentAvroSerializerVersion % Test).excludeAll(
         confluentLibsExclusionRules: _*),
@@ -231,7 +234,7 @@ lazy val tests = project
       "org.hamcrest" % "hamcrest-library" % "2.2" % Test,
       "org.hamcrest" % "hamcrest" % "2.2" % Test,
       "net.aichler" % "jupiter-interface" % JupiterKeys.jupiterVersion.value % Test,
-      "com.typesafe.akka" %% "akka-slf4j" % akkaVersion % Test,
+      "org.apache.pekko" %% "pekko-slf4j" % pekkoVersion % Test,
       "ch.qos.logback" % "logback-classic" % "1.2.11" % Test,
       "org.slf4j" % "log4j-over-slf4j" % slf4jVersion % Test,
       // Schema registry uses Glassfish which uses java.util.logging
@@ -239,22 +242,27 @@ lazy val tests = project
       "org.mockito" % "mockito-core" % "4.6.1" % Test,
       "com.thesamet.scalapb" %% "scalapb-runtime" % "0.10.11" % Test),
     resolvers ++= Seq(
-      "Confluent Maven Repo".at("https://packages.confluent.io/maven/")),
+      "Confluent Maven Repo" at "https://packages.confluent.io/maven/",
+      "Apache Snapshot Repo" at "https://repository.apache.org/content/groups/snapshots/"),
     publish / skip := true,
     Test / fork := true,
     Test / parallelExecution := false,
     IntegrationTest / parallelExecution := false)
 
+lazy val pekkoAPI = "https://pekko.apache.org/api"
+lazy val pekkoDocs = "https://pekko.apache.org/docs"
+
 lazy val docs = project
-  .enablePlugins(AkkaParadoxPlugin, ParadoxSitePlugin, PreprocessPlugin, PublishRsyncPlugin)
+  .enablePlugins(ParadoxPlugin, ParadoxSitePlugin, PreprocessPlugin, PublishRsyncPlugin)
   .disablePlugins(MimaPlugin)
   .settings(commonSettings)
   .settings(
     name := "Apache Pekko Kafka Connector",
     publish / skip := true,
+    Compile / paradox / name := "Pekko",
     makeSite := makeSite.dependsOn(LocalRootProject / ScalaUnidoc / doc).value,
     previewPath := (Paradox / siteSubdirName).value,
-    Preprocess / siteSubdirName := s"api/alpakka-kafka/${projectInfoVersion.value}",
+    Preprocess / siteSubdirName := s"api/pekko-connectors-kafka/${projectInfoVersion.value}",
     Preprocess / sourceDirectory := (LocalRootProject / ScalaUnidoc / unidoc / target).value,
     Preprocess / preprocessRules := Seq(
       ("\\.java\\.scala".r, _ => ".java"),
@@ -265,21 +273,18 @@ lazy val docs = project
       // Add Java module name https://github.com/ThoughtWorksInc/sbt-api-mappings/issues/58
       ("https://docs\\.oracle\\.com/en/java/javase/11/docs/api/".r,
         _ => "https://docs\\.oracle\\.com/en/java/javase/11/docs/api/")),
-    Paradox / siteSubdirName := s"docs/alpakka-kafka/${projectInfoVersion.value}",
+    Paradox / siteSubdirName := s"docs/pekko-connectors-kafka/${projectInfoVersion.value}",
     paradoxGroups := Map("Language" -> Seq("Java", "Scala")),
     paradoxProperties ++= Map(
       "image.base_url" -> "images/",
       "confluent.version" -> confluentAvroSerializerVersion,
       "scalatest.version" -> scalatestVersion,
-      "scaladoc.akka.kafka.base_url" -> s"/${(Preprocess / siteSubdirName).value}/",
-      "javadoc.akka.kafka.base_url" -> "",
-      // Akka
-      "akka.version" -> akkaVersion,
-      "extref.akka.base_url" -> s"https://doc.akka.io/docs/akka/$AkkaBinaryVersionForDocs/%s",
-      "scaladoc.akka.base_url" -> s"https://doc.akka.io/api/akka/$AkkaBinaryVersionForDocs/",
-      "javadoc.akka.base_url" -> s"https://doc.akka.io/japi/akka/$AkkaBinaryVersionForDocs/",
-      "javadoc.akka.link_style" -> "direct",
-      "extref.akka-management.base_url" -> s"https://doc.akka.io/docs/akka-management/current/%s",
+      "pekko.version" -> pekkoVersion,
+      "extref.pekko.base_url" -> s"$pekkoDocs/pekko/$pekkoVersionForDocs/%s",
+      "scaladoc.org.apache.pekko.base_url" -> s"$pekkoAPI/pekko/$pekkoVersionForDocs/",
+      "javadoc.org.apache.pekko.base_url" -> s"$pekkoAPI/pekko/$pekkoVersionForDocs/",
+      "javadoc.pekko.link_style" -> "direct",
+      "extref.pekko-management.base_url" -> s"$pekkoDocs/pekko-management/$pekkoVersionForDocs/%s",
       // Kafka
       "kafka.version" -> kafkaVersion,
       "extref.kafka.base_url" -> s"https://kafka.apache.org/$KafkaVersionForDocs/%s",
@@ -296,7 +301,7 @@ lazy val docs = project
       "testcontainers.version" -> testcontainersVersion,
       "javadoc.org.testcontainers.containers.base_url" -> s"https://www.javadoc.io/doc/org.testcontainers/testcontainers/$testcontainersVersion/",
       "javadoc.org.testcontainers.containers.link_style" -> "direct"),
-    apidocRootPackage := "akka",
+    apidocRootPackage := "org.apache.pekko",
     paradoxRoots := List("index.html"),
     resolvers += Resolver.jcenterRepo,
     publishRsyncArtifacts += makeSite.value -> "www/",
@@ -320,8 +325,7 @@ lazy val benchmarks = project
       "io.dropwizard.metrics" % "metrics-core" % "4.2.11",
       "ch.qos.logback" % "logback-classic" % "1.2.11",
       "org.slf4j" % "log4j-over-slf4j" % slf4jVersion,
-      "com.lightbend.akka" %% "akka-stream-alpakka-csv" % "3.0.4",
       "org.testcontainers" % "kafka" % testcontainersVersion % IntegrationTest,
-      "com.typesafe.akka" %% "akka-slf4j" % akkaVersion % IntegrationTest,
-      "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion % IntegrationTest,
+      "org.apache.pekko" %% "pekko-slf4j" % pekkoVersion % IntegrationTest,
+      "org.apache.pekko" %% "pekko-stream-testkit" % pekkoVersion % IntegrationTest,
       "org.scalatest" %% "scalatest" % scalatestVersion % IntegrationTest))
diff --git a/cluster-sharding/src/main/scala/akka/kafka/cluster/sharding/KafkaClusterSharding.scala b/cluster-sharding/src/main/scala/org/apache/pekko/kafka/cluster/sharding/KafkaClusterSharding.scala
similarity index 78%
rename from cluster-sharding/src/main/scala/akka/kafka/cluster/sharding/KafkaClusterSharding.scala
rename to cluster-sharding/src/main/scala/org/apache/pekko/kafka/cluster/sharding/KafkaClusterSharding.scala
index 8aba23e9..149d541b 100644
--- a/cluster-sharding/src/main/scala/akka/kafka/cluster/sharding/KafkaClusterSharding.scala
+++ b/cluster-sharding/src/main/scala/org/apache/pekko/kafka/cluster/sharding/KafkaClusterSharding.scala
@@ -3,29 +3,29 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.cluster.sharding
+package org.apache.pekko.kafka.cluster.sharding
 
 import java.util.concurrent.{ CompletionStage, ConcurrentHashMap }
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.actor.typed.Behavior
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.actor.{ ActorSystem, ClassicActorSystemProvider, ExtendedActorSystem, Extension, ExtensionId }
-import akka.annotation.{ ApiMayChange, InternalApi }
-import akka.cluster.sharding.external.ExternalShardAllocation
-import akka.cluster.sharding.typed.scaladsl.EntityTypeKey
-import akka.cluster.sharding.typed.{ ShardingEnvelope, ShardingMessageExtractor }
-import akka.cluster.typed.Cluster
-import akka.kafka.scaladsl.MetadataClient
-import akka.kafka._
-import akka.util.Timeout._
+import org.apache.pekko.actor.typed.Behavior
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.actor.{ ActorSystem, ClassicActorSystemProvider, ExtendedActorSystem, Extension, ExtensionId }
+import org.apache.pekko.annotation.{ ApiMayChange, InternalApi }
+import org.apache.pekko.cluster.sharding.external.ExternalShardAllocation
+import org.apache.pekko.cluster.sharding.typed.scaladsl.EntityTypeKey
+import org.apache.pekko.cluster.sharding.typed.{ ShardingEnvelope, ShardingMessageExtractor }
+import org.apache.pekko.cluster.typed.Cluster
+import org.apache.pekko.kafka.scaladsl.MetadataClient
+import org.apache.pekko.kafka._
+import org.apache.pekko.util.Timeout._
 import org.apache.kafka.common.utils.Utils
 
 import scala.concurrent.duration._
 import scala.concurrent.{ ExecutionContextExecutor, Future }
 import scala.util.{ Failure, Success }
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.util.JavaDurationConverters._
 import org.slf4j.LoggerFactory
 
 import scala.compat.java8.FutureConverters._
@@ -42,7 +42,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   /**
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy will be automatically determined by querying the Kafka
@@ -50,7 +50,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * the Kafka Consumer connection required to retrieve the number of partitions. Each call to this method will result
    * in a round trip to Kafka. This method should only be called once per entity type [[M]], per local actor system.
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -64,7 +64,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    *
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy will be automatically determined by querying the Kafka
@@ -72,7 +72,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * the Kafka Consumer connection required to retrieve the number of partitions. Each call to this method will result
    * in a round trip to Kafka. This method should only be called once per entity type [[M]], per local actor system.
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -86,12 +86,12 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   /**
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy is provided explicitly with [[kafkaPartitions]].
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -101,7 +101,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   /**
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy will be automatically determined by querying the Kafka
@@ -110,7 +110,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * a field from the Entity to use as the entity id for the hashing strategy. Each call to this method will result
    * in a round trip to Kafka. This method should only be called once per entity type [[M]], per local actor system.
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -126,7 +126,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    *
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy will be automatically determined by querying the Kafka
@@ -135,7 +135,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * a field from the Entity to use as the entity id for the hashing strategy. Each call to this method will result
    * in a round trip to Kafka. This method should only be called once per entity type [[M]], per local actor system.
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -152,12 +152,12 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   /**
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy is provided explicitly with [[kafkaPartitions]].
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -168,12 +168,12 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   /**
    * API MAY CHANGE
    *
-   * Asynchronously return a [[akka.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
+   * Asynchronously return a [[org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor]] with a default hashing strategy
    * based on Apache Kafka's [[org.apache.kafka.clients.producer.internals.DefaultPartitioner]].
    *
    * The number of partitions to use with the hashing strategy is provided explicitly with [[kafkaPartitions]].
    *
-   * All topics used in a Consumer [[akka.kafka.Subscription]] must contain the same number of partitions to ensure
+   * All topics used in a Consumer [[org.apache.pekko.kafka.Subscription]] must contain the same number of partitions to ensure
    * that entities are routed to the same Entity type.
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
@@ -200,7 +200,7 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
   }
 
   private val rebalanceListeners =
-    new ConcurrentHashMap[EntityTypeKey[_], akka.actor.typed.ActorRef[ConsumerRebalanceEvent]]()
+    new ConcurrentHashMap[EntityTypeKey[_], org.apache.pekko.actor.typed.ActorRef[ConsumerRebalanceEvent]]()
 
   /**
    * API MAY CHANGE
@@ -210,16 +210,16 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * the rebalance listener will use the [[ExternalShardAllocation]] client to update the External Sharding strategy
    * accordingly so that entities are (eventually) routed to the local Akka cluster member.
    *
-   * Returns an Akka typed [[akka.actor.typed.ActorRef]]. This must be converted to a classic actor before it can be
+   * Returns a Pekko typed [[org.apache.pekko.actor.typed.ActorRef]]. This must be converted to a classic actor before it can be
    * passed to an Alpakka Kafka [[ConsumerSettings]].
    *
    * {{{
-   * import akka.actor.typed.scaladsl.adapter._
-   * val listenerClassicActorRef: akka.actor.ActorRef = listenerTypedActorRef.toClassic
+   * import org.apache.pekko.actor.typed.scaladsl.adapter._
+   * val listenerClassicActorRef: org.apache.pekko.actor.ActorRef = listenerTypedActorRef.toClassic
    * }}}
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
-  def rebalanceListener(typeKey: EntityTypeKey[_]): akka.actor.typed.ActorRef[ConsumerRebalanceEvent] = {
+  def rebalanceListener(typeKey: EntityTypeKey[_]): org.apache.pekko.actor.typed.ActorRef[ConsumerRebalanceEvent] = {
     rebalanceListeners.computeIfAbsent(typeKey,
       _ => {
         system.toTyped
@@ -237,18 +237,18 @@ final class KafkaClusterSharding(system: ExtendedActorSystem) extends Extension
    * the rebalance listener will use the [[ExternalShardAllocation]] client to update the External Sharding strategy
    * accordingly so that entities are (eventually) routed to the local Akka cluster member.
    *
-   * Returns an Akka typed [[akka.actor.typed.ActorRef]]. This must be converted to a classic actor before it can be
+   * Returns a Pekko typed [[org.apache.pekko.actor.typed.ActorRef]]. This must be converted to a classic actor before it can be
    * passed to an Alpakka Kafka [[ConsumerSettings]].
    *
    * {{{
-   * import akka.actor.typed.scaladsl.adapter._
-   * val listenerClassicActorRef: akka.actor.ActorRef = listenerTypedActorRef.toClassic
+   * import org.apache.pekko.actor.typed.scaladsl.adapter._
+   * val listenerClassicActorRef: org.apache.pekko.actor.ActorRef = listenerTypedActorRef.toClassic
    * }}}
    */
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/1074")
   def rebalanceListener(
-      typeKey: akka.cluster.sharding.typed.javadsl.EntityTypeKey[_])
-      : akka.actor.typed.ActorRef[ConsumerRebalanceEvent] = {
+      typeKey: org.apache.pekko.cluster.sharding.typed.javadsl.EntityTypeKey[_])
+      : org.apache.pekko.actor.typed.ActorRef[ConsumerRebalanceEvent] = {
     rebalanceListener(typeKey.asScala)
   }
 }
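
The scaladoc above describes how messageExtractor queries Kafka once for the partition count. A hedged usage sketch against the renamed packages, assuming the messageExtractor(topic, timeout, settings) overload known from Alpakka Kafka and a hypothetical UserEnvelope entity message:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.ConsumerSettings
    import org.apache.pekko.kafka.cluster.sharding.KafkaClusterSharding
    import org.apache.kafka.common.serialization.StringDeserializer
    import scala.concurrent.duration._

    final case class UserEnvelope(userId: String, payload: String) // hypothetical entity message

    implicit val system: ActorSystem = ActorSystem("sharding-example")
    val settings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("user-sharding")

    // One round trip to Kafka to discover the partition count; call once per entity type.
    val extractorF = KafkaClusterSharding(system).messageExtractor[UserEnvelope](
      "user-topic", 10.seconds, settings)
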
diff --git a/core/src/main/scala/akka/kafka/CommitTimeoutException.scala b/core/src/main/scala/org/apache/pekko/kafka/CommitTimeoutException.scala
similarity index 92%
rename from core/src/main/scala/akka/kafka/CommitTimeoutException.scala
rename to core/src/main/scala/org/apache/pekko/kafka/CommitTimeoutException.scala
index 9bb01a10..82a752a5 100644
--- a/core/src/main/scala/akka/kafka/CommitTimeoutException.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/CommitTimeoutException.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.concurrent.TimeoutException
 
diff --git a/core/src/main/scala/akka/kafka/CommitterSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/CommitterSettings.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/CommitterSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/CommitterSettings.scala
index 0bcc1932..00e2e3e0 100644
--- a/core/src/main/scala/akka/kafka/CommitterSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/CommitterSettings.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 import java.util.concurrent.TimeUnit
 
-import akka.annotation.ApiMayChange
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
 
 import scala.concurrent.duration._
@@ -104,7 +104,7 @@ object CommitterSettings {
    * Create settings from the default configuration
    * `akka.kafka.committer`.
    */
-  def apply(actorSystem: akka.actor.ActorSystem): CommitterSettings =
+  def apply(actorSystem: org.apache.pekko.actor.ActorSystem): CommitterSettings =
     apply(actorSystem.settings.config.getConfig(configPath))
 
   /**
@@ -113,7 +113,7 @@ object CommitterSettings {
    *
    * For use with the `akka.actor.typed` API.
    */
-  def apply(actorSystem: akka.actor.ClassicActorSystemProvider): CommitterSettings =
+  def apply(actorSystem: org.apache.pekko.actor.ClassicActorSystemProvider): CommitterSettings =
     apply(actorSystem.classicSystem.settings.config.getConfig(configPath))
 
   /**
@@ -133,7 +133,7 @@ object CommitterSettings {
    * Java API: Create settings from the default configuration
    * `akka.kafka.committer`.
    */
-  def create(actorSystem: akka.actor.ActorSystem): CommitterSettings =
+  def create(actorSystem: org.apache.pekko.actor.ActorSystem): CommitterSettings =
     apply(actorSystem)
 
   /**
@@ -142,7 +142,7 @@ object CommitterSettings {
    *
    * For use with the `akka.actor.typed` API.
    */
-  def create(actorSystem: akka.actor.ClassicActorSystemProvider): CommitterSettings =
+  def create(actorSystem: org.apache.pekko.actor.ClassicActorSystemProvider): CommitterSettings =
     apply(actorSystem)
 
   /**
@@ -156,7 +156,7 @@ object CommitterSettings {
 
 /**
  * Settings for committer. See `akka.kafka.committer` section in
- * reference.conf. Note that the [[akka.kafka.CommitterSettings$ companion]] object provides
+ * reference.conf. Note that the [[org.apache.pekko.kafka.CommitterSettings$ companion]] object provides
  * `apply` and `create` functions for convenient construction of the settings, together with
  * the `with` methods.
  */
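
Construction of these settings is unchanged apart from the import; note that the config path `akka.kafka.committer` is not renamed by this commit. A small sketch:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.CommitterSettings
    import scala.concurrent.duration._

    implicit val system: ActorSystem = ActorSystem("committer-example")
    // Reads the (still akka-named) `akka.kafka.committer` config section.
    val committerSettings =
      CommitterSettings(system)
        .withMaxBatch(500L)
        .withMaxInterval(5.seconds)
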
diff --git a/core/src/main/scala/akka/kafka/ConnectionCheckerSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/ConnectionCheckerSettings.scala
similarity index 97%
rename from core/src/main/scala/akka/kafka/ConnectionCheckerSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ConnectionCheckerSettings.scala
index 830af9b2..2745ab51 100644
--- a/core/src/main/scala/akka/kafka/ConnectionCheckerSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ConnectionCheckerSettings.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
 
 import scala.concurrent.duration._
diff --git a/core/src/main/scala/akka/kafka/ConsumerFailed.scala b/core/src/main/scala/org/apache/pekko/kafka/ConsumerFailed.scala
similarity index 97%
rename from core/src/main/scala/akka/kafka/ConsumerFailed.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ConsumerFailed.scala
index 5be16b15..e57ec7a2 100644
--- a/core/src/main/scala/akka/kafka/ConsumerFailed.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ConsumerFailed.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import scala.concurrent.duration.FiniteDuration
 
diff --git a/core/src/main/scala/akka/kafka/ConsumerMessage.scala b/core/src/main/scala/org/apache/pekko/kafka/ConsumerMessage.scala
similarity index 97%
rename from core/src/main/scala/akka/kafka/ConsumerMessage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ConsumerMessage.scala
index 6a80ccbe..4ace8621 100644
--- a/core/src/main/scala/akka/kafka/ConsumerMessage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ConsumerMessage.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.Objects
 import java.util.concurrent.CompletionStage
 
-import akka.Done
-import akka.annotation.{ DoNotInherit, InternalApi }
-import akka.kafka.internal.{ CommittableOffsetBatchImpl, CommittedMarker }
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.{ DoNotInherit, InternalApi }
+import org.apache.pekko.kafka.internal.{ CommittableOffsetBatchImpl, CommittedMarker }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.TopicPartition
 
diff --git a/core/src/main/scala/akka/kafka/ConsumerSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/ConsumerSettings.scala
similarity index 96%
rename from core/src/main/scala/akka/kafka/ConsumerSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ConsumerSettings.scala
index e6cc8c4e..113aed60 100644
--- a/core/src/main/scala/akka/kafka/ConsumerSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ConsumerSettings.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.Optional
 import java.util.concurrent.{ CompletionStage, Executor }
 
-import akka.annotation.InternalApi
-import akka.kafka.internal._
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.internal._
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
 import org.apache.kafka.clients.consumer.{ Consumer, ConsumerConfig, KafkaConsumer }
 import org.apache.kafka.common.serialization.Deserializer
@@ -31,7 +31,7 @@ object ConsumerSettings {
    * Key or value deserializer can be passed explicitly or retrieved from configuration.
    */
   def apply[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keyDeserializer: Option[Deserializer[K]],
       valueDeserializer: Option[Deserializer[V]]): ConsumerSettings[K, V] = {
     val config = system.settings.config.getConfig(configPath)
@@ -46,7 +46,7 @@ object ConsumerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def apply[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keyDeserializer: Option[Deserializer[K]],
       valueDeserializer: Option[Deserializer[V]]): ConsumerSettings[K, V] =
     apply(system.classicSystem, keyDeserializer, valueDeserializer)
@@ -117,7 +117,7 @@ object ConsumerSettings {
    * Key and value serializer must be passed explicitly.
    */
   def apply[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keyDeserializer: Deserializer[K],
       valueDeserializer: Deserializer[V]): ConsumerSettings[K, V] =
     apply(system, Option(keyDeserializer), Option(valueDeserializer))
@@ -130,7 +130,7 @@ object ConsumerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def apply[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keyDeserializer: Deserializer[K],
       valueDeserializer: Deserializer[V]): ConsumerSettings[K, V] =
     apply(system, Option(keyDeserializer), Option(valueDeserializer))
@@ -152,7 +152,7 @@ object ConsumerSettings {
    * Key or value deserializer can be passed explicitly or retrieved from configuration.
    */
   def create[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keyDeserializer: Optional[Deserializer[K]],
       valueDeserializer: Optional[Deserializer[V]]): ConsumerSettings[K, V] =
     apply(system, keyDeserializer.asScala, valueDeserializer.asScala)
@@ -165,7 +165,7 @@ object ConsumerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def create[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keyDeserializer: Optional[Deserializer[K]],
       valueDeserializer: Optional[Deserializer[V]]): ConsumerSettings[K, V] =
     apply(system, keyDeserializer.asScala, valueDeserializer.asScala)
@@ -187,7 +187,7 @@ object ConsumerSettings {
    * Key and value serializer must be passed explicitly.
    */
   def create[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keyDeserializer: Deserializer[K],
       valueDeserializer: Deserializer[V]): ConsumerSettings[K, V] =
     apply(system, keyDeserializer, valueDeserializer)
@@ -200,7 +200,7 @@ object ConsumerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def create[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keyDeserializer: Deserializer[K],
       valueDeserializer: Deserializer[V]): ConsumerSettings[K, V] =
     apply(system, keyDeserializer, valueDeserializer)
@@ -228,7 +228,7 @@ object ConsumerSettings {
 
 /**
  * Settings for consumers. See `akka.kafka.consumer` section in
- * `reference.conf`. Note that the [[akka.kafka.ConsumerSettings$ companion]] object provides
+ * `reference.conf`. Note that the [[org.apache.pekko.kafka.ConsumerSettings$ companion]] object provides
  * `apply` and `create` functions for convenient construction of the settings, together with
  * the `with` methods.
  *
@@ -376,7 +376,7 @@ class ConsumerSettings[K, V] @InternalApi private[kafka] (
 
   /**
    * If offset commit requests are not completed within this timeout
-   * the returned Future is completed with [[akka.kafka.CommitTimeoutException]].
+   * the returned Future is completed with [[org.apache.pekko.kafka.CommitTimeoutException]].
    */
   def withCommitTimeout(commitTimeout: FiniteDuration): ConsumerSettings[K, V] =
     copy(commitTimeout = commitTimeout)
@@ -384,7 +384,7 @@ class ConsumerSettings[K, V] @InternalApi private[kafka] (
   /**
    * Java API:
    * If offset commit requests are not completed within this timeout
-   * the returned Future is completed with [[akka.kafka.CommitTimeoutException]].
+   * the returned Future is completed with [[org.apache.pekko.kafka.CommitTimeoutException]].
    */
   def withCommitTimeout(commitTimeout: java.time.Duration): ConsumerSettings[K, V] =
     copy(commitTimeout = commitTimeout.asScala)
@@ -404,7 +404,7 @@ class ConsumerSettings[K, V] @InternalApi private[kafka] (
 
   /**
    * Fully qualified config path which holds the dispatcher configuration
-   * to be used by the [[akka.kafka.KafkaConsumerActor]]. Some blocking may occur.
+   * to be used by the [[org.apache.pekko.kafka.KafkaConsumerActor]]. Some blocking may occur.
    */
   def withDispatcher(dispatcher: String): ConsumerSettings[K, V] =
     copy(dispatcher = dispatcher)
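
Callers only swap imports; a minimal sketch of building consumer settings against the renamed package (withBootstrapServers, withGroupId and withProperty as known from Alpakka Kafka):

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.ConsumerSettings
    import org.apache.kafka.clients.consumer.ConsumerConfig
    import org.apache.kafka.common.serialization.StringDeserializer

    implicit val system: ActorSystem = ActorSystem("consumer-example")
    // Deserializers are passed explicitly; other defaults come from `akka.kafka.consumer`.
    val consumerSettings =
      ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
        .withBootstrapServers("localhost:9092")
        .withGroupId("group1")
        .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
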
diff --git a/core/src/main/scala/akka/kafka/KafkaConnectionFailed.scala b/core/src/main/scala/org/apache/pekko/kafka/KafkaConnectionFailed.scala
similarity index 92%
rename from core/src/main/scala/akka/kafka/KafkaConnectionFailed.scala
rename to core/src/main/scala/org/apache/pekko/kafka/KafkaConnectionFailed.scala
index cecaca75..3ca133da 100644
--- a/core/src/main/scala/akka/kafka/KafkaConnectionFailed.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/KafkaConnectionFailed.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import org.apache.kafka.common.errors.TimeoutException
 
diff --git a/core/src/main/scala/akka/kafka/KafkaConsumerActor.scala b/core/src/main/scala/org/apache/pekko/kafka/KafkaConsumerActor.scala
similarity index 75%
rename from core/src/main/scala/akka/kafka/KafkaConsumerActor.scala
rename to core/src/main/scala/org/apache/pekko/kafka/KafkaConsumerActor.scala
index 43e8074f..856e518a 100644
--- a/core/src/main/scala/akka/kafka/KafkaConsumerActor.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/KafkaConsumerActor.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.actor.{ ActorRef, NoSerializationVerificationNeeded, Props }
-import akka.annotation.InternalApi
-import akka.kafka.internal.{ KafkaConsumerActor => InternalKafkaConsumerActor }
+import org.apache.pekko.actor.{ ActorRef, NoSerializationVerificationNeeded, Props }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.internal.{ KafkaConsumerActor => InternalKafkaConsumerActor }
 
 object KafkaConsumerActor {
 
@@ -35,7 +35,7 @@ object KafkaConsumerActor {
 
   /**
    * Creates Props for the Kafka Consumer Actor with a reference back to the owner of it
-   * which will be signalled with [[akka.actor.Status.Failure Failure(exception)]], in case the
+   * which will be signalled with [[org.apache.pekko.actor.Status.Failure Failure(exception)]], in case the
    * Kafka client instance can't be created.
    */
   def props[K, V](owner: ActorRef, settings: ConsumerSettings[K, V]): Props =
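
These Props are what one would use to share a single consumer actor across several streams; a sketch:

    import org.apache.pekko.actor.{ ActorRef, ActorSystem }
    import org.apache.pekko.kafka.{ ConsumerSettings, KafkaConsumerActor }
    import org.apache.kafka.common.serialization.StringDeserializer

    implicit val system: ActorSystem = ActorSystem("consumer-actor-example")
    val settings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("shared")
    // A single consumer actor that several externally managed sources can share.
    val consumer: ActorRef = system.actorOf(KafkaConsumerActor.props(settings))
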
diff --git a/core/src/main/scala/akka/kafka/Metadata.scala b/core/src/main/scala/org/apache/pekko/kafka/Metadata.scala
similarity index 98%
rename from core/src/main/scala/akka/kafka/Metadata.scala
rename to core/src/main/scala/org/apache/pekko/kafka/Metadata.scala
index 2f5ba0ce..b9f384d7 100644
--- a/core/src/main/scala/akka/kafka/Metadata.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/Metadata.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.Optional
 
-import akka.actor.NoSerializationVerificationNeeded
+import org.apache.pekko.actor.NoSerializationVerificationNeeded
 import org.apache.kafka.clients.consumer.{ OffsetAndMetadata, OffsetAndTimestamp }
 import org.apache.kafka.common.{ PartitionInfo, TopicPartition }
 
diff --git a/core/src/main/scala/akka/kafka/OffsetResetProtectionSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/OffsetResetProtectionSettings.scala
similarity index 96%
rename from core/src/main/scala/akka/kafka/OffsetResetProtectionSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/OffsetResetProtectionSettings.scala
index d0590164..1cba0cd4 100644
--- a/core/src/main/scala/akka/kafka/OffsetResetProtectionSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/OffsetResetProtectionSettings.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 import java.time.{ Duration => JDuration }
 
-import akka.annotation.InternalApi
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
 
 import scala.concurrent.duration._
diff --git a/core/src/main/scala/akka/kafka/ProducerMessage.scala b/core/src/main/scala/org/apache/pekko/kafka/ProducerMessage.scala
similarity index 99%
rename from core/src/main/scala/akka/kafka/ProducerMessage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ProducerMessage.scala
index 294f474c..698aeaa4 100644
--- a/core/src/main/scala/akka/kafka/ProducerMessage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ProducerMessage.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.NotUsed
+import org.apache.pekko.NotUsed
 import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }
 
 import scala.collection.immutable
diff --git a/core/src/main/scala/akka/kafka/ProducerSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/ProducerSettings.scala
similarity index 95%
rename from core/src/main/scala/akka/kafka/ProducerSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/ProducerSettings.scala
index 793c8aa9..de49a5b6 100644
--- a/core/src/main/scala/akka/kafka/ProducerSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/ProducerSettings.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.Optional
 import java.util.concurrent.{ CompletionStage, Executor }
 
-import akka.annotation.InternalApi
-import akka.kafka.internal.ConfigSettings
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.internal.ConfigSettings
 import com.typesafe.config.Config
 import org.apache.kafka.clients.producer.{ KafkaProducer, Producer, ProducerConfig }
 import org.apache.kafka.common.serialization.Serializer
@@ -17,7 +17,7 @@ import org.apache.kafka.common.serialization.Serializer
 import scala.jdk.CollectionConverters._
 import scala.compat.java8.OptionConverters._
 import scala.concurrent.duration._
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.util.JavaDurationConverters._
 
 import scala.concurrent.{ ExecutionContext, Future }
 import scala.compat.java8.FutureConverters._
@@ -32,7 +32,7 @@ object ProducerSettings {
    * Key or value serializer can be passed explicitly or retrieved from configuration.
    */
   def apply[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keySerializer: Option[Serializer[K]],
       valueSerializer: Option[Serializer[V]]): ProducerSettings[K, V] =
     apply(system.settings.config.getConfig(configPath), keySerializer, valueSerializer)
@@ -45,7 +45,7 @@ object ProducerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def apply[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keySerializer: Option[Serializer[K]],
       valueSerializer: Option[Serializer[V]]): ProducerSettings[K, V] =
     apply(system.classicSystem, keySerializer, valueSerializer)
@@ -92,7 +92,7 @@ object ProducerSettings {
    * Key and value serializer must be passed explicitly.
    */
   def apply[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keySerializer: Serializer[K],
       valueSerializer: Serializer[V]): ProducerSettings[K, V] =
     apply(system, Option(keySerializer), Option(valueSerializer))
@@ -105,7 +105,7 @@ object ProducerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def apply[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keySerializer: Serializer[K],
       valueSerializer: Serializer[V]): ProducerSettings[K, V] =
     apply(system, Option(keySerializer), Option(valueSerializer))
@@ -127,7 +127,7 @@ object ProducerSettings {
    * Key or value serializer can be passed explicitly or retrieved from configuration.
    */
   def create[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keySerializer: Optional[Serializer[K]],
       valueSerializer: Optional[Serializer[V]]): ProducerSettings[K, V] =
     apply(system, keySerializer.asScala, valueSerializer.asScala)
@@ -140,7 +140,7 @@ object ProducerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def create[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keySerializer: Optional[Serializer[K]],
       valueSerializer: Optional[Serializer[V]]): ProducerSettings[K, V] =
     apply(system, keySerializer.asScala, valueSerializer.asScala)
@@ -162,7 +162,7 @@ object ProducerSettings {
    * Key and value serializer must be passed explicitly.
    */
   def create[K, V](
-      system: akka.actor.ActorSystem,
+      system: org.apache.pekko.actor.ActorSystem,
       keySerializer: Serializer[K],
       valueSerializer: Serializer[V]): ProducerSettings[K, V] =
     apply(system, keySerializer, valueSerializer)
@@ -175,7 +175,7 @@ object ProducerSettings {
    * For use with the `akka.actor.typed` API.
    */
   def create[K, V](
-      system: akka.actor.ClassicActorSystemProvider,
+      system: org.apache.pekko.actor.ClassicActorSystemProvider,
       keySerializer: Serializer[K],
       valueSerializer: Serializer[V]): ProducerSettings[K, V] =
     apply(system, keySerializer, valueSerializer)
@@ -202,7 +202,7 @@ object ProducerSettings {
 
 /**
  * Settings for producers. See `akka.kafka.producer` section in
- * reference.conf. Note that the [[akka.kafka.ProducerSettings$ companion]] object provides
+ * reference.conf. Note that the [[org.apache.pekko.kafka.ProducerSettings$ companion]] object provides
  * `apply` and `create` functions for convenient construction of the settings, together with
  * the `with` methods.
  *
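
The producer settings follow the same pattern; a sketch:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.ProducerSettings
    import org.apache.kafka.common.serialization.StringSerializer

    implicit val system: ActorSystem = ActorSystem("producer-example")
    // Serializers passed explicitly; other defaults come from `akka.kafka.producer`.
    val producerSettings =
      ProducerSettings(system, new StringSerializer, new StringSerializer)
        .withBootstrapServers("localhost:9092")
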
diff --git a/core/src/main/scala/akka/kafka/RestrictedConsumer.scala b/core/src/main/scala/org/apache/pekko/kafka/RestrictedConsumer.scala
similarity index 95%
rename from core/src/main/scala/akka/kafka/RestrictedConsumer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/RestrictedConsumer.scala
index 90570049..2dec2531 100644
--- a/core/src/main/scala/akka/kafka/RestrictedConsumer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/RestrictedConsumer.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.annotation.ApiMayChange
+import org.apache.pekko.annotation.ApiMayChange
 import org.apache.kafka.clients.consumer.{ Consumer, OffsetAndMetadata, OffsetAndTimestamp }
 import org.apache.kafka.common.TopicPartition
 
 /**
  * Offers parts of the [[org.apache.kafka.clients.consumer.Consumer]] API which becomes available to
- * the [[akka.kafka.scaladsl.PartitionAssignmentHandler]] callbacks.
+ * the [[org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler]] callbacks.
  */
 @ApiMayChange
 final class RestrictedConsumer(consumer: Consumer[_, _], duration: java.time.Duration) {
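
A minimal sketch of a `PartitionAssignmentHandler` receiving the `RestrictedConsumer` described above; the handler class and its logging are hypothetical:

    import org.apache.pekko.kafka.RestrictedConsumer
    import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
    import org.apache.kafka.common.TopicPartition

    // Hypothetical handler: each callback receives the RestrictedConsumer,
    // which exposes only the time-bounded subset of the Kafka Consumer API.
    final class LoggingAssignmentHandler extends PartitionAssignmentHandler {
      override def onAssign(assigned: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Assigned: $assigned")
      override def onRevoke(revoked: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Revoked: $revoked")
      override def onLost(lost: Set[TopicPartition], consumer: RestrictedConsumer): Unit = ()
      override def onStop(current: Set[TopicPartition], consumer: RestrictedConsumer): Unit = ()
    }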
diff --git a/core/src/main/scala/akka/kafka/Subscriptions.scala b/core/src/main/scala/org/apache/pekko/kafka/Subscriptions.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/Subscriptions.scala
rename to core/src/main/scala/org/apache/pekko/kafka/Subscriptions.scala
index 82079796..0d863e61 100644
--- a/core/src/main/scala/akka/kafka/Subscriptions.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/Subscriptions.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.actor.ActorRef
-import akka.annotation.{ ApiMayChange, InternalApi }
-import akka.kafka.internal.PartitionAssignmentHelpers
-import akka.kafka.internal.PartitionAssignmentHelpers.EmptyPartitionAssignmentHandler
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.{ ApiMayChange, InternalApi }
+import org.apache.pekko.kafka.internal.PartitionAssignmentHelpers
+import org.apache.pekko.kafka.internal.PartitionAssignmentHelpers.EmptyPartitionAssignmentHandler
 import org.apache.kafka.common.TopicPartition
 
 import scala.annotation.varargs
@@ -16,10 +16,10 @@ import scala.jdk.CollectionConverters._
 
 sealed trait Subscription {
 
-  /** ActorRef which is to receive [[akka.kafka.ConsumerRebalanceEvent]] signals when rebalancing happens */
+  /** ActorRef which is to receive [[org.apache.pekko.kafka.ConsumerRebalanceEvent]] signals when rebalancing happens */
   def rebalanceListener: Option[ActorRef]
 
-  /** Configure this actor ref to receive [[akka.kafka.ConsumerRebalanceEvent]] signals */
+  /** Configure this actor ref to receive [[org.apache.pekko.kafka.ConsumerRebalanceEvent]] signals */
   def withRebalanceListener(ref: ActorRef): Subscription
 
   def renderStageAttribute: String
@@ -46,13 +46,13 @@ sealed trait ManualSubscription extends Subscription {
  */
 sealed trait AutoSubscription extends Subscription {
 
-  /** ActorRef which is to receive [[akka.kafka.ConsumerRebalanceEvent]] signals when rebalancing happens */
+  /** ActorRef which is to receive [[org.apache.pekko.kafka.ConsumerRebalanceEvent]] signals when rebalancing happens */
   def rebalanceListener: Option[ActorRef]
 
   @InternalApi
   def partitionAssignmentHandler: scaladsl.PartitionAssignmentHandler
 
-  /** Configure this actor ref to receive [[akka.kafka.ConsumerRebalanceEvent]] signals */
+  /** Configure this actor ref to receive [[org.apache.pekko.kafka.ConsumerRebalanceEvent]] signals */
   def withRebalanceListener(ref: ActorRef): AutoSubscription
 
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/985")
@@ -80,7 +80,7 @@ final case class TopicPartitionsRevoked(sub: Subscription, topicPartitions: Set[
 object Subscriptions {
 
   /** INTERNAL API */
-  @akka.annotation.InternalApi
+  @org.apache.pekko.annotation.InternalApi
   private[kafka] final case class TopicSubscription(
       tps: Set[String],
       rebalanceListener: Option[ActorRef],
@@ -101,7 +101,7 @@ object Subscriptions {
   }
 
   /** INTERNAL API */
-  @akka.annotation.InternalApi
+  @org.apache.pekko.annotation.InternalApi
   private[kafka] final case class TopicSubscriptionPattern(
       pattern: String,
       rebalanceListener: Option[ActorRef],
@@ -121,14 +121,14 @@ object Subscriptions {
   }
 
   /** INTERNAL API */
-  @akka.annotation.InternalApi
+  @org.apache.pekko.annotation.InternalApi
   private[kafka] final case class Assignment(tps: Set[TopicPartition]) extends ManualSubscription {
     def withRebalanceListener(ref: ActorRef): Assignment = this
     def renderStageAttribute: String = s"${tps.mkString(" ")}"
   }
 
   /** INTERNAL API */
-  @akka.annotation.InternalApi
+  @org.apache.pekko.annotation.InternalApi
   private[kafka] final case class AssignmentWithOffset(tps: Map[TopicPartition, Long]) extends ManualSubscription {
     def withRebalanceListener(ref: ActorRef): AssignmentWithOffset = this
     def renderStageAttribute: String =
@@ -136,7 +136,7 @@ object Subscriptions {
   }
 
   /** INTERNAL API */
-  @akka.annotation.InternalApi
+  @org.apache.pekko.annotation.InternalApi
   private[kafka] final case class AssignmentOffsetsForTimes(timestampsToSearch: Map[TopicPartition, Long])
       extends ManualSubscription {
     def withRebalanceListener(ref: ActorRef): AssignmentOffsetsForTimes = this
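
The internal case classes above back the public factories on `Subscriptions`; a short sketch under the renamed package, with the topic name and the rebalance listener assumed:

    import org.apache.pekko.actor.ActorRef
    import org.apache.pekko.kafka.Subscriptions
    import org.apache.kafka.common.TopicPartition

    // Auto subscription with an (assumed) listener for ConsumerRebalanceEvent signals
    def autoSub(rebalanceActor: ActorRef) =
      Subscriptions.topics("events").withRebalanceListener(rebalanceActor)

    // Manual subscription pinning a partition to a starting offset
    val manualSub = Subscriptions.assignmentWithOffset(new TopicPartition("events", 0) -> 42L)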
diff --git a/core/src/main/scala/akka/kafka/internal/BaseSingleSourceLogic.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/BaseSingleSourceLogic.scala
similarity index 88%
rename from core/src/main/scala/akka/kafka/internal/BaseSingleSourceLogic.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/BaseSingleSourceLogic.scala
index fbc5c7c8..fd078148 100644
--- a/core/src/main/scala/akka/kafka/internal/BaseSingleSourceLogic.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/BaseSingleSourceLogic.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.{ ActorRef, Status, Terminated }
-import akka.annotation.InternalApi
-import akka.kafka.Subscriptions.{ Assignment, AssignmentOffsetsForTimes, AssignmentWithOffset }
-import akka.kafka.{ ConsumerFailed, ManualSubscription }
-import akka.stream.SourceShape
-import akka.stream.stage.GraphStageLogic.StageActor
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic, OutHandler }
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.{ ActorRef, Status, Terminated }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.Subscriptions.{ Assignment, AssignmentOffsetsForTimes, AssignmentWithOffset }
+import org.apache.pekko.kafka.{ ConsumerFailed, ManualSubscription }
+import org.apache.pekko.stream.SourceShape
+import org.apache.pekko.stream.stage.GraphStageLogic.StageActor
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic, OutHandler }
 import org.apache.kafka.common.TopicPartition
 
 import scala.annotation.tailrec
diff --git a/core/src/main/scala/akka/kafka/internal/CommitCollectorStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitCollectorStage.scala
similarity index 92%
rename from core/src/main/scala/akka/kafka/internal/CommitCollectorStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/CommitCollectorStage.scala
index 64e75a60..9d230370 100644
--- a/core/src/main/scala/akka/kafka/internal/CommitCollectorStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitCollectorStage.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.annotation.InternalApi
-import akka.kafka.CommitterSettings
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
-import akka.stream._
-import akka.stream.stage._
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.CommitterSettings
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.stage._
 
 /**
  * INTERNAL API.
@@ -44,7 +44,7 @@ private final class CommitCollectorStageLogic(
 
   // Context propagation is needed to notify Lightbend Telemetry to keep the context in case of a deferred downstream
   // push call that might not happen during onPush but later onTimer, onPull, or only during the next onPush call.
-  private val contextPropagation = akka.stream.impl.ContextPropagation()
+  private val contextPropagation = org.apache.pekko.stream.impl.ContextPropagation()
   private var contextSuspended = false
 
   override protected def logSource: Class[_] = classOf[CommitCollectorStageLogic]
@@ -157,6 +157,6 @@ private final class CommitCollectorStageLogic(
   }
 }
 
-private[akka] object CommitCollectorStage {
+private[pekko] object CommitCollectorStage {
   val CommitNow = "flowStageCommit"
 }
diff --git a/core/src/main/scala/akka/kafka/internal/CommitObservationLogic.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitObservationLogic.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/internal/CommitObservationLogic.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/CommitObservationLogic.scala
index 2dd6a1a0..97e476d8 100644
--- a/core/src/main/scala/akka/kafka/internal/CommitObservationLogic.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitObservationLogic.scala
@@ -3,12 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.kafka.CommitWhen.OffsetFirstObserved
-import akka.kafka.CommitterSettings
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffset, CommittableOffsetBatch, GroupTopicPartition }
-import akka.stream.stage.GraphStageLogic
+import org.apache.pekko.kafka.CommitWhen.OffsetFirstObserved
+import org.apache.pekko.kafka.CommitterSettings
+import org.apache.pekko.kafka.ConsumerMessage.{
+  Committable,
+  CommittableOffset,
+  CommittableOffsetBatch,
+  GroupTopicPartition
+}
+import org.apache.pekko.stream.stage.GraphStageLogic
 
 /**
  * Shared commit observation logic between [[GraphStageLogic]] that facilitate offset commits,
diff --git a/core/src/main/scala/akka/kafka/internal/CommitTrigger.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitTrigger.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/internal/CommitTrigger.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/CommitTrigger.scala
index 94892479..9c30c755 100644
--- a/core/src/main/scala/akka/kafka/internal/CommitTrigger.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/CommitTrigger.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-private[akka] object CommitTrigger {
+private[pekko] object CommitTrigger {
   sealed trait TriggerdBy
   case object BatchSize extends TriggerdBy {
     override def toString: String = "batch size"
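
The `private[akka]` to `private[pekko]` changes throughout these files retarget Scala's qualified-private scope at the new root package; a minimal sketch of what that scoping means:

    package org.apache.pekko.kafka.internal

    // Sketch only: `private[pekko]` makes the member visible to all code under
    // org.apache.pekko, just as `private[akka]` did for the akka package tree.
    private[pekko] object VisibilityExample {
      private[kafka] val narrower = "visible only under org.apache.pekko.kafka"
    }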
diff --git a/core/src/main/scala/akka/kafka/internal/CommittableSources.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/CommittableSources.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/internal/CommittableSources.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/CommittableSources.scala
index c05dc420..9c91a89b 100644
--- a/core/src/main/scala/akka/kafka/internal/CommittableSources.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/CommittableSources.scala
@@ -3,22 +3,22 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.dispatch.ExecutionContexts
-import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
-import akka.kafka._
-import akka.kafka.internal.KafkaConsumerActor.Internal.{ Commit, CommitSingle, CommitWithoutReply }
-import akka.kafka.internal.SubSourceLogic._
-import akka.kafka.scaladsl.Consumer.Control
-import akka.pattern.AskTimeoutException
-import akka.stream.SourceShape
-import akka.stream.scaladsl.Source
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic }
-import akka.util.Timeout
-import akka.{ Done, NotUsed }
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.{ Commit, CommitSingle, CommitWithoutReply }
+import org.apache.pekko.kafka.internal.SubSourceLogic._
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.pattern.AskTimeoutException
+import org.apache.pekko.stream.SourceShape
+import org.apache.pekko.stream.scaladsl.Source
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic }
+import org.apache.pekko.util.Timeout
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord, OffsetAndMetadata }
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.requests.OffsetFetchResponse
@@ -177,9 +177,9 @@ private[kafka] object KafkaAsyncConsumerCommitterRef {
 /**
  * Internal API.
  *
- * Sends [[akka.kafka.internal.KafkaConsumerActor.Internal.Commit]],
- * [[akka.kafka.internal.KafkaConsumerActor.Internal.CommitSingle]] and
- * [[akka.kafka.internal.KafkaConsumerActor.Internal.CommitWithoutReply]] messages to the consumer actor.
+ * Sends [[org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.Commit]],
+ * [[org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.CommitSingle]] and
+ * [[org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.CommitWithoutReply]] messages to the consumer actor.
  */
 @InternalApi
 private[kafka] class KafkaAsyncConsumerCommitterRef(private val consumerActor: ActorRef,
@@ -198,7 +198,7 @@ private[kafka] class KafkaAsyncConsumerCommitterRef(private val consumerActor: A
   }
 
   private def sendWithReply(msg: AnyRef): Future[Done] = {
-    import akka.pattern.ask
+    import org.apache.pekko.pattern.ask
     consumerActor
       .ask(msg)(Timeout(commitTimeout))
       .map(_ => Done)(ExecutionContexts.parasitic)
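
`sendWithReply` above is the classic ask pattern with the commit timeout; a self-contained sketch, where the target actor, message, and timeout value are assumptions:

    import org.apache.pekko.Done
    import org.apache.pekko.actor.ActorRef
    import org.apache.pekko.pattern.ask
    import org.apache.pekko.util.Timeout

    import scala.concurrent.{ ExecutionContext, Future }
    import scala.concurrent.duration._

    // Ask the (assumed) consumer actor and map any successful reply to Done,
    // mirroring the sendWithReply helper shown above.
    def askDone(consumerActor: ActorRef, msg: AnyRef)(implicit ec: ExecutionContext): Future[Done] =
      consumerActor.ask(msg)(Timeout(5.seconds)).map(_ => Done)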
diff --git a/core/src/main/scala/akka/kafka/internal/CommittingProducerSinkStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkStage.scala
similarity index 94%
rename from core/src/main/scala/akka/kafka/internal/CommittingProducerSinkStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkStage.scala
index 617c2c92..4d130cf4 100644
--- a/core/src/main/scala/akka/kafka/internal/CommittingProducerSinkStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkStage.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.Done
-import akka.annotation.InternalApi
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
-import akka.kafka.ProducerMessage._
-import akka.kafka.{ CommitDelivery, CommitterSettings, ProducerSettings }
-import akka.stream.ActorAttributes.SupervisionStrategy
-import akka.stream.Supervision.Decider
-import akka.stream.stage._
-import akka.stream.{ Attributes, Inlet, SinkShape, Supervision }
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.{ CommitDelivery, CommitterSettings, ProducerSettings }
+import org.apache.pekko.stream.ActorAttributes.SupervisionStrategy
+import org.apache.pekko.stream.Supervision.Decider
+import org.apache.pekko.stream.stage._
+import org.apache.pekko.stream.{ Attributes, Inlet, SinkShape, Supervision }
 import org.apache.kafka.clients.producer.{ Callback, RecordMetadata }
 
 import scala.concurrent.{ Future, Promise }
diff --git a/core/src/main/scala/akka/kafka/internal/ConfigSettings.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ConfigSettings.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/internal/ConfigSettings.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ConfigSettings.scala
index b406985a..8f5ae20f 100644
--- a/core/src/main/scala/akka/kafka/internal/ConfigSettings.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ConfigSettings.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util
 
-import akka.annotation.InternalApi
+import org.apache.pekko.annotation.InternalApi
 import com.typesafe.config.{ Config, ConfigObject }
 
 import scala.annotation.tailrec
 import scala.jdk.CollectionConverters._
 import scala.concurrent.duration.Duration
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.util.JavaDurationConverters._
 
 /**
  * INTERNAL API
diff --git a/core/src/main/scala/akka/kafka/internal/ConnectionChecker.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ConnectionChecker.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/internal/ConnectionChecker.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ConnectionChecker.scala
index cb1fbe19..08f028d1 100644
--- a/core/src/main/scala/akka/kafka/internal/ConnectionChecker.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ConnectionChecker.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.actor.{ Actor, ActorLogging, Props, Timers }
-import akka.annotation.InternalApi
-import akka.event.LoggingReceive
-import akka.kafka.{ ConnectionCheckerSettings, KafkaConnectionFailed, Metadata }
+import org.apache.pekko.actor.{ Actor, ActorLogging, Props, Timers }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.event.LoggingReceive
+import org.apache.pekko.kafka.{ ConnectionCheckerSettings, KafkaConnectionFailed, Metadata }
 import org.apache.kafka.common.errors.TimeoutException
 
 import scala.concurrent.duration.FiniteDuration
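
The actor above is driven by `ConnectionCheckerSettings`; a sketch of enabling it on consumer settings, where the constructor arguments (max retries, check interval, backoff factor) and the `withConnectionChecker` hook are stated as assumptions:

    import scala.concurrent.duration._
    import org.apache.pekko.kafka.{ ConnectionCheckerSettings, ConsumerSettings }

    // Assumed: apply(maxRetries, checkInterval, factor) and the
    // ConsumerSettings.withConnectionChecker hook.
    def withChecker[K, V](settings: ConsumerSettings[K, V]): ConsumerSettings[K, V] =
      settings.withConnectionChecker(ConnectionCheckerSettings(3, 15.seconds, 2.0))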
diff --git a/core/src/main/scala/akka/kafka/internal/ConsumerProgressTracking.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerProgressTracking.scala
similarity index 98%
rename from core/src/main/scala/akka/kafka/internal/ConsumerProgressTracking.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerProgressTracking.scala
index 64f63ad3..c7ffd93b 100644
--- a/core/src/main/scala/akka/kafka/internal/ConsumerProgressTracking.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerProgressTracking.scala
@@ -3,8 +3,8 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-import akka.annotation.InternalApi
+package org.apache.pekko.kafka.internal
+import org.apache.pekko.annotation.InternalApi
 import org.apache.kafka.clients.consumer.{ Consumer, ConsumerRecords, OffsetAndMetadata }
 import org.apache.kafka.common.TopicPartition
 
diff --git a/core/src/main/scala/akka/kafka/internal/ConsumerResetProtection.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerResetProtection.scala
similarity index 96%
rename from core/src/main/scala/akka/kafka/internal/ConsumerResetProtection.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerResetProtection.scala
index 99f291cc..2cc5dd6b 100644
--- a/core/src/main/scala/akka/kafka/internal/ConsumerResetProtection.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ConsumerResetProtection.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util
 
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.event.LoggingAdapter
-import akka.kafka.OffsetResetProtectionSettings
-import akka.kafka.internal.KafkaConsumerActor.Internal.Seek
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.kafka.OffsetResetProtectionSettings
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.Seek
 import org.apache.kafka.clients.consumer.{ ConsumerRecord, ConsumerRecords, OffsetAndMetadata }
 import org.apache.kafka.common.TopicPartition
 
diff --git a/core/src/main/scala/akka/kafka/internal/ControlImplementations.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ControlImplementations.scala
similarity index 86%
rename from core/src/main/scala/akka/kafka/internal/ControlImplementations.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ControlImplementations.scala
index a77470a2..029e92e6 100644
--- a/core/src/main/scala/akka/kafka/internal/ControlImplementations.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ControlImplementations.scala
@@ -3,18 +3,18 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 import java.util.concurrent.{ CompletionStage, Executor }
 
-import akka.Done
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.dispatch.ExecutionContexts
-import akka.kafka.internal.KafkaConsumerActor.Internal.{ ConsumerMetrics, RequestMetrics }
-import akka.kafka.{ javadsl, scaladsl }
-import akka.stream.SourceShape
-import akka.stream.stage.GraphStageLogic
-import akka.util.Timeout
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.{ ConsumerMetrics, RequestMetrics }
+import org.apache.pekko.kafka.{ javadsl, scaladsl }
+import org.apache.pekko.stream.SourceShape
+import org.apache.pekko.stream.stage.GraphStageLogic
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.common.{ Metric, MetricName }
 
 import scala.jdk.CollectionConverters._
@@ -78,7 +78,7 @@ private trait MetricsControl extends scaladsl.Consumer.Control {
   // FIXME: this can't be accessed until the stream has materialized because the `def executionContext` implementation
   // takes the executioncontext from the materializer. should it throw an exception, or block, until materialization?
   def metrics: Future[Map[MetricName, Metric]] = {
-    import akka.pattern.ask
+    import org.apache.pekko.pattern.ask
 
     import scala.concurrent.duration._
     consumerFuture
diff --git a/core/src/main/scala/akka/kafka/internal/DefaultProducerStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/DefaultProducerStage.scala
similarity index 92%
rename from core/src/main/scala/akka/kafka/internal/DefaultProducerStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/DefaultProducerStage.scala
index 97f9e10a..0630a8e9 100644
--- a/core/src/main/scala/akka/kafka/internal/DefaultProducerStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/DefaultProducerStage.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.Done
-import akka.annotation.InternalApi
-import akka.kafka.ProducerMessage._
-import akka.kafka.ProducerSettings
-import akka.kafka.internal.ProducerStage.ProducerCompletionState
-import akka.stream.ActorAttributes.SupervisionStrategy
-import akka.stream.Supervision.Decider
-import akka.stream.stage._
-import akka.stream.{ Attributes, FlowShape, Supervision }
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.kafka.internal.ProducerStage.ProducerCompletionState
+import org.apache.pekko.stream.ActorAttributes.SupervisionStrategy
+import org.apache.pekko.stream.Supervision.Decider
+import org.apache.pekko.stream.stage._
+import org.apache.pekko.stream.{ Attributes, FlowShape, Supervision }
 import org.apache.kafka.clients.producer.{ Callback, ProducerRecord, RecordMetadata }
 
 import scala.concurrent.{ ExecutionContext, Future, Promise }
diff --git a/core/src/main/scala/akka/kafka/internal/DeferredProducer.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/DeferredProducer.scala
similarity index 92%
rename from core/src/main/scala/akka/kafka/internal/DeferredProducer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/DeferredProducer.scala
index 3960a677..bdc504f1 100644
--- a/core/src/main/scala/akka/kafka/internal/DeferredProducer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/DeferredProducer.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.annotation.InternalApi
-import akka.dispatch.ExecutionContexts
-import akka.kafka.ProducerSettings
-import akka.stream.stage._
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.stream.stage._
+import org.apache.pekko.util.JavaDurationConverters._
 import org.apache.kafka.clients.producer.Producer
 
 import scala.util.control.NonFatal
diff --git a/core/src/main/scala/akka/kafka/internal/ExternalSingleSourceLogic.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ExternalSingleSourceLogic.scala
similarity index 81%
rename from core/src/main/scala/akka/kafka/internal/ExternalSingleSourceLogic.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ExternalSingleSourceLogic.scala
index d821c803..9b834cbc 100644
--- a/core/src/main/scala/akka/kafka/internal/ExternalSingleSourceLogic.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ExternalSingleSourceLogic.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.kafka.ManualSubscription
-import akka.stream.SourceShape
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ManualSubscription
+import org.apache.pekko.stream.SourceShape
 
 import scala.concurrent.Future
 
diff --git a/core/src/main/scala/akka/kafka/internal/KafkaConsumerActor.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/KafkaConsumerActor.scala
similarity index 98%
rename from core/src/main/scala/akka/kafka/internal/KafkaConsumerActor.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/KafkaConsumerActor.scala
index 1d364ba1..8118ba7a 100644
--- a/core/src/main/scala/akka/kafka/internal/KafkaConsumerActor.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/KafkaConsumerActor.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicInteger
 import java.util.concurrent.locks.LockSupport
 import java.util.regex.Pattern
-import akka.Done
-import akka.actor.Status.Failure
-import akka.actor.{
+import org.apache.pekko.Done
+import org.apache.pekko.actor.Status.Failure
+import org.apache.pekko.actor.{
   Actor,
   ActorRef,
   DeadLetterSuppression,
@@ -20,12 +20,12 @@ import akka.actor.{
   Terminated,
   Timers
 }
-import akka.annotation.InternalApi
-import akka.util.JavaDurationConverters._
-import akka.event.LoggingReceive
-import akka.kafka.KafkaConsumerActor.{ StopLike, StoppingException }
-import akka.kafka._
-import akka.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.util.JavaDurationConverters._
+import org.apache.pekko.event.LoggingReceive
+import org.apache.pekko.kafka.KafkaConsumerActor.{ StopLike, StoppingException }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.errors.RebalanceInProgressException
 import org.apache.kafka.common.{ Metric, MetricName, TopicPartition }
@@ -60,7 +60,7 @@ import scala.util.control.NonFatal
     final case class RegisterSubStage(tps: Set[TopicPartition]) extends NoSerializationVerificationNeeded
     final case class Seek(tps: Map[TopicPartition, Long]) extends NoSerializationVerificationNeeded
     final case class RequestMessages(requestId: Int, tps: Set[TopicPartition]) extends NoSerializationVerificationNeeded
-    val Stop = akka.kafka.KafkaConsumerActor.Stop
+    val Stop = org.apache.pekko.kafka.KafkaConsumerActor.Stop
     final case class StopFromStage(stageId: String) extends StopLike
     final case class Commit(tp: TopicPartition, offsetAndMetadata: OffsetAndMetadata)
         extends NoSerializationVerificationNeeded
@@ -398,7 +398,7 @@ import scala.util.control.NonFatal
         owner.foreach(_ ! Failure(e))
         throw e
       case None =>
-        import akka.pattern.pipe
+        import org.apache.pekko.pattern.pipe
         implicit val ec: ExecutionContext = context.dispatcher
         context.become(expectSettings)
         updateSettings.pipeTo(self)
diff --git a/core/src/main/scala/akka/kafka/internal/KafkaSourceStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/KafkaSourceStage.scala
similarity index 73%
rename from core/src/main/scala/akka/kafka/internal/KafkaSourceStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/KafkaSourceStage.scala
index 36da33ba..161493bd 100644
--- a/core/src/main/scala/akka/kafka/internal/KafkaSourceStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/KafkaSourceStage.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.annotation.InternalApi
-import akka.kafka.scaladsl.Consumer._
-import akka.stream._
-import akka.stream.stage.{ GraphStageLogic, GraphStageWithMaterializedValue }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.scaladsl.Consumer._
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.stage.{ GraphStageLogic, GraphStageWithMaterializedValue }
 
 /**
  * INTERNAL API
diff --git a/core/src/main/scala/akka/kafka/internal/LoggingWithId.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/LoggingWithId.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/internal/LoggingWithId.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/LoggingWithId.scala
index 4a1942b6..761a3bf9 100644
--- a/core/src/main/scala/akka/kafka/internal/LoggingWithId.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/LoggingWithId.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.actor.{ Actor, ActorLogging }
-import akka.event.LoggingAdapter
-import akka.stream.stage.{ GraphStageLogic, StageLogging }
+import org.apache.pekko.actor.{ Actor, ActorLogging }
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.stream.stage.{ GraphStageLogic, StageLogging }
 
 /**
  * Generate a short random UID for something.
diff --git a/core/src/main/scala/akka/kafka/internal/MessageBuilder.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/MessageBuilder.scala
similarity index 97%
rename from core/src/main/scala/akka/kafka/internal/MessageBuilder.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/MessageBuilder.scala
index 0aa036be..13fed0e8 100644
--- a/core/src/main/scala/akka/kafka/internal/MessageBuilder.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/MessageBuilder.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 import java.util.concurrent.CompletionStage
 
-import akka.Done
-import akka.annotation.InternalApi
-import akka.kafka.ConsumerMessage
-import akka.kafka.ConsumerMessage.{
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ConsumerMessage
+import org.apache.pekko.kafka.ConsumerMessage.{
   CommittableMessage,
   CommittableOffsetMetadata,
   GroupTopicPartition,
diff --git a/core/src/main/scala/akka/kafka/internal/PartitionAssignmentHelpers.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/PartitionAssignmentHelpers.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/internal/PartitionAssignmentHelpers.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/PartitionAssignmentHelpers.scala
index 0f953799..07c55e53 100644
--- a/core/src/main/scala/akka/kafka/internal/PartitionAssignmentHelpers.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/PartitionAssignmentHelpers.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.kafka.scaladsl.PartitionAssignmentHandler
-import akka.kafka.javadsl
-import akka.kafka.{ AutoSubscription, RestrictedConsumer, TopicPartitionsAssigned, TopicPartitionsRevoked }
-import akka.stream.stage.AsyncCallback
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.kafka.javadsl
+import org.apache.pekko.kafka.{ AutoSubscription, RestrictedConsumer, TopicPartitionsAssigned, TopicPartitionsRevoked }
+import org.apache.pekko.stream.stage.AsyncCallback
 import org.apache.kafka.common.TopicPartition
 
 import scala.jdk.CollectionConverters._
diff --git a/core/src/main/scala/akka/kafka/internal/PlainSources.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/PlainSources.scala
similarity index 83%
rename from core/src/main/scala/akka/kafka/internal/PlainSources.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/PlainSources.scala
index 79c09290..95ae1591 100644
--- a/core/src/main/scala/akka/kafka/internal/PlainSources.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/PlainSources.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.NotUsed
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.{ AutoSubscription, ConsumerSettings, ManualSubscription, Subscription }
-import akka.kafka.internal.SubSourceLogic._
-import akka.stream.SourceShape
-import akka.stream.scaladsl.Source
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic }
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.{ AutoSubscription, ConsumerSettings, ManualSubscription, Subscription }
+import org.apache.pekko.kafka.internal.SubSourceLogic._
+import org.apache.pekko.stream.SourceShape
+import org.apache.pekko.stream.scaladsl.Source
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.TopicPartition
 
diff --git a/core/src/main/scala/akka/kafka/internal/ProducerStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/ProducerStage.scala
similarity index 79%
rename from core/src/main/scala/akka/kafka/internal/ProducerStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/ProducerStage.scala
index c06cc560..6aa99c8c 100644
--- a/core/src/main/scala/akka/kafka/internal/ProducerStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/ProducerStage.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.annotation.InternalApi
-import akka.kafka.ProducerMessage._
-import akka.kafka.ProducerSettings
-import akka.stream._
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.stream._
 
 import scala.concurrent.Future
 
diff --git a/core/src/main/scala/akka/kafka/internal/SingleSourceLogic.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/SingleSourceLogic.scala
similarity index 85%
rename from core/src/main/scala/akka/kafka/internal/SingleSourceLogic.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/SingleSourceLogic.scala
index d20a84f9..d0b53a89 100644
--- a/core/src/main/scala/akka/kafka/internal/SingleSourceLogic.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/SingleSourceLogic.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.{ ActorRef, ExtendedActorSystem, Terminated }
-import akka.annotation.InternalApi
-import akka.kafka.internal.KafkaConsumerActor.Internal.Messages
-import akka.kafka.scaladsl.PartitionAssignmentHandler
-import akka.kafka.{ ConsumerSettings, RestrictedConsumer, Subscription }
-import akka.stream.SourceShape
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.{ ActorRef, ExtendedActorSystem, Terminated }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.Messages
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.kafka.{ ConsumerSettings, RestrictedConsumer, Subscription }
+import org.apache.pekko.stream.SourceShape
 import org.apache.kafka.common.TopicPartition
 
 import scala.concurrent.{ Future, Promise }
@@ -34,7 +34,7 @@ import scala.concurrent.{ Future, Promise }
   final def createConsumerActor(): ActorRef = {
     val extendedActorSystem = materializer.system.asInstanceOf[ExtendedActorSystem]
     val actor =
-      extendedActorSystem.systemActorOf(akka.kafka.KafkaConsumerActor.props(sourceActor.ref, settings),
+      extendedActorSystem.systemActorOf(org.apache.pekko.kafka.KafkaConsumerActor.props(sourceActor.ref, settings),
         s"kafka-consumer-$actorNumber")
     consumerPromise.success(actor)
     actor
diff --git a/core/src/main/scala/akka/kafka/internal/SourceLogicBuffer.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicBuffer.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/internal/SourceLogicBuffer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicBuffer.scala
index 3c36f5a9..8437662c 100644
--- a/core/src/main/scala/akka/kafka/internal/SourceLogicBuffer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicBuffer.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-import akka.annotation.InternalApi
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic }
+package org.apache.pekko.kafka.internal
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.TopicPartition
 
diff --git a/core/src/main/scala/akka/kafka/internal/SourceLogicSubscription.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicSubscription.scala
similarity index 82%
rename from core/src/main/scala/akka/kafka/internal/SourceLogicSubscription.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicSubscription.scala
index 36ed17da..2b98dedf 100644
--- a/core/src/main/scala/akka/kafka/internal/SourceLogicSubscription.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/SourceLogicSubscription.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-import akka.actor.ActorRef
-import akka.annotation.InternalApi
-import akka.kafka.{ AutoSubscription, ManualSubscription, Subscription }
-import akka.kafka.Subscriptions._
-import akka.kafka.scaladsl.PartitionAssignmentHandler
-import akka.stream.stage.GraphStageLogic.StageActor
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic }
+package org.apache.pekko.kafka.internal
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.{ AutoSubscription, ManualSubscription, Subscription }
+import org.apache.pekko.kafka.Subscriptions._
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.stream.stage.GraphStageLogic.StageActor
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic }
 import org.apache.kafka.common.TopicPartition
 
 /**
diff --git a/core/src/main/scala/akka/kafka/internal/SubSourceLogic.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/SubSourceLogic.scala
similarity index 93%
rename from core/src/main/scala/akka/kafka/internal/SubSourceLogic.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/SubSourceLogic.scala
index f292eba3..32dd3896 100644
--- a/core/src/main/scala/akka/kafka/internal/SubSourceLogic.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/SubSourceLogic.scala
@@ -3,23 +3,23 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.NotUsed
-import akka.actor.Status
-import akka.actor.{ ActorRef, ExtendedActorSystem, Terminated }
-import akka.annotation.InternalApi
-import akka.kafka.internal.KafkaConsumerActor.Internal.RegisterSubStage
-import akka.kafka.internal.SubSourceLogic._
-import akka.kafka.{ AutoSubscription, ConsumerFailed, ConsumerSettings, RestrictedConsumer }
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.PartitionAssignmentHandler
-import akka.pattern.{ ask, AskTimeoutException }
-import akka.stream.scaladsl.Source
-import akka.stream.stage.GraphStageLogic.StageActor
-import akka.stream.stage._
-import akka.stream.{ Attributes, Outlet, SourceShape }
-import akka.util.Timeout
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.Status
+import org.apache.pekko.actor.{ ActorRef, ExtendedActorSystem, Terminated }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.RegisterSubStage
+import org.apache.pekko.kafka.internal.SubSourceLogic._
+import org.apache.pekko.kafka.{ AutoSubscription, ConsumerFailed, ConsumerSettings, RestrictedConsumer }
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.pattern.{ ask, AskTimeoutException }
+import org.apache.pekko.stream.scaladsl.Source
+import org.apache.pekko.stream.stage.GraphStageLogic.StageActor
+import org.apache.pekko.stream.stage._
+import org.apache.pekko.stream.{ Attributes, Outlet, SourceShape }
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.common.TopicPartition
 
 import scala.annotation.tailrec
@@ -86,7 +86,7 @@ private class SubSourceLogic[K, V, Msg](
     }
     consumerActor = {
       val extendedActorSystem = materializer.system.asInstanceOf[ExtendedActorSystem]
-      extendedActorSystem.systemActorOf(akka.kafka.KafkaConsumerActor.props(sourceActor.ref, settings),
+      extendedActorSystem.systemActorOf(org.apache.pekko.kafka.KafkaConsumerActor.props(sourceActor.ref, settings),
         s"kafka-consumer-$actorNumber")
     }
     consumerPromise.success(consumerActor)
@@ -315,7 +315,7 @@ private object SubSourceLogic {
   /**
    * Internal API
    *
-   * SubSourceStageLogic [[akka.kafka.scaladsl.Consumer.Control]] and the stage actor [[ActorRef]]
+   * SubSourceStageLogic [[org.apache.pekko.kafka.scaladsl.Consumer.Control]] and the stage actor [[ActorRef]]
    */
   @InternalApi
   final case class ControlAndStageActor(control: Control, stageActor: ActorRef)
diff --git a/core/src/main/scala/akka/kafka/internal/TransactionalProducerStage.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalProducerStage.scala
similarity index 94%
rename from core/src/main/scala/akka/kafka/internal/TransactionalProducerStage.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalProducerStage.scala
index f30ad63e..d4e53b86 100644
--- a/core/src/main/scala/akka/kafka/internal/TransactionalProducerStage.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalProducerStage.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.Done
-import akka.annotation.InternalApi
-import akka.kafka.ConsumerMessage.{ GroupTopicPartition, PartitionOffsetCommittedMarker }
-import akka.kafka.ProducerMessage.{ Envelope, Results }
-import akka.kafka.internal.DeferredProducer._
-import akka.kafka.internal.ProducerStage.ProducerCompletionState
-import akka.kafka.{ ConsumerMessage, ProducerSettings }
-import akka.stream.stage._
-import akka.stream.{ Attributes, FlowShape }
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ConsumerMessage.{ GroupTopicPartition, PartitionOffsetCommittedMarker }
+import org.apache.pekko.kafka.ProducerMessage.{ Envelope, Results }
+import org.apache.pekko.kafka.internal.DeferredProducer._
+import org.apache.pekko.kafka.internal.ProducerStage.ProducerCompletionState
+import org.apache.pekko.kafka.{ ConsumerMessage, ProducerSettings }
+import org.apache.pekko.stream.stage._
+import org.apache.pekko.stream.{ Attributes, FlowShape }
 import org.apache.kafka.clients.consumer.{ ConsumerGroupMetadata, OffsetAndMetadata }
 import org.apache.kafka.clients.producer.ProducerConfig
 import org.apache.kafka.common.TopicPartition
diff --git a/core/src/main/scala/akka/kafka/internal/TransactionalSources.scala b/core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalSources.scala
similarity index 93%
rename from core/src/main/scala/akka/kafka/internal/TransactionalSources.scala
rename to core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalSources.scala
index 56403b37..1c96792e 100644
--- a/core/src/main/scala/akka/kafka/internal/TransactionalSources.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/internal/TransactionalSources.scala
@@ -3,25 +3,25 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.Locale
 
-import akka.{ Done, NotUsed }
-import akka.actor.{ ActorRef, Status, Terminated }
-import akka.actor.Status.Failure
-import akka.annotation.InternalApi
-import akka.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
-import akka.kafka.internal.KafkaConsumerActor.Internal.Revoked
-import akka.kafka.internal.SubSourceLogic._
-import akka.kafka.internal.TransactionalSubSourceStageLogic.DrainingComplete
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.PartitionAssignmentHandler
-import akka.kafka.{ AutoSubscription, ConsumerFailed, ConsumerSettings, RestrictedConsumer, Subscription }
-import akka.stream.SourceShape
-import akka.stream.scaladsl.Source
-import akka.stream.stage.{ AsyncCallback, GraphStageLogic }
-import akka.util.Timeout
+import org.apache.pekko.{ Done, NotUsed }
+import org.apache.pekko.actor.{ ActorRef, Status, Terminated }
+import org.apache.pekko.actor.Status.Failure
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.Revoked
+import org.apache.pekko.kafka.internal.SubSourceLogic._
+import org.apache.pekko.kafka.internal.TransactionalSubSourceStageLogic.DrainingComplete
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
+import org.apache.pekko.kafka.{ AutoSubscription, ConsumerFailed, ConsumerSettings, RestrictedConsumer, Subscription }
+import org.apache.pekko.stream.SourceShape
+import org.apache.pekko.stream.scaladsl.Source
+import org.apache.pekko.stream.stage.{ AsyncCallback, GraphStageLogic }
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord, OffsetAndMetadata }
 import org.apache.kafka.common.{ IsolationLevel, TopicPartition }
 
@@ -171,7 +171,7 @@ private[internal] abstract class TransactionalSourceLogic[K, V, Msg](shape: Sour
   }
 
   private def waitForDraining(partitions: Set[TopicPartition]): Boolean = {
-    import akka.pattern.ask
+    import org.apache.pekko.pattern.ask
     implicit val timeout = Timeout(consumerSettings.commitTimeout)
     try {
       Await.result(ask(stageActor.ref, Drain(partitions, None, Drained)), timeout.duration)
@@ -253,7 +253,7 @@ private[kafka] final class TransactionalSubSource[K, V](
       }
 
       private def waitForDraining(partitions: Set[TopicPartition]): Boolean = {
-        import akka.pattern.ask
+        import org.apache.pekko.pattern.ask
         implicit val timeout = Timeout(txConsumerSettings.commitTimeout)
         try {
           val drainCommandFutures =
@@ -285,7 +285,7 @@ private object TransactionalSourceLogic {
   private[internal] final case class CommittedMarkerRef(sourceActor: ActorRef, commitTimeout: FiniteDuration)(
       implicit ec: ExecutionContext) extends CommittedMarker {
     override def committed(offsets: Map[TopicPartition, OffsetAndMetadata]): Future[Done] = {
-      import akka.pattern.ask
+      import org.apache.pekko.pattern.ask
       sourceActor
         .ask(Committed(offsets))(Timeout(commitTimeout))
         .map(_ => Done)
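
These sources pair with the transactional producer stage for exactly-once consume-transform-produce; a hedged sketch, assuming settings, topic names, the transactional id, and an implicit ActorSystem in scope:

    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.scaladsl.Transactional
    import org.apache.pekko.kafka.{ ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions }

    // Consume from one topic and produce to another inside a transaction,
    // carrying the partition offset as pass-through for the commit marker.
    def transactionalPassThrough(
        consumerSettings: ConsumerSettings[String, String],
        producerSettings: ProducerSettings[String, String])(implicit system: ActorSystem) =
      Transactional
        .source(consumerSettings, Subscriptions.topics("source-topic"))
        .map { msg =>
          ProducerMessage.single(
            new ProducerRecord("sink-topic", msg.record.key, msg.record.value),
            msg.partitionOffset)
        }
        .runWith(Transactional.sink(producerSettings, "transactional-id"))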
diff --git a/core/src/main/scala/akka/kafka/javadsl/Committer.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Committer.scala
similarity index 80%
rename from core/src/main/scala/akka/kafka/javadsl/Committer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/Committer.scala
index 5341c994..85f6b952 100644
--- a/core/src/main/scala/akka/kafka/javadsl/Committer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Committer.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 import java.util.concurrent.CompletionStage
 
-import akka.annotation.ApiMayChange
-import akka.japi.Pair
-import akka.{ Done, NotUsed }
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
-import akka.kafka.{ scaladsl, CommitterSettings }
-import akka.stream.javadsl.{ Flow, FlowWithContext, Sink }
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.japi.Pair
+import org.apache.pekko.{ Done, NotUsed }
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
+import org.apache.pekko.kafka.{ scaladsl, CommitterSettings }
+import org.apache.pekko.stream.javadsl.{ Flow, FlowWithContext, Sink }
 
 import scala.compat.java8.FutureConverters.FutureOps
 
@@ -54,10 +54,10 @@ object Committer {
   @ApiMayChange
   def sinkWithOffsetContext[E, C <: Committable](
       settings: CommitterSettings): Sink[Pair[E, C], CompletionStage[Done]] =
-    akka.stream.scaladsl
+    org.apache.pekko.stream.scaladsl
       .Flow[Pair[E, C]]
       .map(_.toScala)
-      .toMat(scaladsl.Committer.sinkWithOffsetContext(settings))(akka.stream.scaladsl.Keep.right)
+      .toMat(scaladsl.Committer.sinkWithOffsetContext(settings))(org.apache.pekko.stream.scaladsl.Keep.right)
       .mapMaterializedValue[CompletionStage[Done]](_.toJava)
       .asJava[Pair[E, C]]
 }
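
The javadsl façade above delegates to the scaladsl `Committer`; a minimal sketch of the common committable-source-to-committer pipeline, with settings, topic, and an implicit ActorSystem assumed:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
    import org.apache.pekko.kafka.{ CommitterSettings, ConsumerSettings, Subscriptions }
    import org.apache.pekko.stream.scaladsl.Keep

    // Commit offsets of processed records in batches via Committer.sink.
    def commitPipeline(consumerSettings: ConsumerSettings[String, String])(
        implicit system: ActorSystem) =
      Consumer
        .committableSource(consumerSettings, Subscriptions.topics("events"))
        .map(_.committableOffset) // processing would happen before this step
        .toMat(Committer.sink(CommitterSettings(system)))(Keep.both)
        .run()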
diff --git a/core/src/main/scala/akka/kafka/javadsl/Consumer.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Consumer.scala
similarity index 96%
rename from core/src/main/scala/akka/kafka/javadsl/Consumer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/Consumer.scala
index ee32b85c..874b40c9 100644
--- a/core/src/main/scala/akka/kafka/javadsl/Consumer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Consumer.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.{ CompletionStage, Executor }
 
-import akka.actor.ActorRef
-import akka.annotation.ApiMayChange
-import akka.dispatch.ExecutionContexts
-import akka.japi.Pair
-import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
-import akka.kafka._
-import akka.kafka.internal.{ ConsumerControlAsJava, SourceWithOffsetContext }
-import akka.stream.javadsl.{ Source, SourceWithContext }
-import akka.{ Done, NotUsed }
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.japi.Pair
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.internal.{ ConsumerControlAsJava, SourceWithOffsetContext }
+import org.apache.pekko.stream.javadsl.{ Source, SourceWithContext }
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.{ Metric, MetricName, TopicPartition }
 
@@ -177,7 +177,7 @@ object Consumer {
       subscription: Subscription): SourceWithContext[ConsumerRecord[K, V], CommittableOffset, Control] =
     // TODO this could use `scaladsl committableSourceWithContext` but `mapMaterializedValue` is not available, yet
     // See https://github.com/akka/akka/issues/26836
-    akka.stream.scaladsl.Source
+    org.apache.pekko.stream.scaladsl.Source
       .fromGraph(new SourceWithOffsetContext[K, V](settings, subscription))
       .mapMaterializedValue(ConsumerControlAsJava.apply)
       .asSourceWithContext(_._2)
@@ -207,7 +207,7 @@ object Consumer {
       : SourceWithContext[ConsumerRecord[K, V], CommittableOffset, Control] =
     // TODO this could use `scaladsl committableSourceWithContext` but `mapMaterializedValue` is not available, yet
     // See https://github.com/akka/akka/issues/26836
-    akka.stream.scaladsl.Source
+    org.apache.pekko.stream.scaladsl.Source
       .fromGraph(
         new SourceWithOffsetContext[K, V](settings,
           subscription,
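
The context-propagating variant wired end to end, shown in scaladsl for brevity; a sketch assuming settings, topic, and an implicit ActorSystem:

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
    import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
    import org.apache.pekko.kafka.{ CommitterSettings, ConsumerSettings, Subscriptions }

    // Offsets travel as stream context; Committer.sinkWithOffsetContext
    // commits them once the mapped values have been consumed.
    def contextPipeline(consumerSettings: ConsumerSettings[String, String])(
        implicit system: ActorSystem) =
      Consumer
        .sourceWithOffsetContext(consumerSettings, Subscriptions.topics("events"))
        .map(_.value)
        .toMat(Committer.sinkWithOffsetContext(CommitterSettings(system)))(DrainingControl.apply)
        .run()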
diff --git a/core/src/main/scala/akka/kafka/javadsl/DiscoverySupport.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/DiscoverySupport.scala
similarity index 88%
rename from core/src/main/scala/akka/kafka/javadsl/DiscoverySupport.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/DiscoverySupport.scala
index ddb0efe7..63e09bee 100644
--- a/core/src/main/scala/akka/kafka/javadsl/DiscoverySupport.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/DiscoverySupport.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.CompletionStage
 
-import akka.actor.{ ActorSystem, ClassicActorSystemProvider }
-import akka.kafka.{ scaladsl, ConsumerSettings, ProducerSettings }
+import org.apache.pekko.actor.{ ActorSystem, ClassicActorSystemProvider }
+import org.apache.pekko.kafka.{ scaladsl, ConsumerSettings, ProducerSettings }
 import com.typesafe.config.Config
 
 import scala.compat.java8.FunctionConverters._
@@ -18,7 +18,7 @@ import scala.concurrent.Future
 /**
  * Scala API.
  *
- * Reads Kafka bootstrap servers from configured sources via [[akka.discovery.Discovery]] configuration.
+ * Reads Kafka bootstrap servers from configured sources via [[org.apache.pekko.discovery.Discovery]] configuration.
  */
 object DiscoverySupport {
 
diff --git a/core/src/main/scala/akka/kafka/javadsl/MetadataClient.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/MetadataClient.scala
similarity index 86%
rename from core/src/main/scala/akka/kafka/javadsl/MetadataClient.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/MetadataClient.scala
index 63b0aecf..b15d7b49 100644
--- a/core/src/main/scala/akka/kafka/javadsl/MetadataClient.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/MetadataClient.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.{ CompletionStage, Executor }
 
-import akka.actor.{ ActorRef, ActorSystem }
-import akka.dispatch.ExecutionContexts
-import akka.kafka.ConsumerSettings
-import akka.util.Timeout
+import org.apache.pekko.actor.{ ActorRef, ActorSystem }
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.ConsumerSettings
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.clients.consumer.OffsetAndMetadata
 import org.apache.kafka.common.{ PartitionInfo, TopicPartition }
 
@@ -18,7 +18,7 @@ import scala.compat.java8.FutureConverters._
 import scala.concurrent.ExecutionContextExecutor
 import scala.jdk.CollectionConverters._
 
-class MetadataClient private (metadataClient: akka.kafka.scaladsl.MetadataClient) {
+class MetadataClient private (metadataClient: org.apache.pekko.kafka.scaladsl.MetadataClient) {
 
   def getBeginningOffsets[K, V](
       partitions: java.util.Set[TopicPartition]): CompletionStage[java.util.Map[TopicPartition, java.lang.Long]] =
@@ -89,7 +89,7 @@ object MetadataClient {
 
   def create(consumerActor: ActorRef, timeout: Timeout, executor: Executor): MetadataClient = {
     implicit val ec: ExecutionContextExecutor = ExecutionContexts.fromExecutor(executor)
-    val metadataClient = akka.kafka.scaladsl.MetadataClient.create(consumerActor, timeout)
+    val metadataClient = org.apache.pekko.kafka.scaladsl.MetadataClient.create(consumerActor, timeout)
     new MetadataClient(metadataClient)
   }
 
@@ -97,7 +97,7 @@ object MetadataClient {
       timeout: Timeout,
       system: ActorSystem,
       executor: Executor): MetadataClient = {
-    val metadataClient = akka.kafka.scaladsl.MetadataClient
+    val metadataClient = org.apache.pekko.kafka.scaladsl.MetadataClient
       .create(consumerSettings, timeout)(system, ExecutionContexts.fromExecutor(executor))
     new MetadataClient(metadataClient)
   }
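
Note: a small sketch of the scaladsl client this class wraps, reusing `system` and `consumerSettings` from the first sketch; the topic and partition are placeholders.

    import org.apache.pekko.kafka.scaladsl.MetadataClient
    import org.apache.pekko.util.Timeout
    import org.apache.kafka.common.TopicPartition
    import scala.concurrent.Future
    import scala.concurrent.duration._

    val metadataClient =
      MetadataClient.create(consumerSettings, Timeout(5.seconds))(system, system.dispatcher)

    val beginningOffsets: Future[Map[TopicPartition, Long]] =
      metadataClient.getBeginningOffsets(Set(new TopicPartition("events", 0)))

    // Stops the internally created consumer actor.
    metadataClient.close()
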
diff --git a/core/src/main/scala/akka/kafka/javadsl/PartitionAssignmentHandler.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/PartitionAssignmentHandler.scala
similarity index 74%
rename from core/src/main/scala/akka/kafka/javadsl/PartitionAssignmentHandler.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/PartitionAssignmentHandler.scala
index 5e89801d..f561d49b 100644
--- a/core/src/main/scala/akka/kafka/javadsl/PartitionAssignmentHandler.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/PartitionAssignmentHandler.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
-import akka.kafka.RestrictedConsumer
+import org.apache.pekko.kafka.RestrictedConsumer
 import org.apache.kafka.common.TopicPartition
 
 /**
@@ -16,7 +16,7 @@ import org.apache.kafka.common.TopicPartition
  * A warning will be logged if a callback takes longer than the configured `partition-handler-warning`.
  *
  * There is no point in calling `Committable`'s commit methods as their committing won't be executed as long as any of
- * the callbacks in this class are called. Calling `commitSync` on the passed [[akka.kafka.RestrictedConsumer]] is available.
+ * the callbacks in this class are called. Calling `commitSync` on the passed [[org.apache.pekko.kafka.RestrictedConsumer]] is available.
  *
  * This complements the methods of Kafka's [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener ConsumerRebalanceListener]] with
  * an `onStop` callback which is called before `Consumer.close`.
@@ -27,7 +27,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsRevoked]]
    *
    * @param revokedTps The list of partitions that were revoked from the consumer
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onRevoke(revokedTps: java.util.Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -35,7 +35,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsAssigned]]
    *
    * @param assignedTps The list of partitions that are now assigned to the consumer (may include partitions previously assigned to the consumer)
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onAssign(assignedTps: java.util.Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -44,7 +44,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsLost]]
    *
    * @param lostTps The list of partitions that are no longer valid
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onLost(lostTps: java.util.Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -53,7 +53,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsRevoked]]
    *
    * @param currentTps The list of partitions that are currently assigned to the consumer
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onStop(currentTps: java.util.Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
diff --git a/core/src/main/scala/akka/kafka/javadsl/Producer.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Producer.scala
similarity index 65%
rename from core/src/main/scala/akka/kafka/javadsl/Producer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/Producer.scala
index de595ffe..522ed462 100644
--- a/core/src/main/scala/akka/kafka/javadsl/Producer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Producer.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.CompletionStage
-import akka.annotation.ApiMayChange
-import akka.kafka.ConsumerMessage.Committable
-import akka.kafka.ProducerMessage._
-import akka.kafka.{ scaladsl, CommitterSettings, ConsumerMessage, ProducerSettings }
-import akka.stream.javadsl.{ Flow, FlowWithContext, Sink }
-import akka.{ japi, Done, NotUsed }
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.ConsumerMessage.Committable
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.{ scaladsl, CommitterSettings, ConsumerMessage, ProducerSettings }
+import org.apache.pekko.stream.javadsl.{ Flow, FlowWithContext, Sink }
+import org.apache.pekko.{ japi, Done, NotUsed }
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.annotation.nowarn
@@ -57,11 +57,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -86,11 +86,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is always a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -112,11 +112,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -135,11 +135,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -147,16 +147,16 @@ object Producer {
   @ApiMayChange(issue = "https://github.com/akka/alpakka-kafka/issues/880")
   def committableSinkWithOffsetContext[K, V, IN <: Envelope[K, V, _], C <: Committable](
       producerSettings: ProducerSettings[K, V],
-      committerSettings: CommitterSettings): Sink[akka.japi.Pair[IN, C], CompletionStage[Done]] =
+      committerSettings: CommitterSettings): Sink[org.apache.pekko.japi.Pair[IN, C], CompletionStage[Done]] =
     committableSink(producerSettings, committerSettings)
-      .contramap(new akka.japi.function.Function[japi.Pair[IN, C], Envelope[K, V, C]] {
+      .contramap(new org.apache.pekko.japi.function.Function[japi.Pair[IN, C], Envelope[K, V, C]] {
         override def apply(p: japi.Pair[IN, C]) = p.first.withPassThrough(p.second)
       })
 
   /**
    * Create a flow to publish records to Kafka topics and then pass it on.
    *
-   * The records must be wrapped in a [[akka.kafka.ProducerMessage.Message Message]] and continue in the stream as [[akka.kafka.ProducerMessage.Result Result]].
+   * The records must be wrapped in a [[org.apache.pekko.kafka.ProducerMessage.Message Message]] and continue in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]].
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -180,11 +180,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -204,11 +204,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * This flow is intended to be used with Akka's [flow with context](https://doc.akka.io/docs/akka/current/stream/operators/Flow/asFlowWithContext.html).
    *
@@ -222,7 +222,7 @@ object Producer {
   /**
    * Create a flow to publish records to Kafka topics and then pass it on.
    *
-   * The records must be wrapped in a [[akka.kafka.ProducerMessage.Message Message]] and continue in the stream as [[akka.kafka.ProducerMessage.Result Result]].
+   * The records must be wrapped in a [[org.apache.pekko.kafka.ProducerMessage.Message Message]] and continue in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]].
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -244,11 +244,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -272,11 +272,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * This flow is intended to be used with Akka's [flow with context](https://doc.akka.io/docs/akka/current/stream/operators/Flow/asFlowWithContext.html).
    *
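
Note: the at-least-once consume-transform-produce shape these sinks support, sketched with the Scala DSL; topics "in"/"out" are placeholders, and `system`/`consumerSettings` come from the first sketch.

    import org.apache.pekko.kafka.{ CommitterSettings, ProducerMessage, ProducerSettings, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
    import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.kafka.common.serialization.StringSerializer

    val producerSettings =
      ProducerSettings(system, new StringSerializer, new StringSerializer)
        .withBootstrapServers("localhost:9092") // assumed broker address

    val control =
      Consumer
        .committableSource(consumerSettings, Subscriptions.topics("in"))
        .map { msg =>
          // Message publishes one record; the committable offset rides along as pass-through.
          ProducerMessage.single(
            new ProducerRecord[String, String]("out", msg.record.key, msg.record.value),
            msg.committableOffset)
        }
        .toMat(Producer.committableSink(producerSettings, CommitterSettings(system)))(DrainingControl.apply)
        .run()

Later producer sketches reuse `producerSettings` from here.
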
diff --git a/core/src/main/scala/akka/kafka/javadsl/SendProducer.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/SendProducer.scala
similarity index 69%
rename from core/src/main/scala/akka/kafka/javadsl/SendProducer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/SendProducer.scala
index 692c0a89..61696610 100644
--- a/core/src/main/scala/akka/kafka/javadsl/SendProducer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/SendProducer.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.CompletionStage
 
-import akka.Done
-import akka.actor.{ ActorSystem, ClassicActorSystemProvider }
-import akka.kafka.ProducerMessage._
-import akka.kafka.{ scaladsl, ProducerSettings }
+import org.apache.pekko.Done
+import org.apache.pekko.actor.{ ActorSystem, ClassicActorSystemProvider }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.{ scaladsl, ProducerSettings }
 import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }
 
 import scala.compat.java8.FutureConverters._
@@ -39,11 +39,11 @@ final class SendProducer[K, V] private (underlying: scaladsl.SendProducer[K, V])
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and completes the future with [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and completes the future with [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and completes the future with [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support passing through arbitrary data.
    */
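
Note: outside of streams, the wrapped scaladsl SendProducer sends records directly; `system` and `producerSettings` are as in the earlier sketches, and the topic/key/value are placeholders.

    import org.apache.pekko.kafka.scaladsl.SendProducer
    import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }
    import scala.concurrent.Future

    val sendProducer = SendProducer(producerSettings)(system)

    val metadata: Future[RecordMetadata] =
      sendProducer.send(new ProducerRecord("events", "key", "value"))

    // Releases the underlying Kafka producer when no more sends are expected.
    sendProducer.close()
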
diff --git a/core/src/main/scala/akka/kafka/javadsl/Transactional.scala b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Transactional.scala
similarity index 89%
rename from core/src/main/scala/akka/kafka/javadsl/Transactional.scala
rename to core/src/main/scala/org/apache/pekko/kafka/javadsl/Transactional.scala
index 2fcc4d17..1d13ff39 100644
--- a/core/src/main/scala/akka/kafka/javadsl/Transactional.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/javadsl/Transactional.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util.concurrent.CompletionStage
 
-import akka.annotation.ApiMayChange
-import akka.japi.Pair
-import akka.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
-import akka.kafka.ProducerMessage._
-import akka.kafka._
-import akka.kafka.internal.{ ConsumerControlAsJava, TransactionalSourceWithOffsetContext }
-import akka.kafka.javadsl.Consumer.Control
-import akka.stream.javadsl._
-import akka.{ Done, NotUsed }
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.japi.Pair
+import org.apache.pekko.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.internal.{ ConsumerControlAsJava, TransactionalSourceWithOffsetContext }
+import org.apache.pekko.kafka.javadsl.Consumer.Control
+import org.apache.pekko.stream.javadsl._
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 
 import scala.compat.java8.FutureConverters.FutureOps
@@ -46,7 +46,7 @@ object Transactional {
   def sourceWithOffsetContext[K, V](
       consumerSettings: ConsumerSettings[K, V],
       subscription: Subscription): SourceWithContext[ConsumerRecord[K, V], PartitionOffset, Control] =
-    akka.stream.scaladsl.Source
+    org.apache.pekko.stream.scaladsl.Source
       .fromGraph(new TransactionalSourceWithOffsetContext[K, V](consumerSettings, subscription))
       .mapMaterializedValue(ConsumerControlAsJava.apply)
       .asSourceWithContext(_._2)
@@ -100,10 +100,11 @@ object Transactional {
   def sinkWithOffsetContext[K, V](
       settings: ProducerSettings[K, V],
       transactionalId: String): Sink[Pair[Envelope[K, V, NotUsed], PartitionOffset], CompletionStage[Done]] =
-    akka.stream.scaladsl
+    org.apache.pekko.stream.scaladsl
       .Flow[Pair[Envelope[K, V, NotUsed], PartitionOffset]]
       .map(_.toScala)
-      .toMat(scaladsl.Transactional.sinkWithOffsetContext(settings, transactionalId))(akka.stream.scaladsl.Keep.right)
+      .toMat(scaladsl.Transactional.sinkWithOffsetContext(settings, transactionalId))(
+        org.apache.pekko.stream.scaladsl.Keep.right)
       .mapMaterializedValue(_.toJava)
       .asJava
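
Note: a transactional consume-transform-produce pipeline built from this API, sketched in the Scala DSL; the topics and the transactional id are placeholders, settings come from the earlier sketches.

    import org.apache.pekko.kafka.{ ProducerMessage, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.Transactional
    import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
    import org.apache.kafka.clients.producer.ProducerRecord

    val control =
      Transactional
        .source(consumerSettings, Subscriptions.topics("source-topic"))
        .map { msg =>
          ProducerMessage.single(
            new ProducerRecord[String, String]("sink-topic", msg.record.key, msg.record.value),
            msg.partitionOffset) // offsets are committed inside the producer transaction
        }
        .toMat(Transactional.sink(producerSettings, "transactional-id"))(DrainingControl.apply)
        .run()
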
 
diff --git a/core/src/main/scala/akka/kafka/scaladsl/Committer.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Committer.scala
similarity index 81%
rename from core/src/main/scala/akka/kafka/scaladsl/Committer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/Committer.scala
index d2688549..7723607b 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/Committer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Committer.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.annotation.ApiMayChange
-import akka.dispatch.ExecutionContexts
-import akka.kafka.CommitterSettings
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
-import akka.kafka.internal.CommitCollectorStage
-import akka.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink }
-import akka.{ Done, NotUsed }
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.CommitterSettings
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableOffsetBatch }
+import org.apache.pekko.kafka.internal.CommitCollectorStage
+import org.apache.pekko.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink }
+import org.apache.pekko.{ Done, NotUsed }
 
 import scala.concurrent.Future
 
@@ -32,7 +32,7 @@ object Committer {
         .fromGraph(new CommitCollectorStage(settings))
 
     // See https://github.com/akka/alpakka-kafka/issues/882
-    import akka.kafka.CommitDelivery._
+    import org.apache.pekko.kafka.CommitDelivery._
     settings.delivery match {
       case WaitForAck =>
         offsetBatches
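
Note: the delivery modes matched on above differ in their guarantees: WaitForAck (the default) emits downstream only after Kafka acknowledges a commit, while SendAndForget trades that confirmation for throughput. A sketch:

    import org.apache.pekko.kafka.{ CommitDelivery, CommitterSettings }
    import org.apache.pekko.kafka.scaladsl.Committer

    val waitForAck = CommitterSettings(system) // delivery defaults to WaitForAck

    val sendAndForget =
      CommitterSettings(system).withDelivery(CommitDelivery.SendAndForget)

    // Batches committables according to the settings and commits the batches.
    val commitFlow = Committer.batchFlow(waitForAck)
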
diff --git a/core/src/main/scala/akka/kafka/scaladsl/Consumer.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Consumer.scala
similarity index 97%
rename from core/src/main/scala/akka/kafka/scaladsl/Consumer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/Consumer.scala
index b28b141d..5599f9d4 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/Consumer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Consumer.scala
@@ -3,16 +3,16 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
-
-import akka.actor.ActorRef
-import akka.annotation.ApiMayChange
-import akka.dispatch.ExecutionContexts
-import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
-import akka.kafka._
-import akka.kafka.internal._
-import akka.stream.scaladsl.{ Source, SourceWithContext }
-import akka.{ Done, NotUsed }
+package org.apache.pekko.kafka.scaladsl
+
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.internal._
+import org.apache.pekko.stream.scaladsl.{ Source, SourceWithContext }
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.{ Metric, MetricName, TopicPartition }
 
diff --git a/core/src/main/scala/akka/kafka/scaladsl/DiscoverySupport.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/DiscoverySupport.scala
similarity index 83%
rename from core/src/main/scala/akka/kafka/scaladsl/DiscoverySupport.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/DiscoverySupport.scala
index fef0c558..d98030b7 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/DiscoverySupport.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/DiscoverySupport.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.actor.{ ActorSystem, ActorSystemImpl, ClassicActorSystemProvider }
-import akka.annotation.InternalApi
-import akka.discovery.{ Discovery, ServiceDiscovery }
-import akka.kafka.{ ConsumerSettings, ProducerSettings }
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.actor.{ ActorSystem, ActorSystemImpl, ClassicActorSystemProvider }
+import org.apache.pekko.annotation.InternalApi
+import org.apache.pekko.discovery.{ Discovery, ServiceDiscovery }
+import org.apache.pekko.kafka.{ ConsumerSettings, ProducerSettings }
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
 
 import scala.concurrent.Future
@@ -19,7 +19,7 @@ import scala.util.Failure
 /**
  * Scala API.
  *
- * Reads Kafka bootstrap servers from configured sources via [[akka.discovery.Discovery]] configuration.
+ * Reads Kafka bootstrap servers from configured sources via [[org.apache.pekko.discovery.Discovery]] configuration.
  */
 object DiscoverySupport {
 
@@ -114,10 +114,10 @@ object DiscoverySupport {
   }
 
   private def checkClassOrThrow(system: ActorSystemImpl): Unit =
-    system.dynamicAccess.getClassFor("akka.discovery.Discovery$") match {
+    system.dynamicAccess.getClassFor("org.apache.pekko.discovery.Discovery$") match {
       case Failure(_: ClassNotFoundException | _: NoClassDefFoundError) =>
         throw new IllegalStateException(
-          s"Akka Discovery is being used but the `akka-discovery` library is not on the classpath, it must be added explicitly. See https://doc.akka.io/docs/alpakka-kafka/current/discovery.html")
+          s"Pekko Discovery is being used but the `pekko-discovery` library is not on the classpath, it must be added explicitly. See https://pekko.apache.org/docs/pekko/current/discovery/index.html")
       case _ =>
     }
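
Note: the classpath check above means `pekko-discovery` must be declared explicitly, e.g. in build.sbt (the version shown is a placeholder; align it with the Pekko version in use):

    val PekkoVersion = "1.0.0" // placeholder version
    libraryDependencies += "org.apache.pekko" %% "pekko-discovery" % PekkoVersion
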
 
diff --git a/core/src/main/scala/akka/kafka/scaladsl/MetadataClient.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/MetadataClient.scala
similarity index 91%
rename from core/src/main/scala/akka/kafka/scaladsl/MetadataClient.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/MetadataClient.scala
index 291b90b4..4515390e 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/MetadataClient.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/MetadataClient.scala
@@ -3,16 +3,16 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicLong
 
-import akka.actor.{ ActorRef, ActorSystem, ExtendedActorSystem }
-import akka.dispatch.ExecutionContexts
-import akka.kafka.Metadata._
-import akka.kafka.{ ConsumerSettings, KafkaConsumerActor }
-import akka.pattern.ask
-import akka.util.Timeout
+import org.apache.pekko.actor.{ ActorRef, ActorSystem, ExtendedActorSystem }
+import org.apache.pekko.dispatch.ExecutionContexts
+import org.apache.pekko.kafka.Metadata._
+import org.apache.pekko.kafka.{ ConsumerSettings, KafkaConsumerActor }
+import org.apache.pekko.pattern.ask
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.clients.consumer.OffsetAndMetadata
 import org.apache.kafka.common.{ PartitionInfo, TopicPartition }
 
diff --git a/core/src/main/scala/akka/kafka/scaladsl/PartitionAssignmentHandler.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/PartitionAssignmentHandler.scala
similarity index 74%
rename from core/src/main/scala/akka/kafka/scaladsl/PartitionAssignmentHandler.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/PartitionAssignmentHandler.scala
index f6be4ab8..e5803f5a 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/PartitionAssignmentHandler.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/PartitionAssignmentHandler.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.kafka.RestrictedConsumer
+import org.apache.pekko.kafka.RestrictedConsumer
 import org.apache.kafka.common.TopicPartition
 
 /**
@@ -16,7 +16,7 @@ import org.apache.kafka.common.TopicPartition
  * A warning will be logged if a callback takes longer than the configured `partition-handler-warning`.
  *
  * There is no point in calling `Committable`'s commit methods as their committing won't be executed as long as any of
- * the callbacks in this class are called. Calling `commitSync` on the passed [[akka.kafka.RestrictedConsumer]] is available.
+ * the callbacks in this class are called. Calling `commitSync` on the passed [[org.apache.pekko.kafka.RestrictedConsumer]] is available.
  *
  * This complements the methods of Kafka's [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener ConsumerRebalanceListener]] with
  * an `onStop` callback which is called before `Consumer.close`.
@@ -27,7 +27,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsRevoked]]
    *
    * @param revokedTps The list of partitions that were revoked from the consumer
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onRevoke(revokedTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -35,7 +35,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsAssigned]]
    *
    * @param assignedTps The list of partitions that are now assigned to the consumer (may include partitions previously assigned to the consumer)
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onAssign(assignedTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -44,7 +44,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsLost]]
    *
    * @param lostTps The list of partitions that are no longer valid
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onLost(lostTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit
 
@@ -53,7 +53,7 @@ trait PartitionAssignmentHandler {
    * See [[org.apache.kafka.clients.consumer.ConsumerRebalanceListener#onPartitionsRevoked]]
    *
    * @param currentTps The list of partitions that are currently assigned to the consumer
-   * @param consumer The [[akka.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
+   * @param consumer The [[org.apache.pekko.kafka.RestrictedConsumer]] gives some access to the internally used [[org.apache.kafka.clients.consumer.Consumer Consumer]]
    */
   def onStop(currentTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit
 }
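
Note: a sketch of implementing this trait and attaching it to a subscription; it only logs, though calling `commitSync` on the RestrictedConsumer would also be legal in these callbacks. The topic name is a placeholder.

    import org.apache.pekko.kafka.{ RestrictedConsumer, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.PartitionAssignmentHandler
    import org.apache.kafka.common.TopicPartition

    class LoggingRebalanceHandler extends PartitionAssignmentHandler {
      override def onRevoke(revokedTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Revoked: $revokedTps")
      override def onAssign(assignedTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Assigned: $assignedTps")
      override def onLost(lostTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Lost: $lostTps")
      override def onStop(currentTps: Set[TopicPartition], consumer: RestrictedConsumer): Unit =
        println(s"Stopping with: $currentTps")
    }

    val subscription =
      Subscriptions.topics("events").withPartitionAssignmentHandler(new LoggingRebalanceHandler)
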
diff --git a/core/src/main/scala/akka/kafka/scaladsl/Producer.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Producer.scala
similarity index 67%
rename from core/src/main/scala/akka/kafka/scaladsl/Producer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/Producer.scala
index f3ade013..3a6f2fda 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/Producer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Producer.scala
@@ -3,16 +3,16 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.annotation.ApiMayChange
-import akka.kafka.ConsumerMessage.Committable
-import akka.kafka.ProducerMessage._
-import akka.kafka.internal.{ CommittingProducerSinkStage, DefaultProducerStage }
-import akka.kafka.{ CommitterSettings, ConsumerMessage, ProducerSettings }
-import akka.stream.ActorAttributes
-import akka.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink }
-import akka.{ Done, NotUsed }
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.ConsumerMessage.Committable
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.internal.{ CommittingProducerSinkStage, DefaultProducerStage }
+import org.apache.pekko.kafka.{ CommitterSettings, ConsumerMessage, ProducerSettings }
+import org.apache.pekko.stream.ActorAttributes
+import org.apache.pekko.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink }
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.concurrent.Future
@@ -57,11 +57,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -80,11 +80,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is always a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -104,11 +104,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -124,11 +124,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and commits the offset
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, but commits the offset
    *
    * Note that there is a risk that something fails after publishing but before
    * committing, so it is "at-least once delivery" semantics.
@@ -146,7 +146,7 @@ object Producer {
   /**
    * Create a flow to publish records to Kafka topics and then pass it on.
    *
-   * The records must be wrapped in a [[akka.kafka.ProducerMessage.Message Message]] and continue in the stream as [[akka.kafka.ProducerMessage.Result Result]].
+   * The records must be wrapped in a [[org.apache.pekko.kafka.ProducerMessage.Message Message]] and continue in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]].
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -169,11 +169,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -197,11 +197,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * This flow is intended to be used with Akka's [flow with context](https://doc.akka.io/docs/akka/current/stream/operators/Flow/asFlowWithContext.html).
    *
@@ -218,7 +218,7 @@ object Producer {
   /**
    * Create a flow to publish records to Kafka topics and then pass it on.
    *
-   * The records must be wrapped in a [[akka.kafka.ProducerMessage.Message Message]] and continue in the stream as [[akka.kafka.ProducerMessage.Result Result]].
+   * The records must be wrapped in a [[org.apache.pekko.kafka.ProducerMessage.Message Message]] and continue in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]].
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -238,11 +238,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support the possibility to pass through arbitrary data, which can for example be a [[ConsumerMessage.CommittableOffset CommittableOffset]]
    * or [[ConsumerMessage.CommittableOffsetBatch CommittableOffsetBatch]] that can
@@ -266,11 +266,11 @@ object Producer {
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and continues in the stream as [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * This flow is intended to be used with Akka's [flow with context](https://doc.akka.io/docs/akka/current/stream/operators/Flow/asFlowWithContext.html).
    *
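
Note: the conditional publishing described above, sketched with flexiFlow; the topic "numbers" is a placeholder, every tenth element is passed through without being published, and `producerSettings` is as before.

    import org.apache.pekko.kafka.ProducerMessage
    import org.apache.pekko.kafka.scaladsl.Producer
    import org.apache.pekko.stream.scaladsl.{ Sink, Source }
    import org.apache.kafka.clients.producer.ProducerRecord

    val done =
      Source(1 to 100)
        .map { n =>
          if (n % 10 == 0)
            ProducerMessage.passThrough[String, String, Int](n) // publishes nothing
          else
            ProducerMessage.single(new ProducerRecord("numbers", n.toString, n.toString), n)
        }
        .via(Producer.flexiFlow(producerSettings))
        .map(_.passThrough) // every result carries the pass-through value
        .runWith(Sink.ignore)
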
diff --git a/core/src/main/scala/akka/kafka/scaladsl/SendProducer.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/SendProducer.scala
similarity index 78%
rename from core/src/main/scala/akka/kafka/scaladsl/SendProducer.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/SendProducer.scala
index 60aa0420..58cd7aba 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/SendProducer.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/SendProducer.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.Done
-import akka.actor.{ ActorSystem, ClassicActorSystemProvider }
-import akka.kafka.ProducerMessage._
-import akka.kafka.ProducerSettings
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.Done
+import org.apache.pekko.actor.{ ActorSystem, ClassicActorSystemProvider }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.util.JavaDurationConverters._
 import org.apache.kafka.clients.producer.{ Callback, ProducerRecord, RecordMetadata }
 
 import scala.concurrent.{ ExecutionContext, Future, Promise }
@@ -28,11 +28,11 @@ final class SendProducer[K, V] private (val settings: ProducerSettings[K, V], sy
    *
    * It publishes records to Kafka topics conditionally:
    *
-   * - [[akka.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and completes the future with [[akka.kafka.ProducerMessage.Result Result]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.Message Message]] publishes a single message to its topic, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.Result Result]]
    *
-   * - [[akka.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and completes the future with [[akka.kafka.ProducerMessage.MultiResult MultiResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.MultiMessage MultiMessage]] publishes all messages in its `records` field, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.MultiResult MultiResult]]
    *
-   * - [[akka.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and completes the future with [[akka.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
+   * - [[org.apache.pekko.kafka.ProducerMessage.PassThroughMessage PassThroughMessage]] does not publish anything, and completes the future with [[org.apache.pekko.kafka.ProducerMessage.PassThroughResult PassThroughResult]]
    *
    * The messages support passing through arbitrary data.
    */
diff --git a/core/src/main/scala/akka/kafka/scaladsl/Transactional.scala b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Transactional.scala
similarity index 90%
rename from core/src/main/scala/akka/kafka/scaladsl/Transactional.scala
rename to core/src/main/scala/org/apache/pekko/kafka/scaladsl/Transactional.scala
index 73c45d2b..526d519f 100644
--- a/core/src/main/scala/akka/kafka/scaladsl/Transactional.scala
+++ b/core/src/main/scala/org/apache/pekko/kafka/scaladsl/Transactional.scala
@@ -3,22 +3,22 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.annotation.{ ApiMayChange, InternalApi }
-import akka.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
-import akka.kafka.ProducerMessage._
-import akka.kafka.internal.{
+import org.apache.pekko.annotation.{ ApiMayChange, InternalApi }
+import org.apache.pekko.kafka.ConsumerMessage.{ PartitionOffset, TransactionalMessage }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.internal.{
   TransactionalProducerStage,
   TransactionalSource,
   TransactionalSourceWithOffsetContext,
   TransactionalSubSource
 }
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.{ AutoSubscription, ConsumerMessage, ConsumerSettings, ProducerSettings, Subscription }
-import akka.stream.ActorAttributes
-import akka.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink, Source, SourceWithContext }
-import akka.{ Done, NotUsed }
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.{ AutoSubscription, ConsumerMessage, ConsumerSettings, ProducerSettings, Subscription }
+import org.apache.pekko.stream.ActorAttributes
+import org.apache.pekko.stream.scaladsl.{ Flow, FlowWithContext, Keep, Sink, Source, SourceWithContext }
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.TopicPartition
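
As a quick orientation to this API, a hedged sketch of a transactional consume-transform-produce pass; `consumerSettings`, `producerSettings`, the transactional id, and the topic names are assumptions:

```scala
import org.apache.pekko.kafka.{ ProducerMessage, Subscriptions }
import org.apache.pekko.kafka.scaladsl.Transactional
import org.apache.pekko.stream.scaladsl.Keep
import org.apache.kafka.clients.producer.ProducerRecord

// consumerSettings and producerSettings are assumed to be in scope,
// along with an implicit ActorSystem for running the stream.
val stream =
  Transactional
    .source(consumerSettings, Subscriptions.topics("source-topic"))
    .map { msg =>
      // Pass the partition offset along so it is committed within the transaction.
      ProducerMessage.single(
        new ProducerRecord[String, String]("sink-topic", msg.record.key, msg.record.value),
        msg.partitionOffset)
    }
    .toMat(Transactional.sink(producerSettings, "transactional-id"))(Keep.both)
    .run()
```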
 
diff --git a/docs/src/main/paradox/atleastonce.md b/docs/src/main/paradox/atleastonce.md
index 904d8093..014a6d4c 100644
--- a/docs/src/main/paradox/atleastonce.md
+++ b/docs/src/main/paradox/atleastonce.md
@@ -45,7 +45,7 @@ Messages from committable sources should be processed in order, otherwise a larg
 
 Reordering would be acceptable if the original order was reconstituted before committing the offsets, but that is a fairly complex and possibly brittle process that we will not consider here.
 
-Using `mapAsync` is safe since it preserves the order of messages. That is in contrast to `mapAsyncUnordered` which would not be safe to use here. As indicated in the @extref[Akka Streams documentation](akka:/stream/stream-flows-and-basics.html#stream-ordering) almost all stages will preserve input ordering.
+Using `mapAsync` is safe since it preserves the order of messages. That is in contrast to `mapAsyncUnordered` which would not be safe to use here. As indicated in the @extref[Pekko Streams documentation](pekko:/stream/stream-flows-and-basics.html#stream-ordering) almost all stages will preserve input ordering.
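
To make the ordering guarantee concrete, a small self-contained sketch (plain streams, no Kafka) showing that `mapAsync` emits results in upstream order even when the futures complete out of order:

```scala
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.scaladsl.Source
import scala.concurrent.Future
import scala.util.Random

implicit val system: ActorSystem = ActorSystem("ordering-sketch")
import system.dispatcher

Source(1 to 10)
  .mapAsync(parallelism = 4) { n =>
    Future {
      Thread.sleep(Random.nextInt(50)) // futures may complete out of order...
      n
    }
  }
  .runForeach(n => println(s"emitted in order: $n")) // ...but elements emit in order
```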
 
 ### Using groupBy
 
diff --git a/docs/src/main/paradox/cluster-sharding.md b/docs/src/main/paradox/cluster-sharding.md
index 9623f3b7..da9b8c88 100644
--- a/docs/src/main/paradox/cluster-sharding.md
+++ b/docs/src/main/paradox/cluster-sharding.md
@@ -3,7 +3,7 @@ project.description: Alpakka Kafka provides a module to use Kafka with Akka Clus
 ---
 # Akka Cluster Sharding
 
-Akka Cluster allows the user to use an @extref[external shard allocation](akka:/typed/cluster-sharding.html#external-shard-allocation) strategy in order to give the user more control over how many shards are created and what cluster nodes they are assigned to. 
+Akka Cluster allows the user to use an @extref[external shard allocation](pekko:/typed/cluster-sharding.html#external-shard-allocation) strategy in order to give the user more control over how many shards are created and what cluster nodes they are assigned to. 
 If you consume Kafka messages into your Akka Cluster application then it's possible to run an Alpakka Kafka Consumer on each cluster node and co-locate Kafka partitions with Akka Cluster shards. 
 When partitions and shards are co-located together then there is less chance that a message must be transmitted over the network by the Akka Cluster Shard Coordinator to a destination user sharded entity.
 
@@ -30,8 +30,8 @@ This module contains an Akka extension called `KafkaClusterSharding` and depends
 
 There are two steps required to setup the cluster sharding module.
 
-* Initialize Akka Cluster Sharding with a @scaladoc[ShardingMessageExtractor](akka.cluster.sharding.typed.ShardingMessageExtractor) to route Kafka consumed messages to the correct Akka Cluster shard and user entity.
-* Use a provided Rebalance Listener in your @scaladoc[ConsumerSettings](akka.kafka.ConsumerSettings) to update the external shard allocation at runtime when Kafka Consumer Group rebalances occur.
+* Initialize Akka Cluster Sharding with a @scaladoc[ShardingMessageExtractor](org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor) to route Kafka consumed messages to the correct Akka Cluster shard and user entity.
+* Use a provided Rebalance Listener in your @scaladoc[ConsumerSettings](org.apache.pekko.kafka.ConsumerSettings) to update the external shard allocation at runtime when Kafka Consumer Group rebalances occur.
 
 @@@ note
 
@@ -42,8 +42,8 @@ It's a self-contained example that can run on a developer's laptop.
 
 ## Sharding Message Extractors
 
-To setup the @scaladoc[ShardingMessageExtractor](akka.cluster.sharding.typed.ShardingMessageExtractor) pick a factory method in the `KafkaClusterSharding` Akka extension that best fits your use case. 
-This module provides two kinds of extractors, extractors for entities that are within a @scaladoc[ShardingEnvelope](akka.cluster.sharding.typed.ShardingEnvelope) and without.  
+To set up the @scaladoc[ShardingMessageExtractor](org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor), pick a factory method in the `KafkaClusterSharding` Akka extension that best fits your use case. 
+This module provides two kinds of extractors: one for entities that are wrapped in a @scaladoc[ShardingEnvelope](org.apache.pekko.cluster.sharding.typed.ShardingEnvelope) and one for entities that are not.  
 They're called `messageExtractor` and `messageExtractorNoEnvelope` respectively.
 
 To route Kafka messages to the correct user entity we must use the same algorithm used to define the Kafka partition for the consumed message. 
@@ -52,7 +52,7 @@ The input to this algorithm is the entity key and the number of partitions used
 Therefore it's critical to use the same Kafka message key (sharded entity id) and number of Kafka topic partitions (shards). 
 The message extractors can optionally look up the number of shards given a topic name, or the user can provide the number of shards explicitly.
 
-To get the @scaladoc[ShardingMessageExtractor](akka.cluster.sharding.typed.ShardingMessageExtractor) call the `messageExtractor` overload that's suitable for your use case.  
+To get the @scaladoc[ShardingMessageExtractor](org.apache.pekko.cluster.sharding.typed.ShardingMessageExtractor), call the `messageExtractor` overload that's suitable for your use case.  
 In the following example we asynchronously request an extractor that does not use a sharding envelope and will use the same number of partitions as the given topic name.
 
 Given a user entity.
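
For orientation, a hedged sketch of requesting such an extractor, using a hypothetical `UserEvent` entity whose `userId` doubles as the Kafka message key; the topic name and timeout are stand-ins:

```scala
import org.apache.pekko.actor.typed.ActorSystem
import org.apache.pekko.kafka.ConsumerSettings
import org.apache.pekko.kafka.cluster.sharding.KafkaClusterSharding
import scala.concurrent.Future
import scala.concurrent.duration._

// Hypothetical entity; userId is used both as Kafka message key and entity id.
final case class UserEvent(userId: String, payload: String)

def shardingExtractor(system: ActorSystem[_], settings: ConsumerSettings[String, Array[Byte]])
    : Future[KafkaClusterSharding.KafkaShardingNoEnvelopeExtractor[UserEvent]] =
  KafkaClusterSharding(system.classicSystem).messageExtractorNoEnvelope(
    topic = "user-events", // shard count is looked up from this topic
    timeout = 10.seconds,
    entityIdExtractor = (event: UserEvent) => event.userId,
    settings = settings)
```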
@@ -81,22 +81,22 @@ Java
 
 ## Rebalance Listener
 
-The Rebalance Listener is a pre-defined Actor that will handle @scaladoc[ConsumerRebalanceEvents](akka.kafka.ConsumerRebalanceEvent) that will update the Akka Cluster External Sharding strategy when subscribed partitions are re-assigned to consumers running on different cluster nodes. 
+The Rebalance Listener is a pre-defined Actor that handles @scaladoc[ConsumerRebalanceEvents](org.apache.pekko.kafka.ConsumerRebalanceEvent) and updates the Akka Cluster External Sharding strategy when subscribed partitions are re-assigned to consumers running on different cluster nodes. 
 This makes sure that shards remain local to Kafka Consumers after a consumer group rebalance.
-The Rebalance Listener is returned as a Typed @scaladoc[ActorRef[ConsumerRebalanceEvent]](akka.actor.typed.ActorRef) and must be converted to a classic @scaladoc[ActorRef](akka.actor.ActorRef) before being passed to @scaladoc[ConsumerSettings](akka.kafka.ConsumerSettings).
+The Rebalance Listener is returned as a Typed @scaladoc[ActorRef[ConsumerRebalanceEvent]](org.apache.pekko.actor.typed.ActorRef) and must be converted to a classic @scaladoc[ActorRef](org.apache.pekko.actor.ActorRef) before being passed to @scaladoc[ConsumerSettings](org.apache.pekko.kafka.ConsumerSettings).
 
 @@@ note
 
-It's recommended to use the same value for both the Kafka Consumer Group ID and the @scaladoc[EntityTypeKey](akka.cluster.sharding.typed.scaladsl.EntityTypeKey).
-This allows you to create multiple Kafka Consumer Groups that consume the same type of messages from the same topic, but are routed to different @scaladoc[Behaviors](akka.actor.typed.Behavior) to be processed in a different way.
+It's recommended to use the same value for both the Kafka Consumer Group ID and the @scaladoc[EntityTypeKey](org.apache.pekko.cluster.sharding.typed.scaladsl.EntityTypeKey).
+This allows you to create multiple Kafka Consumer Groups that consume the same type of messages from the same topic but route them to different @scaladoc[Behaviors](org.apache.pekko.actor.typed.Behavior) to be processed in different ways.
 
 For example, a `user-events` topic is consumed by two consumer groups.
 One consumer group is used to maintain an up-to-date view of the user's profile and the other is used to represent an aggregate history of the types of user events.
-The same message type is used by separate Alpakka Kafka consumers, but the messages are routed to different Akka Cluster Sharding Coordinators that are setup to use separate @scaladoc[Behaviors](akka.actor.typed.Behavior).  
+The same message type is used by separate Alpakka Kafka consumers, but the messages are routed to different Akka Cluster Sharding Coordinators that are set up to use separate @scaladoc[Behaviors](org.apache.pekko.actor.typed.Behavior).  
 
 @@@ 
 
-Create the rebalance listener using the extension and pass it into an Alpakka Kafka @scaladoc[Subscription](akka.kafka.Subscription).
+Create the rebalance listener using the extension and pass it into an Alpakka Kafka @scaladoc[Subscription](org.apache.pekko.kafka.Subscription).
 
 Scala
 : @@snip [snip](/tests/src/test/scala/docs/scaladsl/ClusterShardingExample.scala) { #rebalance-listener }
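
For orientation, a hedged sketch of that wiring, reusing the hypothetical `UserEvent` from the sketch above and assuming a typed `system` in scope; the group id and topic name are stand-ins:

```scala
import org.apache.pekko.actor.typed.scaladsl.adapter._
import org.apache.pekko.cluster.sharding.typed.scaladsl.EntityTypeKey
import org.apache.pekko.kafka.Subscriptions
import org.apache.pekko.kafka.cluster.sharding.KafkaClusterSharding

val groupId = "user-events-group" // stand-in consumer group id
val typeKey = EntityTypeKey[UserEvent](groupId) // same value as the group id

// Typed ActorRef[ConsumerRebalanceEvent], converted to classic for the subscription.
val rebalanceListener = KafkaClusterSharding(system.classicSystem).rebalanceListener(typeKey)

val subscription = Subscriptions
  .topics("user-events")
  .withRebalanceListener(rebalanceListener.toClassic)
```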
diff --git a/docs/src/main/paradox/consumer-metadata.md b/docs/src/main/paradox/consumer-metadata.md
index 243ffe2b..f03b1805 100644
--- a/docs/src/main/paradox/consumer-metadata.md
+++ b/docs/src/main/paradox/consumer-metadata.md
@@ -5,9 +5,9 @@ project.description: Access Kafka consumer metadata by sending messages to the a
 
 ## Metadata Client
 
-`MetadataClient` is a thin wrapper for @apidoc[akka.kafka.KafkaConsumerActor$] hiding the ask calls and mapping to the correct response types.
+`MetadataClient` is a thin wrapper for @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] hiding the ask calls and mapping to the correct response types.
 
-To access the Kafka consumer metadata you need to create the @apidoc[akka.kafka.KafkaConsumerActor$] as described in the @ref[Consumer documentation](consumer.md#sharing-the-kafkaconsumer-instance) pass it to `MetadataClient`'s factory method `create`.
+To access the Kafka consumer metadata you need to create the @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] as described in the @ref[Consumer documentation](consumer.md#sharing-the-kafkaconsumer-instance) and pass it to `MetadataClient`'s factory method `create`.
 
 Another approach to creating the metadata client is to pass the `ConsumerSettings` and `ActorSystem` objects to the factory method. The metadata client then manages the internal actor and stops it when the `close` method is called.
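
For orientation, a minimal sketch of the settings-based factory variant, assuming a broker at `localhost:9092`:

```scala
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.kafka.ConsumerSettings
import org.apache.pekko.kafka.scaladsl.MetadataClient
import org.apache.pekko.util.Timeout
import org.apache.kafka.common.serialization.StringDeserializer
import scala.concurrent.duration._

implicit val system: ActorSystem = ActorSystem("metadata-sketch")
import system.dispatcher

val settings = ConsumerSettings(system, new StringDeserializer, new StringDeserializer)
  .withBootstrapServers("localhost:9092") // assumed broker address
  .withGroupId("metadata-sketch-group")

// The client manages an internal consumer actor and stops it on close().
val metadataClient = MetadataClient.create(settings, Timeout(5.seconds))
metadataClient.listTopics().foreach { topics =>
  println(s"Topics: ${topics.keys.mkString(", ")}")
  metadataClient.close()
}
```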
 
@@ -17,21 +17,21 @@ The metadata the `MetadataClient` provides is documented in the @javadoc[Kafka C
 
 The supported metadata are
 
-| Metadata | Response type |
-|-------| ------- |
-| Topics list | @scala[Future[Map[String, List[PartitionInfo]]]]@java[CompletionStage[java.util.Map[java.lang.String, java.util.List[PartitionInfo]]]] |
-| Partitions | @scala[Future[List[PartitionInfo]]]@java[CompletionStage[java.util.List[PartitionInfo]]] |
-| Beginning offsets | @scala[Future[Map[TopicPartition, Long]]]@java[CompletionStage[java.util.Map[TopicPartition, java.lang.Long]]] |
-| End offsets | @scala[Future[Map[TopicPartition, Long]]]@java[CompletionStage[java.util.Map[TopicPartition, java.lang.Long]]] |
-| Committed offsets | @scala[Future[Map[TopicPartition, OffsetAndMetadata]]]@java[CompletionStage[java.util.Map[TopicPartition, OffsetAndMetadata]]] |
+| Metadata          | Response type                                                                                                                          |
+|-------------------|----------------------------------------------------------------------------------------------------------------------------------------|
+| Topics list       | @scala[Future[Map[String, List[PartitionInfo]]]]@java[CompletionStage[java.util.Map[java.lang.String, java.util.List[PartitionInfo]]]] |
+| Partitions        | @scala[Future[List[PartitionInfo]]]@java[CompletionStage[java.util.List[PartitionInfo]]]                                               |
+| Beginning offsets | @scala[Future[Map[TopicPartition, Long]]]@java[CompletionStage[java.util.Map[TopicPartition, java.lang.Long]]]                         |
+| End offsets       | @scala[Future[Map[TopicPartition, Long]]]@java[CompletionStage[java.util.Map[TopicPartition, java.lang.Long]]]                         |
+| Committed offsets | @scala[Future[Map[TopicPartition, OffsetAndMetadata]]]@java[CompletionStage[java.util.Map[TopicPartition, OffsetAndMetadata]]]         |
    
 @@@ warning
 
-Processing of these requests blocks the actor loop. The @apidoc[akka.kafka.KafkaConsumerActor$] is configured to run on its own dispatcher, so just as the other remote calls to Kafka, the blocking happens within a designated thread pool.
+Processing of these requests blocks the actor loop. The @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] is configured to run on its own dispatcher, so, just as with the other remote calls to Kafka, the blocking happens within a designated thread pool.
 
 However, calling these during consuming might affect performance and even cause timeouts in extreme cases.
 
-Please consider to use a dedicated @apidoc[akka.kafka.KafkaConsumerActor$] to create metadata client requests against.
+Please consider using a dedicated @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] for metadata client requests.
 
 @@@
 
@@ -46,19 +46,19 @@ Java
 
 ## Accessing metadata using KafkaConsumerActor
 
-To access the Kafka consumer metadata you need to create the @apidoc[akka.kafka.KafkaConsumerActor$] as described in the @ref[Consumer documentation](consumer.md#sharing-the-kafkaconsumer-instance) and send messages from @apidoc[Metadata$] to it.
+To access the Kafka consumer metadata you need to create the @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] as described in the @ref[Consumer documentation](consumer.md#sharing-the-kafkaconsumer-instance) and send messages from @apidoc[Metadata$] to it.
 
 ## Supported metadata by KafkaConsumerActor
 
 The supported metadata are
 
-| Request | Reply | 
-|---------|-------|
-| ListTopics | Topics | 
-| GetPartitionsFor | PartitionsFor |
+| Request             | Reply            | 
+|---------------------|------------------|
+| ListTopics          | Topics           | 
+| GetPartitionsFor    | PartitionsFor    |
 | GetBeginningOffsets | BeginningOffsets |
-| GetEndOffsets | EndOffsets |
-| GetOffsetsForTimes | OffsetsForTimes |
+| GetEndOffsets       | EndOffsets       |
+| GetOffsetsForTimes  | OffsetsForTimes  |
 | GetCommittedOffsets | CommittedOffsets |
 
 These requests are blocking within the Kafka client library up to a timeout configured by `metadata-request-timeout` or `ConsumerSettings.withMetadataRequestTimeout` respectively.
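
A hedged sketch of issuing one of these requests with the classic ask pattern, assuming `consumerActor` was created as described above:

```scala
import org.apache.pekko.actor.ActorRef
import org.apache.pekko.kafka.Metadata
import org.apache.pekko.pattern.ask
import org.apache.pekko.util.Timeout
import scala.concurrent.Future
import scala.concurrent.duration._

implicit val timeout: Timeout = Timeout(5.seconds)

// Replies arrive as Metadata.Topics wrapping a Try; a failed Try signals a Kafka error.
def listTopics(consumerActor: ActorRef): Future[Metadata.Topics] =
  (consumerActor ? Metadata.ListTopics).mapTo[Metadata.Topics]
```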
diff --git a/docs/src/main/paradox/consumer-rebalance.md b/docs/src/main/paradox/consumer-rebalance.md
index ed9e40f8..20a1f90d 100644
--- a/docs/src/main/paradox/consumer-rebalance.md
+++ b/docs/src/main/paradox/consumer-rebalance.md
@@ -54,8 +54,8 @@ from consuming from specific topic partitions. Two kinds of messages will be sen
 * @apidoc[TopicPartitionsAssigned]
 * @apidoc[TopicPartitionsRevoked]
 
-You can use a typed @apidoc[akka.actor.typed.ActorRef] to implement your rebalance event listener by converting it into a classic actor ref.
-See the example below and read the @extref[Coexistence](akka:/typed/coexisting.html) page of the Akka Documentation for more details on Akka Classic and Typed interoperability.
+You can use a typed @apidoc[org.apache.pekko.actor.typed.ActorRef] to implement your rebalance event listener by converting it into a classic actor ref.
+See the example below and read the @extref[Coexistence](pekko:/typed/coexisting.html) page of the Akka Documentation for more details on Akka Classic and Typed interoperability.
 
 Scala
 : @@ snip [snip](/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala) { #withTypedRebalanceListenerActor }
diff --git a/docs/src/main/paradox/consumer.md b/docs/src/main/paradox/consumer.md
index 08bbc906..dc0ac306 100644
--- a/docs/src/main/paradox/consumer.md
+++ b/docs/src/main/paradox/consumer.md
@@ -16,29 +16,29 @@ Alpakka Kafka offers a large variety of consumers that connect to Kafka and stre
 
 These factory methods are part of the @apidoc[Consumer$] API.
 
-| Offsets handling                        | Partition aware | Subscription        | Shared consumer | Factory method | Stream element type |
-|-----------------------------------------|-----------------|---------------------|-----------------|----------------|---------------------|
-| No (auto commit can be enabled)         | No              | Topic or Partition  | No              | `plainSource` | `ConsumerRecord` |
-| No (auto commit can be enabled)         | No              | Partition           | Yes             | `plainExternalSource` | `ConsumerRecord` |
-| Explicit committing                     | No              | Topic or Partition  | No              | `committableSource` | `CommittableMessage` |
-| Explicit committing                     | No              | Partition           | Yes             | `committableExternalSource` | `CommittableMessage` |
-| Explicit committing with metadata       | No              | Topic or Partition  | No              | `commitWithMetadataSource` | `CommittableMessage` |
-| Explicit committing (with metadata)     | No              | Topic or Partition  | No              | `sourceWithOffsetContext` | `ConsumerRecord` |
-| Offset committed per element            | No              | Topic or Partition  | No              | `atMostOnceSource` | `ConsumerRecord` |
-| No (auto commit can be enabled)         | Yes             | Topic or Partition  | No              | `plainPartitionedSource` | `(TopicPartition, Source[ConsumerRecord, ..])` |
-| External to Kafka                       | Yes             | Topic or Partition  | No              | `plainPartitionedManualOffsetSource` | `(TopicPartition, Source[ConsumerRecord, ..])` |
-| Explicit committing                     | Yes             | Topic or Partition  | No              | `committablePartitionedSource` | `(TopicPartition, Source[CommittableMessage, ..])`     |
-| External to Kafka & Explicit Committing | Yes             | Topic or Partition  | No              | `committablePartitionedManualOffsetSource` | `(TopicPartition, Source[CommittableMessage, ..])` |
-| Explicit committing with metadata       | Yes             | Topic or Partition  | No              | `commitWithMetadataPartitionedSource` | `(TopicPartition, Source[CommittableMessage, ..])`  |
+| Offsets handling                        | Partition aware | Subscription       | Shared consumer | Factory method                             | Stream element type                                |
+|-----------------------------------------|-----------------|--------------------|-----------------|--------------------------------------------|----------------------------------------------------|
+| No (auto commit can be enabled)         | No              | Topic or Partition | No              | `plainSource`                              | `ConsumerRecord`                                   |
+| No (auto commit can be enabled)         | No              | Partition          | Yes             | `plainExternalSource`                      | `ConsumerRecord`                                   |
+| Explicit committing                     | No              | Topic or Partition | No              | `committableSource`                        | `CommittableMessage`                               |
+| Explicit committing                     | No              | Partition          | Yes             | `committableExternalSource`                | `CommittableMessage`                               |
+| Explicit committing with metadata       | No              | Topic or Partition | No              | `commitWithMetadataSource`                 | `CommittableMessage`                               |
+| Explicit committing (with metadata)     | No              | Topic or Partition | No              | `sourceWithOffsetContext`                  | `ConsumerRecord`                                   |
+| Offset committed per element            | No              | Topic or Partition | No              | `atMostOnceSource`                         | `ConsumerRecord`                                   |
+| No (auto commit can be enabled)         | Yes             | Topic or Partition | No              | `plainPartitionedSource`                   | `(TopicPartition, Source[ConsumerRecord, ..])`     |
+| External to Kafka                       | Yes             | Topic or Partition | No              | `plainPartitionedManualOffsetSource`       | `(TopicPartition, Source[ConsumerRecord, ..])`     |
+| Explicit committing                     | Yes             | Topic or Partition | No              | `committablePartitionedSource`             | `(TopicPartition, Source[CommittableMessage, ..])` |
+| External to Kafka & Explicit Committing | Yes             | Topic or Partition | No              | `committablePartitionedManualOffsetSource` | `(TopicPartition, Source[CommittableMessage, ..])` |
+| Explicit committing with metadata       | Yes             | Topic or Partition | No              | `commitWithMetadataPartitionedSource`      | `(TopicPartition, Source[CommittableMessage, ..])` |
 
 ### Transactional consumers
 
 These factory methods are part of the @apidoc[Transactional$]. For details see @ref[Transactions](transactions.md).
 
-| Offsets handling                  | Partition aware | Shared consumer | Factory method | Stream element type |
-|-----------------------------------|-----------------|-----------------|----------------|---------------------|
-| Transactional                     | No              | No              | `Transactional.source` | `TransactionalMessage` |
-| Transactional                     | No              | No              | `Transactional.sourceWithOffsetContext` | `ConsumerRecord` |
+| Offsets handling | Partition aware | Shared consumer | Factory method                          | Stream element type    |
+|------------------|-----------------|-----------------|-----------------------------------------|------------------------|
+| Transactional    | No              | No              | `Transactional.source`                  | `TransactionalMessage` |
+| Transactional    | No              | No              | `Transactional.sourceWithOffsetContext` | `ConsumerRecord`       |
 
 
 ## Settings
@@ -228,7 +228,7 @@ How to achieve at-least-once delivery semantics is covered in @ref:[At-Least-Onc
 
 For cases when you need to read messages from one topic, transform or enrich them, and then write to another topic you can use @apidoc[Consumer.committableSource](Consumer$) and connect it to a @apidoc[Producer.committableSink](Producer$). The `committableSink` will commit the offset back to the consumer regularly.
 
-The `committableSink` accepts implementations @apidoc[ProducerMessage.Envelope] that contain the offset to commit the consumption of the originating message (of type @apidoc[akka.kafka.ConsumerMessage.Committable]). See @ref[Producing messages](producer.md#producing-messages) about different implementations of @apidoc[ProducerMessage.Envelope].
+The `committableSink` accepts implementations of @apidoc[ProducerMessage.Envelope] that contain the offset to commit the consumption of the originating message (of type @apidoc[org.apache.pekko.kafka.ConsumerMessage.Committable]). See @ref[Producing messages](producer.md#producing-messages) about different implementations of @apidoc[ProducerMessage.Envelope].
 
 Scala
 : @@ snip [snip](/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala) { #consumerToProducerSink }
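
For orientation, a hedged sketch of that consume-transform-produce shape; the settings values and topic names are assumptions:

```scala
import org.apache.pekko.kafka.{ ProducerMessage, Subscriptions }
import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
import org.apache.kafka.clients.producer.ProducerRecord

// consumerSettings, producerSettings, committerSettings and an implicit
// ActorSystem are assumed to be in scope.
val control =
  Consumer
    .committableSource(consumerSettings, Subscriptions.topics("source-topic"))
    .map { msg =>
      ProducerMessage.single(
        new ProducerRecord[String, String]("target-topic", msg.record.value),
        passThrough = msg.committableOffset) // the offset travels with the record
    }
    .toMat(Producer.committableSink(producerSettings, committerSettings))(DrainingControl.apply)
    .run()
```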
@@ -268,11 +268,11 @@ Java
 ## Sharing the KafkaConsumer instance
 
 If you have many streams it can be more efficient to share the underlying @javadoc[KafkaConsumer](org.apache.kafka.clients.consumer.KafkaConsumer) instance. 
-It is shared by creating a @apidoc[akka.kafka.KafkaConsumerActor$]. 
+It is shared by creating a @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$]. 
 You need to create the actor and stop it by sending `KafkaConsumerActor.Stop` when it is no longer needed. 
-You pass the classic @apidoc[akka.actor.ActorRef] as a parameter to the @apidoc[Consumer](Consumer$) factory methods.
+You pass the classic @apidoc[org.apache.pekko.actor.ActorRef] as a parameter to the @apidoc[Consumer](Consumer$) factory methods.
 
-When using a typed @apidoc[akka.actor.typed.ActorSystem] you can create the @apidoc[akka.kafka.KafkaConsumerActor$] by using the Akka typed adapter to create a classic @apidoc[akka.actor.ActorRef].
+When using a typed @apidoc[org.apache.pekko.actor.typed.ActorSystem] you can create the @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$] by using the Akka typed adapter to create a classic @apidoc[org.apache.pekko.actor.ActorRef].
 Then you can carry on using the existing Alpakka Kafka API.
 
 Scala
@@ -281,7 +281,7 @@ Scala
 Java
 : @@ snip [snip](/tests/src/test/java/docs/javadsl/ConsumerExampleTest.java) { #consumerActorTyped }
 
-Using the @apidoc[akka.kafka.KafkaConsumerActor$].
+Using the @apidoc[org.apache.pekko.kafka.KafkaConsumerActor$].
 
 Scala
 : @@ snip [snip](/tests/src/test/scala/docs/scaladsl/PartitionExamples.scala) { #consumerActor }
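
A hedged sketch of the sharing pattern, assuming `consumerSettings` and a classic `system` in scope; the topic and partitions are stand-ins:

```scala
import org.apache.pekko.actor.ActorRef
import org.apache.pekko.kafka.{ KafkaConsumerActor, Subscriptions }
import org.apache.pekko.kafka.scaladsl.Consumer
import org.apache.kafka.common.TopicPartition

// Several streams share the one underlying KafkaConsumer via this actor.
val consumerActor: ActorRef = system.actorOf(KafkaConsumerActor.props(consumerSettings))

val partition0 = Consumer.plainExternalSource[String, String](
  consumerActor, Subscriptions.assignment(new TopicPartition("topic1", 0)))
val partition1 = Consumer.plainExternalSource[String, String](
  consumerActor, Subscriptions.assignment(new TopicPartition("topic1", 1)))

// When it is no longer needed: consumerActor ! KafkaConsumerActor.Stop
```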
@@ -307,7 +307,7 @@ Accessing of Kafka consumer metadata is possible as described in @ref[Consumer M
 
 
 ## Controlled shutdown
-The @apidoc[Source] created with @apidoc[Consumer.plainSource](Consumer$) and similar methods materializes to a @apidoc[akka.kafka.(javadsl|scaladsl).Consumer.Control] instance. This can be used to stop the stream in a controlled manner.
+The @apidoc[Source] created with @apidoc[Consumer.plainSource](Consumer$) and similar methods materializes to a @apidoc[org.apache.pekko.kafka.(javadsl|scaladsl).Consumer.Control] instance. This can be used to stop the stream in a controlled manner.
 
 When using external offset storage, a call to `Consumer.Control.shutdown()` suffices to complete the `Source`, which starts the completion of the stream.
 
diff --git a/docs/src/main/paradox/debugging.md b/docs/src/main/paradox/debugging.md
index 033482d6..f6d10e5f 100644
--- a/docs/src/main/paradox/debugging.md
+++ b/docs/src/main/paradox/debugging.md
@@ -19,7 +19,7 @@ The Kafka client library used by the Alpakka Kafka connector uses SLF4J, as well
   version2=1.2.3
 }
 
-To enable Akka SLF4J logging, configure Akka in `application.conf` as below. Refer to the @extref[Akka documentation](akka:logging.html#slf4j) for details.
+To enable Akka SLF4J logging, configure Akka in `application.conf` as below. Refer to the @extref[Pekko documentation](pekko:logging.html#slf4j) for details.
 
 ```hocon
 akka {
diff --git a/docs/src/main/paradox/discovery.md b/docs/src/main/paradox/discovery.md
index 8e3212b9..b62a5f71 100644
--- a/docs/src/main/paradox/discovery.md
+++ b/docs/src/main/paradox/discovery.md
@@ -3,9 +3,9 @@ project.description: Akka Discovery can be used to achieve Kafka broker discover
 ---
 # Service discovery
 
-By using @extref:[Akka Discovery](akka:discovery/index.html) Alpakka Kafka may read the Kafka bootstrap server addresses from any Akka Discovery-compatible service discovery mechanism.
+By using @extref:[Pekko Discovery](pekko:discovery/index.html) Alpakka Kafka may read the Kafka bootstrap server addresses from any Akka Discovery-compatible service discovery mechanism.
 
-Akka Discovery supports Configuration (HOCON), DNS (SRV records), and aggregation of multiple discovery methods out-of-the-box. Kubernetes API, AWS API: EC2 Tag-Based Discovery, AWS API: ECS Discovery and Consul implementations for Akka Discovery are available in @extref:[Akka Management](akka-management:).
+Akka Discovery supports Configuration (HOCON), DNS (SRV records), and aggregation of multiple discovery methods out-of-the-box. Kubernetes API, AWS API: EC2 Tag-Based Discovery, AWS API: ECS Discovery and Consul implementations for Akka Discovery are available in @extref:[Pekko Management](pekko-management:).
 
 ## Dependency
 
@@ -37,7 +37,7 @@ application.conf
 Mount the @apidoc[DiscoverySupport$] in your consumer settings:
 
 Scala
-: @@snip [snip](/tests/src/test/scala/akka/kafka/ConsumerSettingsSpec.scala) { #discovery-settings }
+: @@snip [snip](/tests/src/test/scala/org/apache/pekko/kafka/ConsumerSettingsSpec.scala) { #discovery-settings }
 
 Java
 : @@snip [conf](/tests/src/test/java/docs/javadsl/ConsumerSettingsTest.java) { #discovery-settings }
@@ -61,7 +61,7 @@ application.conf
 Mount the @apidoc[DiscoverySupport$] in your producer settings:
 
 Scala
-: @@snip [conf](/tests/src/test/scala/akka/kafka/ProducerSettingsSpec.scala) { #discovery-settings }
+: @@snip [conf](/tests/src/test/scala/org/apache/pekko/kafka/ProducerSettingsSpec.scala) { #discovery-settings }
 
 Java
 : @@snip [conf](/tests/src/test/java/docs/javadsl/ProducerSettingsTest.java) { #discovery-settings }
@@ -103,7 +103,7 @@ application.conf
 
 ## Use Config (HOCON) to describe the bootstrap servers
 
-The setup below uses the built-in Akka Discovery implementation reading from Config (HOCON) files. That might be a good choice for development and testing. You may use the @extref:[Aggregate implementation](akka:discovery/index.html#discovery-method-aggregate-multiple-discovery-methods) to first use another discovery technology, before falling back to the config file.
+The setup below uses the built-in Akka Discovery implementation reading from Config (HOCON) files. That might be a good choice for development and testing. You may use the @extref:[Aggregate implementation](pekko:discovery/index.html#discovery-method-aggregate-multiple-discovery-methods) to first use another discovery technology, before falling back to the config file.
 
 application.conf
-:   @@snip [conf](/tests/src/test/scala/akka/kafka/ConsumerSettingsSpec.scala) { #discovery-with-config }
+:   @@snip [conf](/tests/src/test/scala/org/apache/pekko/kafka/ConsumerSettingsSpec.scala) { #discovery-with-config }
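
For orientation, a hedged sketch of the producer-side mounting in code, assuming a hypothetical config section `discovery-producer` that defines `discovery-method` and `service-name`:

```scala
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.kafka.ProducerSettings
import org.apache.pekko.kafka.scaladsl.DiscoverySupport
import org.apache.kafka.common.serialization.StringSerializer

implicit val system: ActorSystem = ActorSystem("discovery-sketch")

// Hypothetical config section; it must define discovery-method and service-name.
val config = system.settings.config.getConfig("discovery-producer")

val producerSettings = ProducerSettings(config, new StringSerializer, new StringSerializer)
  .withEnrichAsync(DiscoverySupport.producerBootstrapServers(config))
```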
diff --git a/docs/src/main/paradox/errorhandling.md b/docs/src/main/paradox/errorhandling.md
index 937ba6f8..6a40829e 100644
--- a/docs/src/main/paradox/errorhandling.md
+++ b/docs/src/main/paradox/errorhandling.md
@@ -18,7 +18,7 @@ Retry handling for producers is built-in into Kafka. In case of failure when sen
 
 ## Restarting the stream with a backoff stage
 
-Akka streams @extref[provides graph stages](akka:stream/stream-error.html#delayed-restarts-with-a-backoff-stage)
+Akka streams @extref[provides graph stages](pekko:stream/stream-error.html#delayed-restarts-with-a-backoff-stage)
 to gracefully restart a stream on failure, with a configurable backoff. This can be taken advantage of to restart a failing stream and its consumer with an exponential backoff, by wrapping it in a `RestartSource`.
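
A hedged sketch of such a wrapper, assuming `consumerSettings` and an implicit actor system in scope; the backoff values are illustrative only:

```scala
import org.apache.pekko.kafka.Subscriptions
import org.apache.pekko.kafka.scaladsl.Consumer
import org.apache.pekko.stream.RestartSettings
import org.apache.pekko.stream.scaladsl.{ RestartSource, Sink }
import scala.concurrent.duration._

val restartSettings =
  RestartSettings(minBackoff = 3.seconds, maxBackoff = 30.seconds, randomFactor = 0.2)

RestartSource
  .onFailuresWithBackoff(restartSettings) { () =>
    // A fresh consumer (and source) is created on every restart.
    Consumer.plainSource(consumerSettings, Subscriptions.topics("topic1"))
  }
  .runWith(Sink.foreach(record => println(record.value)))
```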
 
 Scala
diff --git a/docs/src/main/paradox/home.md b/docs/src/main/paradox/home.md
index c36825dd..5c1770a1 100644
--- a/docs/src/main/paradox/home.md
+++ b/docs/src/main/paradox/home.md
@@ -1,6 +1,6 @@
 # Overview
 
-The [Alpakka project](https://doc.akka.io/docs/alpakka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of @extref[Akka Streams](akka:stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/ [...]
+The [Apache Pekko Kafka connector project](https://pekko.apache.org/docs/pekko-connectors-kafka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of @extref[Pekko Streams](pekko:stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive [...]
 
 This **Alpakka Kafka connector** lets you connect [Apache Kafka](https://kafka.apache.org/) to Akka Streams. It was formerly known as **Akka Streams Kafka** and even **Reactive Kafka**.
 
@@ -53,15 +53,15 @@ Check even Confluent's [Versions and Interoperability](https://docs.confluent.io
 
 This connector depends on Akka 2.6.x. Note that it is important that all `akka-*` dependencies are of the same version, so it is recommended to depend on them explicitly to avoid problems with transitive dependencies causing an unlucky mix of versions.
 
-Alpakka Kafka APIs accept a typed @apidoc[akka.actor.typed.ActorSystem] or a classic @apidoc[akka.actor.ActorSystem] because both implement the @apidoc[akka.actor.ClassicActorSystemProvider] @scala[trait]@java[interface].
-There are some Alpakka Kafka APIs that only accept classic a @apidoc[akka.actor.ActorRef], such as the @ref[rebalance listener](./consumer-rebalance.md) API, but otherwise there is no difference between running Alpakka Kafka and any other Akka Streams implementation with a typed @apidoc[akka.actor.typed.ActorSystem]. 
-For more information on Akka classic and typed interoperability read the @extref[Coexistence](akka:/typed/coexisting.html) page of the Akka Documentation.
+Alpakka Kafka APIs accept a typed @apidoc[org.apache.pekko.actor.typed.ActorSystem] or a classic @apidoc[org.apache.pekko.actor.ActorSystem] because both implement the @apidoc[org.apache.pekko.actor.ClassicActorSystemProvider] @scala[trait]@java[interface].
+There are some Alpakka Kafka APIs that only accept a classic @apidoc[org.apache.pekko.actor.ActorRef], such as the @ref[rebalance listener](./consumer-rebalance.md) API, but otherwise there is no difference between running Alpakka Kafka and any other Akka Streams implementation with a typed @apidoc[org.apache.pekko.actor.typed.ActorSystem]. 
+For more information on Akka classic and typed interoperability read the @extref[Coexistence](pekko:/typed/coexisting.html) page of the Akka Documentation.
 
 The table below shows Alpakka Kafka's direct dependencies and the second tab shows all libraries it depends on transitively.
 
 @@dependencies { projectId="core" }
 
-* Akka Streams $akka.version$ @extref[documentation](akka:stream/index.html) and [sources](https://github.com/akka/akka)
+* Akka Streams $akka.version$ @extref[documentation](pekko:stream/index.html) and [sources](https://github.com/akka/akka)
 * Apache Kafka client $kafka.version$ @extref[documentation](kafka:/documentation#index) and [sources](https://github.com/apache/kafka)
 
 
diff --git a/docs/src/main/paradox/index.md b/docs/src/main/paradox/index.md
index 69f5ca87..53d76333 100644
--- a/docs/src/main/paradox/index.md
+++ b/docs/src/main/paradox/index.md
@@ -1,6 +1,6 @@
 # Alpakka Kafka Documentation
 
-The [Alpakka project](https://doc.akka.io/docs/alpakka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of @extref[Akka Streams](akka:stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/ [...]
+The [Alpakka project](https://doc.akka.io/docs/alpakka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of @extref[Pekko Streams](pekko:stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.or [...]
 
 This **Alpakka Kafka connector** lets you connect [Apache Kafka](https://kafka.apache.org/) to Akka Streams. It was formerly known as **Akka Streams Kafka** and even **Reactive Kafka**.
 
diff --git a/docs/src/main/paradox/producer.md b/docs/src/main/paradox/producer.md
index b5cbb7c3..bac2f852 100644
--- a/docs/src/main/paradox/producer.md
+++ b/docs/src/main/paradox/producer.md
@@ -63,7 +63,7 @@ Java
 
 In addition to programmatic construction of the @apidoc[ProducerSettings$] it can also be created from configuration (`application.conf`). 
 
-When creating @apidoc[ProducerSettings$] with a classic @apidoc[akka.actor.ActorSystem] or typed @apidoc[akka.actor.typed.ActorSystem] it uses the config section `akka.kafka.producer`. 
+When creating @apidoc[ProducerSettings$] with a classic @apidoc[org.apache.pekko.actor.ActorSystem] or typed @apidoc[org.apache.pekko.actor.typed.ActorSystem] it uses the config section `akka.kafka.producer`. 
 The format of these settings files are described in the [Typesafe Config Documentation](https://github.com/lightbend/config#using-hocon-the-json-superset).
 
 @@ snip [snip](/core/src/main/resources/reference.conf) { #producer-settings }
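
A minimal sketch of resolving that config section explicitly, assuming string serializers and a broker at `localhost:9092`:

```scala
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.kafka.ProducerSettings
import org.apache.kafka.common.serialization.StringSerializer

implicit val system: ActorSystem = ActorSystem("producer-settings-sketch")

// Resolve the config section mentioned above and layer overrides in code.
val config = system.settings.config.getConfig("akka.kafka.producer")
val producerSettings =
  ProducerSettings(config, new StringSerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092") // assumed broker address
```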
@@ -94,7 +94,7 @@ Java
 
 ## Producing messages
 
-Sinks and flows accept implementations of @apidoc[ProducerMessage.Envelope] as input. They contain an extra field to pass through data, the so called `passThrough`. Its value is passed through the flow and becomes available in the @apidoc[akka.kafka.ProducerMessage.Results]' `passThrough()`. It can for example hold a @apidoc[akka.kafka.ConsumerMessage.CommittableOffset] or @apidoc[ConsumerMessage.CommittableOffsetBatch] from a @apidoc[Consumer.committableSource](Consumer$) that can be co [...]
+Sinks and flows accept implementations of @apidoc[ProducerMessage.Envelope] as input. They contain an extra field to pass through data, the so-called `passThrough`. Its value is passed through the flow and becomes available in the @apidoc[org.apache.pekko.kafka.ProducerMessage.Results]' `passThrough()`. It can, for example, hold a @apidoc[org.apache.pekko.kafka.ConsumerMessage.CommittableOffset] or @apidoc[ConsumerMessage.CommittableOffsetBatch] from a @apidoc[Consumer.committableSource](C [...]
 
 
 ### Produce a single message to Kafka
@@ -108,7 +108,7 @@ Java
 : @@ snip [snip](/tests/src/test/java/docs/javadsl/ProducerTest.java) { #singleMessage }
 
 
-For flows the @apidoc[ProducerMessage.Message]s continue as @apidoc[akka.kafka.ProducerMessage.Result] elements containing: 
+For flows the @apidoc[ProducerMessage.Message]s continue as @apidoc[org.apache.pekko.kafka.ProducerMessage.Result] elements containing: 
  
  1. the original input message,
  1. the record metadata (Kafka @javadoc[RecordMetadata](org.apache.kafka.clients.producer.RecordMetadata) API), and
@@ -125,7 +125,7 @@ Scala
 Java
 : @@ snip [snip](/tests/src/test/java/docs/javadsl/ProducerTest.java) { #multiMessage }
 
-For flows the @apidoc[ProducerMessage.MultiMessage]s continue as @apidoc[akka.kafka.ProducerMessage.MultiResult] elements containing: 
+For flows the @apidoc[ProducerMessage.MultiMessage]s continue as @apidoc[org.apache.pekko.kafka.ProducerMessage.MultiResult] elements containing: 
  
  1. a list of @apidoc[ProducerMessage.MultiResultPart] with
     1. the original input message,
@@ -163,7 +163,7 @@ Java
 
 ## Connecting a Producer to a Consumer
 
-The `passThrough` can for example hold a @apidoc[akka.kafka.ConsumerMessage.Committable] that can be committed after publishing to Kafka. 
+The `passThrough` can for example hold a @apidoc[org.apache.pekko.kafka.ConsumerMessage.Committable] that can be committed after publishing to Kafka. 
 
 Scala
 : @@ snip [snip](/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala) { #consumerToProducerSink }
diff --git a/docs/src/main/paradox/send-producer.md b/docs/src/main/paradox/send-producer.md
index de3bcf42..fdf0653c 100644
--- a/docs/src/main/paradox/send-producer.md
+++ b/docs/src/main/paradox/send-producer.md
@@ -41,13 +41,13 @@ Scala
 Java
 : @@ snip [snip](/tests/src/test/java/docs/javadsl/SendProducerTest.java) { #multiMessage }
 
-After successful sending, a @apidoc[ProducerMessage.Message] will return a @apidoc[akka.kafka.ProducerMessage.Result] element containing:
+After successful sending, a @apidoc[ProducerMessage.Message] will return a @apidoc[org.apache.pekko.kafka.ProducerMessage.Result] element containing:
 
  1. the original input message,
  1. the record metadata (Kafka @javadoc[RecordMetadata](org.apache.kafka.clients.producer.RecordMetadata) API), and
  1. access to the `passThrough` within the message.
 
-A @apidoc[ProducerMessage.MultiMessage] will return a @apidoc[akka.kafka.ProducerMessage.MultiResult] containing:
+A @apidoc[ProducerMessage.MultiMessage] will return a @apidoc[org.apache.pekko.kafka.ProducerMessage.MultiResult] containing:
 
  1. a list of @apidoc[ProducerMessage.MultiResultPart] with
     1. the original input message,
diff --git a/docs/src/main/paradox/testing-testcontainers.md b/docs/src/main/paradox/testing-testcontainers.md
index 5b65111c..d09f397e 100644
--- a/docs/src/main/paradox/testing-testcontainers.md
+++ b/docs/src/main/paradox/testing-testcontainers.md
@@ -27,16 +27,16 @@ By applying settings in code you can also configure the Kafka and ZooKeeper cont
 For example, the following demonstrates creating a 3-broker Kafka cluster and disabling the automatic topic creation broker configuration using environment variables.
 
 Scala
-: @@snip [snip](/tests/src/test/scala/akka/kafka/scaladsl/SpecBase.scala) { #testkit #testcontainers-settings }
+: @@snip [snip](/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/SpecBase.scala) { #testkit #testcontainers-settings }
 
 Java
 : @@snip [snip](/tests/src/test/java/docs/javadsl/TestkitTestcontainersTest.java) { #testcontainers-settings }
 
 <!-- NOTE: Can't get this working with paradox..
-To see what options are available for configuring testcontainers using `configureKafka` and `configureZooKeeper` in @apidoc[KafkaTestkitTestcontainersSettings] see the API docs for @apidoc[AlpakkaKafkaContainer] and @apidoc[org.testcontainers.containers.GenericContainer]. 
+To see what options are available for configuring testcontainers using `configureKafka` and `configureZooKeeper` in @apidoc[KafkaTestkitTestcontainersSettings] see the API docs for @apidoc[PekkoConnectorsKafkaContainer] and @apidoc[org.testcontainers.containers.GenericContainer]. 
 -->
 
-To see what options are available for configuring testcontainers using `configureKafka` and `configureZooKeeper` in @apidoc[KafkaTestkitTestcontainersSettings] see the API docs for @apidoc[AlpakkaKafkaContainer] and [`GenericContainer`](https://www.javadoc.io/static/org.testcontainers/testcontainers/$testcontainers.version$/org/testcontainers/containers/GenericContainer.html).
+To see what options are available for configuring testcontainers using `configureKafka` and `configureZooKeeper` in @apidoc[KafkaTestkitTestcontainersSettings] see the API docs for @apidoc[PekkoConnectorsKafkaContainer] and [`GenericContainer`](https://www.javadoc.io/static/org.testcontainers/testcontainers/$testcontainers.version$/org/testcontainers/containers/GenericContainer.html).
 
 ### Testing with Schema Registry
 
@@ -88,13 +88,13 @@ To ensure proper shutdown of all stages in every test, wrap your test code in @a
 
 ### One cluster for all tests
 
-By mixing in @scaladoc[TestcontainersKafkaLike](akka.kafka.testkit.scaladsl.TestcontainersKafkaLike) the Kafka Docker cluster will be started before the first test and shut down after all tests are finished.
+By mixing in @scaladoc[TestcontainersKafkaLike](org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike) the Kafka Docker cluster will be started before the first test and shut down after all tests are finished.
 
 Scala
-: @@snip [snip](/tests/src/test/scala/akka/kafka/scaladsl/SpecBase.scala) { #testkit #testcontainers}
+: @@snip [snip](/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/SpecBase.scala) { #testkit #testcontainers}
 
 Test classes can extend this `TestcontainersSampleSpec` class to automatically start and stop a Kafka broker to test with.
 
 ### One cluster per test class
 
-By mixing in @scaladoc[TestcontainersKafkaPerClassLike](akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike) a specific Kafka Docker cluster will be started for that test class and stopped after its run finished.
+By mixing in @scaladoc[TestcontainersKafkaPerClassLike](org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike) a specific Kafka Docker cluster will be started for that test class and stopped after its run has finished.
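
For orientation, a hedged sketch of building such settings in code, assuming an `ActorSystem` named `system` in scope; the broker count, replication factor, and environment variable are illustrative:

```scala
import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings

val testcontainersSettings =
  KafkaTestkitTestcontainersSettings(system)
    .withNumBrokers(3)
    .withInternalTopicsReplicationFactor(2)
    .withConfigureKafka { brokerContainers =>
      // Disable automatic topic creation on every broker via an env var.
      brokerContainers.foreach(_.withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "false"))
    }
```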
diff --git a/docs/src/main/paradox/transactions.md b/docs/src/main/paradox/transactions.md
index acb01669..dd428e19 100644
--- a/docs/src/main/paradox/transactions.md
+++ b/docs/src/main/paradox/transactions.md
@@ -93,7 +93,7 @@ Java
 
 When any stage in the stream fails the whole stream will be torn down.  In the general case it's desirable to allow transient errors to fail the whole stream because they cannot be recovered from within the application.  Transient errors can be caused by network partitions, Kafka broker failures, @javadoc[ProducerFencedException](org.apache.kafka.common.errors.ProducerFencedException)'s from other application instances, and so on.  When the stream encounters transient errors then the cur [...]
 
-For transient errors we can choose to rely on the Kafka producer's configuration to retry, or we can handle it ourselves at the Akka Streams or Application layer.  Using the @extref[RestartSource](akka:/stream/stream-error.html#delayed-restarts-with-a-backoff-stage) we can backoff connection attempts so that we don't hammer the Kafka cluster in a tight loop.
+For transient errors we can choose to rely on the Kafka producer's configuration to retry, or we can handle it ourselves at the Akka Streams or application layer.  Using the @extref[RestartSource](pekko:/stream/stream-error.html#delayed-restarts-with-a-backoff-stage) we can back off connection attempts so that we don't hammer the Kafka cluster in a tight loop.
 
 Scala
 : @@ snip [snip](/tests/src/test/scala/docs/scaladsl/TransactionsExample.scala) { #transactionalFailureRetry }
diff --git a/project/VersionGenerator.scala b/project/VersionGenerator.scala
index ca5cbe76..c8abf39e 100644
--- a/project/VersionGenerator.scala
+++ b/project/VersionGenerator.scala
@@ -15,7 +15,7 @@ object VersionGenerator {
       sourceGenerators += generateVersion(
         sourceManaged,
         _ / "akka" / "kafka" / "Version.scala",
-        """|package akka.kafka
+        """|package org.apache.pekko.kafka
          |
          |object Version {
          |  val current: String = "%s"
diff --git a/testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaTest.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/KafkaTest.java
similarity index 82%
rename from testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaTest.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/KafkaTest.java
index 1f5e1dca..92f3df6b 100644
--- a/testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaTest.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/KafkaTest.java
@@ -3,12 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl;
+package org.apache.pekko.kafka.testkit;
 
-import akka.actor.ActorSystem;
-import akka.actor.ClassicActorSystemProvider;
-import akka.stream.Materializer;
-import akka.stream.testkit.javadsl.StreamTestKit;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.ClassicActorSystemProvider;
+import org.apache.pekko.stream.Materializer;
+import org.apache.pekko.stream.testkit.javadsl.StreamTestKit;
+import org.apache.pekko.kafka.testkit.javadsl.BaseKafkaTest;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
diff --git a/testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaJunit4Test.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaJunit4Test.java
similarity index 82%
rename from testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaJunit4Test.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaJunit4Test.java
index 6106ee16..d51ac382 100644
--- a/testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaJunit4Test.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaJunit4Test.java
@@ -3,13 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl;
+package org.apache.pekko.kafka.testkit;
 
-import akka.actor.ActorSystem;
-import akka.actor.ClassicActorSystemProvider;
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings;
-import akka.kafka.testkit.internal.TestcontainersKafka;
-import akka.stream.Materializer;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.ClassicActorSystemProvider;
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings;
+import org.apache.pekko.kafka.testkit.internal.TestcontainersKafka;
+import org.apache.pekko.stream.Materializer;
+import org.apache.pekko.kafka.testkit.javadsl.KafkaJunit4Test;
 import org.junit.After;
 import org.junit.Before;
 
diff --git a/testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaTest.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaTest.java
similarity index 85%
rename from testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaTest.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaTest.java
index 4226eb2a..d14e869d 100644
--- a/testkit/src/main/java/akka/kafka/testkit/javadsl/TestcontainersKafkaTest.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/TestcontainersKafkaTest.java
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl;
+package org.apache.pekko.kafka.testkit;
 
-import akka.actor.ActorSystem;
-import akka.actor.ClassicActorSystemProvider;
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings;
-import akka.kafka.testkit.internal.TestcontainersKafka;
-import akka.stream.Materializer;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.ClassicActorSystemProvider;
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings;
+import org.apache.pekko.kafka.testkit.internal.TestcontainersKafka;
+import org.apache.pekko.stream.Materializer;
 
 /**
  * JUnit 5 base class using [[https://www.testcontainers.org/ Testcontainers]] to start a Kafka
diff --git a/testkit/src/main/java/akka/kafka/testkit/internal/KafkaContainerCluster.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/KafkaContainerCluster.java
similarity index 90%
rename from testkit/src/main/java/akka/kafka/testkit/internal/KafkaContainerCluster.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/KafkaContainerCluster.java
index f2e10d5e..2763a122 100644
--- a/testkit/src/main/java/akka/kafka/testkit/internal/KafkaContainerCluster.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/KafkaContainerCluster.java
@@ -1,378 +1,381 @@
-/*
- * Copyright (C) 2014 - 2016 Softwaremill <https://softwaremill.com>
- * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
- */
-
-package akka.kafka.testkit.internal;
-
-import akka.annotation.InternalApi;
-import org.rnorth.ducttape.unreliables.Unreliables;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.Container;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.Network;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.images.builder.Transferable;
-import org.testcontainers.lifecycle.Startable;
-import org.testcontainers.lifecycle.Startables;
-import org.testcontainers.utility.DockerImageName;
-
-import java.nio.charset.StandardCharsets;
-import java.time.Duration;
-import java.util.Collection;
-import java.util.Optional;
-import java.util.concurrent.Callable;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import java.util.stream.Stream;
-
-import static java.util.concurrent.TimeUnit.SECONDS;
-
-/** Provides an easy way to launch a Kafka cluster with multiple brokers. */
-@InternalApi
-public class KafkaContainerCluster implements Startable {
-
-  public static final DockerImageName DEFAULT_ZOOKEEPER_IMAGE_NAME =
-      AlpakkaKafkaContainer.DEFAULT_ZOOKEEPER_IMAGE_NAME;
-  public static final DockerImageName DEFAULT_KAFKA_IMAGE_NAME =
-      AlpakkaKafkaContainer.DEFAULT_KAFKA_IMAGE_NAME;
-  public static final DockerImageName DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME =
-      SchemaRegistryContainer.DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME;
-  public static final Duration DEFAULT_CLUSTER_START_TIMEOUT = Duration.ofSeconds(360);
-  public static final Duration DEFAULT_READINESS_CHECK_TIMEOUT = DEFAULT_CLUSTER_START_TIMEOUT;
-
-  private static final String LOGGING_NAMESPACE_PREFIX = "akka.kafka.testkit.testcontainers.logs";
-  private static final String READINESS_CHECK_SCRIPT = "/testcontainers_readiness_check.sh";
-  private static final String READINESS_CHECK_TOPIC = "ready-kafka-container-cluster";
-  private static final Version BOOTSTRAP_PARAM_MIN_VERSION = new Version("5.2.0");
-
-  private final Logger log = LoggerFactory.getLogger(getClass());
-  private final Version kafkaImageTag;
-  private final int brokersNum;
-  private final Boolean useSchemaRegistry;
-  private final Boolean containerLogging;
-  private final Duration clusterStartTimeout;
-  private final Duration readinessCheckTimeout;
-  private final Network network;
-  private final GenericContainer zookeeper;
-  private final Collection<AlpakkaKafkaContainer> brokers;
-  private DockerImageName schemaRegistryImage;
-  private Optional<SchemaRegistryContainer> schemaRegistry = Optional.empty();
-
-  public KafkaContainerCluster(int brokersNum, int internalTopicsRf) {
-    this(
-        DEFAULT_ZOOKEEPER_IMAGE_NAME,
-        DEFAULT_KAFKA_IMAGE_NAME,
-        DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME,
-        brokersNum,
-        internalTopicsRf,
-        false,
-        false,
-        DEFAULT_CLUSTER_START_TIMEOUT,
-        DEFAULT_READINESS_CHECK_TIMEOUT);
-  }
-
-  public KafkaContainerCluster(
-      DockerImageName zooKeeperImage,
-      DockerImageName kafkaImage,
-      DockerImageName schemaRegistryImage,
-      int brokersNum,
-      int internalTopicsRf,
-      boolean useSchemaRegistry,
-      boolean containerLogging,
-      Duration clusterStartTimeout,
-      Duration readinessCheckTimeout) {
-    if (brokersNum < 0) {
-      throw new IllegalArgumentException("brokersNum '" + brokersNum + "' must be greater than 0");
-    }
-    if (internalTopicsRf < 0 || internalTopicsRf > brokersNum) {
-      throw new IllegalArgumentException(
-          "internalTopicsRf '"
-              + internalTopicsRf
-              + "' must be less than brokersNum and greater than 0");
-    }
-
-    this.kafkaImageTag = new Version(kafkaImage.getVersionPart());
-    this.brokersNum = brokersNum;
-    this.useSchemaRegistry = useSchemaRegistry;
-    this.containerLogging = containerLogging;
-    this.clusterStartTimeout = clusterStartTimeout;
-    this.readinessCheckTimeout = readinessCheckTimeout;
-    this.network = Network.newNetwork();
-    this.schemaRegistryImage = schemaRegistryImage;
-
-    this.zookeeper =
-        new GenericContainer(zooKeeperImage)
-            .withNetwork(network)
-            .withNetworkAliases("zookeeper")
-            .withEnv("ZOOKEEPER_CLIENT_PORT", String.valueOf(AlpakkaKafkaContainer.ZOOKEEPER_PORT));
-
-    this.brokers =
-        IntStream.range(0, this.brokersNum)
-            .mapToObj(
-                brokerNum ->
-                    new AlpakkaKafkaContainer(kafkaImage)
-                        .withNetwork(this.network)
-                        .withBrokerNum(brokerNum)
-                        .withRemoteJmxService()
-                        .dependsOn(this.zookeeper)
-                        .withExternalZookeeper("zookeeper:" + AlpakkaKafkaContainer.ZOOKEEPER_PORT)
-                        .withEnv("KAFKA_BROKER_ID", brokerNum + "")
-                        .withEnv("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", internalTopicsRf + "")
-                        .withEnv("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", internalTopicsRf + "")
-                        .withEnv(
-                            "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", internalTopicsRf + "")
-                        .withEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", internalTopicsRf + ""))
-            .collect(Collectors.toList());
-
-    if (useSchemaRegistry) {
-      this.schemaRegistry =
-          Optional.of(
-              new SchemaRegistryContainer(this.schemaRegistryImage)
-                  .withNetworkAliases("schema-registry")
-                  .withCluster(this));
-    } else {
-      this.schemaRegistry = Optional.empty();
-    }
-  }
-
-  public Network getNetwork() {
-    return this.network;
-  }
-
-  public GenericContainer getZooKeeper() {
-    return this.zookeeper;
-  }
-
-  public Optional<SchemaRegistryContainer> getSchemaRegistry() {
-    return this.schemaRegistry;
-  }
-
-  public Collection<AlpakkaKafkaContainer> getBrokers() {
-    return this.brokers;
-  }
-
-  public String getBootstrapServers() {
-    return brokers.stream()
-        .map(AlpakkaKafkaContainer::getBootstrapServers)
-        .collect(Collectors.joining(","));
-  }
-
-  public String getInternalNetworkBootstrapServers() {
-    return IntStream.range(0, this.brokersNum)
-        .mapToObj(brokerNum -> String.format("broker-%s:%s", brokerNum, "9092"))
-        .collect(Collectors.joining(","));
-  }
-
-  /** for backwards compatibility with Java 8 */
-  private <T> Stream<T> optionalStream(Optional<T> option) {
-    if (option.isPresent()) return Stream.of(option.get());
-    else return Stream.empty();
-  }
-
-  private Stream<GenericContainer> allContainers() {
-    return Stream.concat(
-        Stream.concat(this.brokers.stream(), Stream.of(this.zookeeper)),
-        optionalStream(this.schemaRegistry));
-  }
-
-  @Override
-  public void start() {
-    try {
-      configureContainerLogging();
-      Stream<Startable> startables = this.brokers.stream().map(Startable.class::cast);
-      Startables.deepStart(startables).get(clusterStartTimeout.getSeconds(), SECONDS);
-
-      this.brokers.stream()
-          .findFirst()
-          .ifPresent(
-              broker -> {
-                broker.copyFileToContainer(
-                    Transferable.of(readinessCheckScript().getBytes(StandardCharsets.UTF_8), 0777),
-                    READINESS_CHECK_SCRIPT);
-              });
-
-      waitForClusterFormation();
-
-      // start schema registry if the container is initialized
-      Startables.deepStart(optionalStream(this.schemaRegistry))
-          .get(clusterStartTimeout.getSeconds(), SECONDS);
-
-    } catch (Exception ex) {
-      throw new RuntimeException(ex);
-    }
-  }
-
-  private void configureContainerLogging() {
-    if (containerLogging) {
-      log.debug("Testcontainer logging enabled");
-      this.brokers.forEach(
-          broker ->
-              setContainerLogger(
-                  LOGGING_NAMESPACE_PREFIX + ".broker.broker-" + broker.getBrokerNum(), broker));
-      setContainerLogger(LOGGING_NAMESPACE_PREFIX + ".zookeeper", this.zookeeper);
-      this.schemaRegistry.ifPresent(
-          container -> setContainerLogger(LOGGING_NAMESPACE_PREFIX + ".schemaregistry", container));
-    }
-  }
-
-  private void setContainerLogger(String loggerName, GenericContainer<?> container) {
-    Logger logger = LoggerFactory.getLogger(loggerName);
-    Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(logger);
-    container.withLogConsumer(logConsumer);
-  }
-
-  private void waitForClusterFormation() {
-    // assert that cluster has formed
-    runReadinessCheck(
-        "Readiness check (1/2). ZooKeeper state updated.",
-        () -> {
-          Container.ExecResult result =
-              this.zookeeper.execInContainer(
-                  "sh",
-                  "-c",
-                  "zookeeper-shell zookeeper:"
-                      + AlpakkaKafkaContainer.ZOOKEEPER_PORT
-                      + " ls /brokers/ids | tail -n 1");
-          String brokers = result.getStdout();
-          return brokers != null && brokers.split(",").length == this.brokersNum;
-        });
-
-    runReadinessCheck(
-        "Readiness check (2/2). Run producer consumer with acks=all.",
-        () -> this.brokers.stream().findFirst().map(this::runReadinessCheck).orElse(false));
-  }
-
-  public void stopKafka() {
-    this.brokers.forEach(AlpakkaKafkaContainer::stopKafka);
-  }
-
-  public void startKafka() {
-    this.brokers.forEach(AlpakkaKafkaContainer::startKafka);
-    waitForClusterFormation();
-  }
-
-  private void runReadinessCheck(String logLine, Callable<Boolean> fn) {
-    try {
-      log.debug("Start: {}", logLine);
-      Unreliables.retryUntilTrue((int) readinessCheckTimeout.getSeconds(), TimeUnit.SECONDS, fn);
-    } catch (Throwable t) {
-      log.error("Failed: {}", logLine);
-      throw t;
-    }
-    log.debug("Passed: {}", logLine);
-  }
-
-  private String readinessCheckScript() {
-    String connect = kafkaTopicConnectParam();
-    String command = "#!/bin/bash \n";
-    command += "set -e \n";
-    command +=
-        "[[ $(kafka-topics "
-            + connect
-            + " --describe --topic "
-            + READINESS_CHECK_TOPIC
-            + " | wc -l) > 1 ]] && "
-            + "kafka-topics "
-            + connect
-            + " --delete --topic "
-            + READINESS_CHECK_TOPIC
-            + " \n";
-    command +=
-        "kafka-topics "
-            + connect
-            + " --topic "
-            + READINESS_CHECK_TOPIC
-            + " --create --partitions "
-            + this.brokersNum
-            + " --replication-factor "
-            + this.brokersNum
-            + " --config min.insync.replicas="
-            + this.brokersNum
-            + " \n";
-    command += "MESSAGE=\"`date -u`\" \n";
-    command +=
-        "echo \"$MESSAGE\" | kafka-console-producer --broker-list localhost:9092 --topic "
-            + READINESS_CHECK_TOPIC
-            + " --producer-property acks=all \n";
-    command +=
-        "kafka-console-consumer --bootstrap-server localhost:9092 --topic "
-            + READINESS_CHECK_TOPIC
-            + " --from-beginning --timeout-ms 2000 --max-messages 1 | grep \"$MESSAGE\" \n";
-    command += "kafka-topics " + connect + " --delete --topic " + READINESS_CHECK_TOPIC + " \n";
-    command += "echo \"test succeeded\" \n";
-    return command;
-  }
-
-  private String kafkaTopicConnectParam() {
-    if (this.kafkaImageTag.compareTo(BOOTSTRAP_PARAM_MIN_VERSION) >= 0) {
-      return "--bootstrap-server localhost:9092";
-    } else {
-      return "--zookeeper zookeeper:" + AlpakkaKafkaContainer.ZOOKEEPER_PORT;
-    }
-  }
-
-  private Boolean runReadinessCheck(GenericContainer c) {
-    try {
-      Container.ExecResult result = c.execInContainer("sh", "-c", READINESS_CHECK_SCRIPT);
-
-      if (result.getExitCode() != 0 || !result.getStdout().contains("test succeeded")) {
-        log.debug(
-            "Readiness check returned errors:\nSTDOUT:\n{}\nSTDERR\n{}",
-            result.getStdout(),
-            result.getStderr());
-        return false;
-      }
-      return true;
-    } catch (Exception ex) {
-      throw new RuntimeException(ex);
-    }
-  }
-
-  @Override
-  public void stop() {
-    allContainers().parallel().forEach(GenericContainer::stop);
-  }
-}
-
-@InternalApi
-class Version implements Comparable<Version> {
-
-  private String version;
-
-  public final String get() {
-    return this.version;
-  }
-
-  public Version(String version) {
-    if (version == null) throw new IllegalArgumentException("Version can not be null");
-    if (!version.matches("[0-9]+(\\.[0-9]+)*"))
-      throw new IllegalArgumentException("Invalid version format");
-    this.version = version;
-  }
-
-  @Override
-  public int compareTo(Version that) {
-    if (that == null) return 1;
-    String[] thisParts = this.get().split("\\.");
-    String[] thatParts = that.get().split("\\.");
-    int length = Math.max(thisParts.length, thatParts.length);
-    for (int i = 0; i < length; i++) {
-      int thisPart = i < thisParts.length ? Integer.parseInt(thisParts[i]) : 0;
-      int thatPart = i < thatParts.length ? Integer.parseInt(thatParts[i]) : 0;
-      if (thisPart < thatPart) return -1;
-      if (thisPart > thatPart) return 1;
-    }
-    return 0;
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (this == that) return true;
-    if (that == null) return false;
-    if (this.getClass() != that.getClass()) return false;
-    return this.compareTo((Version) that) == 0;
-  }
-}
+/*
+ * Copyright (C) 2014 - 2016 Softwaremill <https://softwaremill.com>
+ * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
+ */
+
+package org.apache.pekko.kafka.testkit.internal;
+
+import org.apache.pekko.annotation.InternalApi;
+import org.rnorth.ducttape.unreliables.Unreliables;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.Container;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
+import org.testcontainers.images.builder.Transferable;
+import org.testcontainers.lifecycle.Startable;
+import org.testcontainers.lifecycle.Startables;
+import org.testcontainers.utility.DockerImageName;
+
+import java.nio.charset.StandardCharsets;
+import java.time.Duration;
+import java.util.Collection;
+import java.util.Optional;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+/** Provides an easy way to launch a Kafka cluster with multiple brokers. */
+@InternalApi
+public class KafkaContainerCluster implements Startable {
+
+  public static final DockerImageName DEFAULT_ZOOKEEPER_IMAGE_NAME =
+      PekkoConnectorsKafkaContainer.DEFAULT_ZOOKEEPER_IMAGE_NAME;
+  public static final DockerImageName DEFAULT_KAFKA_IMAGE_NAME =
+      PekkoConnectorsKafkaContainer.DEFAULT_KAFKA_IMAGE_NAME;
+  public static final DockerImageName DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME =
+      SchemaRegistryContainer.DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME;
+  public static final Duration DEFAULT_CLUSTER_START_TIMEOUT = Duration.ofSeconds(360);
+  public static final Duration DEFAULT_READINESS_CHECK_TIMEOUT = DEFAULT_CLUSTER_START_TIMEOUT;
+
+  private static final String LOGGING_NAMESPACE_PREFIX = "org.apache.pekko.kafka.testkit.testcontainers.logs";
+  private static final String READINESS_CHECK_SCRIPT = "/testcontainers_readiness_check.sh";
+  private static final String READINESS_CHECK_TOPIC = "ready-kafka-container-cluster";
+  private static final Version BOOTSTRAP_PARAM_MIN_VERSION = new Version("5.2.0");
+
+  private final Logger log = LoggerFactory.getLogger(getClass());
+  private final Version kafkaImageTag;
+  private final int brokersNum;
+  private final Boolean useSchemaRegistry;
+  private final Boolean containerLogging;
+  private final Duration clusterStartTimeout;
+  private final Duration readinessCheckTimeout;
+  private final Network network;
+  private final GenericContainer zookeeper;
+  private final Collection<PekkoConnectorsKafkaContainer> brokers;
+  private DockerImageName schemaRegistryImage;
+  private Optional<SchemaRegistryContainer> schemaRegistry = Optional.empty();
+
+  public KafkaContainerCluster(int brokersNum, int internalTopicsRf) {
+    this(
+        DEFAULT_ZOOKEEPER_IMAGE_NAME,
+        DEFAULT_KAFKA_IMAGE_NAME,
+        DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME,
+        brokersNum,
+        internalTopicsRf,
+        false,
+        false,
+        DEFAULT_CLUSTER_START_TIMEOUT,
+        DEFAULT_READINESS_CHECK_TIMEOUT);
+  }
+
+  public KafkaContainerCluster(
+      DockerImageName zooKeeperImage,
+      DockerImageName kafkaImage,
+      DockerImageName schemaRegistryImage,
+      int brokersNum,
+      int internalTopicsRf,
+      boolean useSchemaRegistry,
+      boolean containerLogging,
+      Duration clusterStartTimeout,
+      Duration readinessCheckTimeout) {
+    if (brokersNum <= 0) {
+      throw new IllegalArgumentException("brokersNum '" + brokersNum + "' must be greater than 0");
+    }
+    if (internalTopicsRf <= 0 || internalTopicsRf > brokersNum) {
+      throw new IllegalArgumentException(
+          "internalTopicsRf '"
+              + internalTopicsRf
+              + "' must be greater than 0 and less than or equal to brokersNum");
+    }
+
+    this.kafkaImageTag = new Version(kafkaImage.getVersionPart());
+    this.brokersNum = brokersNum;
+    this.useSchemaRegistry = useSchemaRegistry;
+    this.containerLogging = containerLogging;
+    this.clusterStartTimeout = clusterStartTimeout;
+    this.readinessCheckTimeout = readinessCheckTimeout;
+    this.network = Network.newNetwork();
+    this.schemaRegistryImage = schemaRegistryImage;
+
+    this.zookeeper =
+        new GenericContainer(zooKeeperImage)
+            .withNetwork(network)
+            .withNetworkAliases("zookeeper")
+            .withEnv(
+                "ZOOKEEPER_CLIENT_PORT",
+                String.valueOf(PekkoConnectorsKafkaContainer.ZOOKEEPER_PORT));
+
+    this.brokers =
+        IntStream.range(0, this.brokersNum)
+            .mapToObj(
+                brokerNum ->
+                    new PekkoConnectorsKafkaContainer(kafkaImage)
+                        .withNetwork(this.network)
+                        .withBrokerNum(brokerNum)
+                        .withRemoteJmxService()
+                        .dependsOn(this.zookeeper)
+                        .withExternalZookeeper(
+                            "zookeeper:" + PekkoConnectorsKafkaContainer.ZOOKEEPER_PORT)
+                        .withEnv("KAFKA_BROKER_ID", brokerNum + "")
+                        .withEnv("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", internalTopicsRf + "")
+                        .withEnv("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", internalTopicsRf + "")
+                        .withEnv(
+                            "KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", internalTopicsRf + "")
+                        .withEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", internalTopicsRf + ""))
+            .collect(Collectors.toList());
+
+    if (useSchemaRegistry) {
+      this.schemaRegistry =
+          Optional.of(
+              new SchemaRegistryContainer(this.schemaRegistryImage)
+                  .withNetworkAliases("schema-registry")
+                  .withCluster(this));
+    } else {
+      this.schemaRegistry = Optional.empty();
+    }
+  }
+
+  public Network getNetwork() {
+    return this.network;
+  }
+
+  public GenericContainer getZooKeeper() {
+    return this.zookeeper;
+  }
+
+  public Optional<SchemaRegistryContainer> getSchemaRegistry() {
+    return this.schemaRegistry;
+  }
+
+  public Collection<PekkoConnectorsKafkaContainer> getBrokers() {
+    return this.brokers;
+  }
+
+  public String getBootstrapServers() {
+    return brokers.stream()
+        .map(PekkoConnectorsKafkaContainer::getBootstrapServers)
+        .collect(Collectors.joining(","));
+  }
+
+  public String getInternalNetworkBootstrapServers() {
+    return IntStream.range(0, this.brokersNum)
+        .mapToObj(brokerNum -> String.format("broker-%s:%s", brokerNum, "9092"))
+        .collect(Collectors.joining(","));
+  }
+
+  /** for backwards compatibility with Java 8 */
+  private <T> Stream<T> optionalStream(Optional<T> option) {
+    if (option.isPresent()) return Stream.of(option.get());
+    else return Stream.empty();
+  }
+
+  private Stream<GenericContainer> allContainers() {
+    return Stream.concat(
+        Stream.concat(this.brokers.stream(), Stream.of(this.zookeeper)),
+        optionalStream(this.schemaRegistry));
+  }
+
+  @Override
+  public void start() {
+    try {
+      configureContainerLogging();
+      Stream<Startable> startables = this.brokers.stream().map(Startable.class::cast);
+      Startables.deepStart(startables).get(clusterStartTimeout.getSeconds(), SECONDS);
+
+      this.brokers.stream()
+          .findFirst()
+          .ifPresent(
+              broker -> {
+                broker.copyFileToContainer(
+                    Transferable.of(readinessCheckScript().getBytes(StandardCharsets.UTF_8), 0777),
+                    READINESS_CHECK_SCRIPT);
+              });
+
+      waitForClusterFormation();
+
+      // start schema registry if the container is initialized
+      Startables.deepStart(optionalStream(this.schemaRegistry))
+          .get(clusterStartTimeout.getSeconds(), SECONDS);
+
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  private void configureContainerLogging() {
+    if (containerLogging) {
+      log.debug("Testcontainer logging enabled");
+      this.brokers.forEach(
+          broker ->
+              setContainerLogger(
+                  LOGGING_NAMESPACE_PREFIX + ".broker.broker-" + broker.getBrokerNum(), broker));
+      setContainerLogger(LOGGING_NAMESPACE_PREFIX + ".zookeeper", this.zookeeper);
+      this.schemaRegistry.ifPresent(
+          container -> setContainerLogger(LOGGING_NAMESPACE_PREFIX + ".schemaregistry", container));
+    }
+  }
+
+  private void setContainerLogger(String loggerName, GenericContainer<?> container) {
+    Logger logger = LoggerFactory.getLogger(loggerName);
+    Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(logger);
+    container.withLogConsumer(logConsumer);
+  }
+
+  private void waitForClusterFormation() {
+    // assert that cluster has formed
+    runReadinessCheck(
+        "Readiness check (1/2). ZooKeeper state updated.",
+        () -> {
+          Container.ExecResult result =
+              this.zookeeper.execInContainer(
+                  "sh",
+                  "-c",
+                  "zookeeper-shell zookeeper:"
+                      + PekkoConnectorsKafkaContainer.ZOOKEEPER_PORT
+                      + " ls /brokers/ids | tail -n 1");
+          String brokers = result.getStdout();
+          return brokers != null && brokers.split(",").length == this.brokersNum;
+        });
+
+    runReadinessCheck(
+        "Readiness check (2/2). Run producer consumer with acks=all.",
+        () -> this.brokers.stream().findFirst().map(this::runReadinessCheck).orElse(false));
+  }
+
+  public void stopKafka() {
+    this.brokers.forEach(PekkoConnectorsKafkaContainer::stopKafka);
+  }
+
+  public void startKafka() {
+    this.brokers.forEach(PekkoConnectorsKafkaContainer::startKafka);
+    waitForClusterFormation();
+  }
+
+  private void runReadinessCheck(String logLine, Callable<Boolean> fn) {
+    try {
+      log.debug("Start: {}", logLine);
+      Unreliables.retryUntilTrue((int) readinessCheckTimeout.getSeconds(), TimeUnit.SECONDS, fn);
+    } catch (Throwable t) {
+      log.error("Failed: {}", logLine);
+      throw t;
+    }
+    log.debug("Passed: {}", logLine);
+  }
+
+  private String readinessCheckScript() {
+    String connect = kafkaTopicConnectParam();
+    String command = "#!/bin/bash \n";
+    command += "set -e \n";
+    command +=
+        "[[ $(kafka-topics "
+            + connect
+            + " --describe --topic "
+            + READINESS_CHECK_TOPIC
+            + " | wc -l) > 1 ]] && "
+            + "kafka-topics "
+            + connect
+            + " --delete --topic "
+            + READINESS_CHECK_TOPIC
+            + " \n";
+    command +=
+        "kafka-topics "
+            + connect
+            + " --topic "
+            + READINESS_CHECK_TOPIC
+            + " --create --partitions "
+            + this.brokersNum
+            + " --replication-factor "
+            + this.brokersNum
+            + " --config min.insync.replicas="
+            + this.brokersNum
+            + " \n";
+    command += "MESSAGE=\"`date -u`\" \n";
+    command +=
+        "echo \"$MESSAGE\" | kafka-console-producer --broker-list localhost:9092 --topic "
+            + READINESS_CHECK_TOPIC
+            + " --producer-property acks=all \n";
+    command +=
+        "kafka-console-consumer --bootstrap-server localhost:9092 --topic "
+            + READINESS_CHECK_TOPIC
+            + " --from-beginning --timeout-ms 2000 --max-messages 1 | grep \"$MESSAGE\" \n";
+    command += "kafka-topics " + connect + " --delete --topic " + READINESS_CHECK_TOPIC + " \n";
+    command += "echo \"test succeeded\" \n";
+    return command;
+  }
+
+  private String kafkaTopicConnectParam() {
+    if (this.kafkaImageTag.compareTo(BOOTSTRAP_PARAM_MIN_VERSION) >= 0) {
+      return "--bootstrap-server localhost:9092";
+    } else {
+      return "--zookeeper zookeeper:" + PekkoConnectorsKafkaContainer.ZOOKEEPER_PORT;
+    }
+  }
+
+  private Boolean runReadinessCheck(GenericContainer c) {
+    try {
+      Container.ExecResult result = c.execInContainer("sh", "-c", READINESS_CHECK_SCRIPT);
+
+      if (result.getExitCode() != 0 || !result.getStdout().contains("test succeeded")) {
+        log.debug(
+            "Readiness check returned errors:\nSTDOUT:\n{}\nSTDERR\n{}",
+            result.getStdout(),
+            result.getStderr());
+        return false;
+      }
+      return true;
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  @Override
+  public void stop() {
+    allContainers().parallel().forEach(GenericContainer::stop);
+  }
+}
+
+@InternalApi
+class Version implements Comparable<Version> {
+
+  private String version;
+
+  public final String get() {
+    return this.version;
+  }
+
+  public Version(String version) {
+    if (version == null) throw new IllegalArgumentException("Version cannot be null");
+    if (!version.matches("[0-9]+(\\.[0-9]+)*"))
+      throw new IllegalArgumentException("Invalid version format");
+    this.version = version;
+  }
+
+  @Override
+  public int compareTo(Version that) {
+    if (that == null) return 1;
+    String[] thisParts = this.get().split("\\.");
+    String[] thatParts = that.get().split("\\.");
+    int length = Math.max(thisParts.length, thatParts.length);
+    for (int i = 0; i < length; i++) {
+      int thisPart = i < thisParts.length ? Integer.parseInt(thisParts[i]) : 0;
+      int thatPart = i < thatParts.length ? Integer.parseInt(thatParts[i]) : 0;
+      if (thisPart < thatPart) return -1;
+      if (thisPart > thatPart) return 1;
+    }
+    return 0;
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (this == that) return true;
+    if (that == null) return false;
+    if (this.getClass() != that.getClass()) return false;
+    return this.compareTo((Version) that) == 0;
+  }
+}
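
For orientation, a minimal sketch of driving the renamed cluster class directly. The constructor, start(), getBootstrapServers() and stop() all appear in the hunk above; the object name and broker counts are illustrative, and since the class is @InternalApi the testkit traits further below are the supported entry point.

    import org.apache.pekko.kafka.testkit.internal.KafkaContainerCluster

    object ClusterSketch extends App {
      // 3 brokers; the offsets/transaction internal topics get replication factor 3
      val cluster = new KafkaContainerCluster(3, 3)
      cluster.start() // blocks until both readiness checks above have passed
      try println(s"bootstrap servers: ${cluster.getBootstrapServers}")
      finally cluster.stop() // stops brokers, ZooKeeper and any schema registry
    }
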
diff --git a/testkit/src/main/java/akka/kafka/testkit/internal/AlpakkaKafkaContainer.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/PekkoConnectorsKafkaContainer.java
similarity index 92%
rename from testkit/src/main/java/akka/kafka/testkit/internal/AlpakkaKafkaContainer.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/PekkoConnectorsKafkaContainer.java
index f8bccddf..67198617 100644
--- a/testkit/src/main/java/akka/kafka/testkit/internal/AlpakkaKafkaContainer.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/PekkoConnectorsKafkaContainer.java
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal;
+package org.apache.pekko.kafka.testkit.internal;
 
-import akka.annotation.InternalApi;
+import org.apache.pekko.annotation.InternalApi;
 import com.github.dockerjava.api.command.InspectContainerResponse;
 import com.github.dockerjava.api.model.ContainerNetwork;
 import org.testcontainers.containers.GenericContainer;
@@ -25,7 +25,7 @@ import java.util.stream.Stream;
  * needed
  */
 @InternalApi
-public class AlpakkaKafkaContainer extends GenericContainer<AlpakkaKafkaContainer> {
+public class PekkoConnectorsKafkaContainer extends GenericContainer<PekkoConnectorsKafkaContainer> {
 
   private static final String START_STOP_SCRIPT = "/testcontainers_start_stop_wrapper.sh";
 
@@ -60,11 +60,11 @@ public class AlpakkaKafkaContainer extends GenericContainer<AlpakkaKafkaContaine
 
   private boolean enableRemoteJmxService = false;
 
-  public AlpakkaKafkaContainer() {
+  public PekkoConnectorsKafkaContainer() {
     this(DEFAULT_KAFKA_IMAGE_NAME);
   }
 
-  public AlpakkaKafkaContainer(final DockerImageName dockerImageName) {
+  public PekkoConnectorsKafkaContainer(final DockerImageName dockerImageName) {
     super(dockerImageName);
 
     super.withNetwork(Network.SHARED);
@@ -88,12 +88,12 @@ public class AlpakkaKafkaContainer extends GenericContainer<AlpakkaKafkaContaine
   }
 
   @Override
-  public AlpakkaKafkaContainer withNetwork(Network network) {
+  public PekkoConnectorsKafkaContainer withNetwork(Network network) {
     useImplicitNetwork = false;
     return super.withNetwork(network);
   }
 
-  public AlpakkaKafkaContainer withBrokerNum(int brokerNum) {
+  public PekkoConnectorsKafkaContainer withBrokerNum(int brokerNum) {
     if (brokerNum != this.brokerNum) {
       this.brokerNum = brokerNum;
       return super.withNetworkAliases("broker-" + this.brokerNum)
@@ -138,17 +138,17 @@ public class AlpakkaKafkaContainer extends GenericContainer<AlpakkaKafkaContaine
     }
   }
 
-  public AlpakkaKafkaContainer withEmbeddedZookeeper() {
+  public PekkoConnectorsKafkaContainer withEmbeddedZookeeper() {
     externalZookeeperConnect = null;
     return self();
   }
 
-  public AlpakkaKafkaContainer withExternalZookeeper(String connectString) {
+  public PekkoConnectorsKafkaContainer withExternalZookeeper(String connectString) {
     externalZookeeperConnect = connectString;
     return self();
   }
 
-  public AlpakkaKafkaContainer withRemoteJmxService() {
+  public PekkoConnectorsKafkaContainer withRemoteJmxService() {
     enableRemoteJmxService = true;
     return self();
   }
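
The renamed container keeps the fluent Testcontainers style; a short sketch of a single broker with the embedded ZooKeeper (method names taken from the hunks above, start/stop inherited from GenericContainer; the object name is illustrative):

    import org.apache.pekko.kafka.testkit.internal.PekkoConnectorsKafkaContainer

    object SingleBrokerSketch extends App {
      val kafka = new PekkoConnectorsKafkaContainer() // default Confluent image
        .withEmbeddedZookeeper() // clears any external ZooKeeper connect string
        .withRemoteJmxService()  // same JMX wiring the cluster applies per broker
      kafka.start()
      try println(kafka.getBootstrapServers)
      finally kafka.stop()
    }
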
diff --git a/testkit/src/main/java/akka/kafka/testkit/internal/SchemaRegistryContainer.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/SchemaRegistryContainer.java
similarity index 91%
rename from testkit/src/main/java/akka/kafka/testkit/internal/SchemaRegistryContainer.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/SchemaRegistryContainer.java
index a9eb68fe..a64fb7ae 100644
--- a/testkit/src/main/java/akka/kafka/testkit/internal/SchemaRegistryContainer.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/internal/SchemaRegistryContainer.java
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal;
+package org.apache.pekko.kafka.testkit.internal;
 
 import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.Network;
@@ -13,7 +13,7 @@ public class SchemaRegistryContainer extends GenericContainer<SchemaRegistryCont
   // Align these confluent platform constants with testkit/src/main/resources/reference.conf
   public static final DockerImageName DEFAULT_SCHEMA_REGISTRY_IMAGE_NAME =
       DockerImageName.parse("confluentinc/cp-schema-registry")
-          .withTag(AlpakkaKafkaContainer.DEFAULT_CONFLUENT_PLATFORM_VERSION);
+          .withTag(PekkoConnectorsKafkaContainer.DEFAULT_CONFLUENT_PLATFORM_VERSION);
 
   public static int SCHEMA_REGISTRY_PORT = 8081;
 
diff --git a/testkit/src/main/java/akka/kafka/testkit/javadsl/BaseKafkaTest.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/BaseKafkaTest.java
similarity index 90%
rename from testkit/src/main/java/akka/kafka/testkit/javadsl/BaseKafkaTest.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/BaseKafkaTest.java
index d66e66e0..ca170159 100644
--- a/testkit/src/main/java/akka/kafka/testkit/javadsl/BaseKafkaTest.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/BaseKafkaTest.java
@@ -3,20 +3,20 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl;
-
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.actor.ClassicActorSystemProvider;
-import akka.japi.Pair;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.kafka.testkit.internal.KafkaTestKitChecks;
-import akka.kafka.testkit.internal.KafkaTestKitClass;
-import akka.stream.Materializer;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
+package org.apache.pekko.kafka.testkit.javadsl;
+
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.ClassicActorSystemProvider;
+import org.apache.pekko.japi.Pair;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.kafka.testkit.internal.KafkaTestKitChecks;
+import org.apache.pekko.kafka.testkit.internal.KafkaTestKitClass;
+import org.apache.pekko.stream.Materializer;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
 import org.apache.kafka.clients.admin.ConsumerGroupDescription;
 import org.apache.kafka.clients.admin.DescribeClusterResult;
 import org.apache.kafka.clients.admin.MemberDescription;
diff --git a/testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaJunit4Test.java b/testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/KafkaJunit4Test.java
similarity index 81%
rename from testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaJunit4Test.java
rename to testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/KafkaJunit4Test.java
index 1ad493d0..7b66b4ad 100644
--- a/testkit/src/main/java/akka/kafka/testkit/javadsl/KafkaJunit4Test.java
+++ b/testkit/src/main/java/org/apache/pekko/kafka/testkit/javadsl/KafkaJunit4Test.java
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl;
+package org.apache.pekko.kafka.testkit.javadsl;
 
-import akka.actor.ActorSystem;
-import akka.actor.ClassicActorSystemProvider;
-import akka.stream.Materializer;
-import akka.stream.testkit.javadsl.StreamTestKit;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.ClassicActorSystemProvider;
+import org.apache.pekko.stream.Materializer;
+import org.apache.pekko.stream.testkit.javadsl.StreamTestKit;
 import org.junit.After;
 import org.junit.Before;
 
diff --git a/testkit/src/main/scala/akka/kafka/testkit/ConsumerResultFactory.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/ConsumerResultFactory.scala
similarity index 82%
rename from testkit/src/main/scala/akka/kafka/testkit/ConsumerResultFactory.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/ConsumerResultFactory.scala
index 56951839..4100ed5e 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/ConsumerResultFactory.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/ConsumerResultFactory.scala
@@ -3,20 +3,20 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit
+package org.apache.pekko.kafka.testkit
 
-import akka.Done
-import akka.annotation.ApiMayChange
-import akka.kafka.ConsumerMessage
-import akka.kafka.ConsumerMessage.{ CommittableOffset, GroupTopicPartition, PartitionOffsetCommittedMarker }
-import akka.kafka.internal.{ CommittableOffsetImpl, KafkaAsyncConsumerCommitterRef }
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.ConsumerMessage
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableOffset, GroupTopicPartition, PartitionOffsetCommittedMarker }
+import org.apache.pekko.kafka.internal.{ CommittableOffsetImpl, KafkaAsyncConsumerCommitterRef }
 import org.apache.kafka.clients.consumer.{ ConsumerRecord, OffsetAndMetadata }
 import org.apache.kafka.common.TopicPartition
 
 import scala.concurrent.Future
 
 /**
- * Factory methods to create instances that normally are emitted by [[akka.kafka.scaladsl.Consumer]] and [[akka.kafka.javadsl.Consumer]] flows.
+ * Factory methods to create instances that normally are emitted by [[org.apache.pekko.kafka.scaladsl.Consumer]] and [[org.apache.pekko.kafka.javadsl.Consumer]] flows.
  */
 @ApiMayChange
 object ConsumerResultFactory {
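
Only the package moved here, so usage should match the Alpakka original; a hedged sketch, assuming the committableOffset/committableMessage signatures carried over unchanged:

    import org.apache.kafka.clients.consumer.ConsumerRecord
    import org.apache.pekko.kafka.testkit.ConsumerResultFactory

    object ConsumerResultSketch extends App {
      // Fabricate the committable message a consumer flow would emit, no broker needed.
      val message = ConsumerResultFactory.committableMessage(
        new ConsumerRecord("topic", 0, 17L, "key", "value"),
        ConsumerResultFactory.committableOffset("group1", "topic", 0, 17L, "metadata"))
      println(message.record.value)
    }
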
diff --git a/testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitSettings.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitSettings.scala
similarity index 95%
rename from testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitSettings.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitSettings.scala
index ec51367a..bdf541e1 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitSettings.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitSettings.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit
+package org.apache.pekko.kafka.testkit
 
-import akka.actor.ActorSystem
+import org.apache.pekko.actor.ActorSystem
 import com.typesafe.config.Config
 
 import scala.concurrent.duration._
diff --git a/testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitTestcontainersSettings.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitTestcontainersSettings.scala
similarity index 92%
rename from testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitTestcontainersSettings.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitTestcontainersSettings.scala
index 07dfc28c..f9394f97 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/KafkaTestkitTestcontainersSettings.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/KafkaTestkitTestcontainersSettings.scala
@@ -3,15 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit
+package org.apache.pekko.kafka.testkit
 
 import java.time.Duration
 import java.util.function.Consumer
-
-import akka.actor.ActorSystem
-import akka.kafka.testkit.internal.AlpakkaKafkaContainer
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.util.JavaDurationConverters._
 import com.typesafe.config.Config
+import org.apache.pekko.kafka.testkit.internal.PekkoConnectorsKafkaContainer
 import org.testcontainers.containers.GenericContainer
 
 import scala.concurrent.duration.FiniteDuration
@@ -29,10 +28,10 @@ final class KafkaTestkitTestcontainersSettings private (
     val containerLogging: Boolean,
     val clusterStartTimeout: FiniteDuration,
     val readinessCheckTimeout: FiniteDuration,
-    val configureKafka: Vector[AlpakkaKafkaContainer] => Unit = _ => (),
-    val configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[AlpakkaKafkaContainer]] =
-      new Consumer[java.util.Collection[AlpakkaKafkaContainer]]() {
-        override def accept(arg: java.util.Collection[AlpakkaKafkaContainer]): Unit = ()
+    val configureKafka: Vector[PekkoConnectorsKafkaContainer] => Unit = _ => (),
+    val configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[PekkoConnectorsKafkaContainer]] =
+      new Consumer[java.util.Collection[PekkoConnectorsKafkaContainer]]() {
+        override def accept(arg: java.util.Collection[PekkoConnectorsKafkaContainer]): Unit = ()
       },
     val configureZooKeeper: GenericContainer[_] => Unit = _ => (),
     val configureZooKeeperConsumer: java.util.function.Consumer[GenericContainer[_]] =
@@ -155,13 +154,14 @@ final class KafkaTestkitTestcontainersSettings private (
    * Replaces the default Kafka testcontainers configuration logic
    */
   def withConfigureKafkaConsumer(
-      configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[AlpakkaKafkaContainer]])
+      configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[PekkoConnectorsKafkaContainer]])
       : KafkaTestkitTestcontainersSettings = copy(configureKafkaConsumer = configureKafkaConsumer)
 
   /**
    * Replaces the default Kafka testcontainers configuration logic
    */
-  def withConfigureKafka(configureKafka: Vector[AlpakkaKafkaContainer] => Unit): KafkaTestkitTestcontainersSettings =
+  def withConfigureKafka(
+      configureKafka: Vector[PekkoConnectorsKafkaContainer] => Unit): KafkaTestkitTestcontainersSettings =
     copy(configureKafka = configureKafka)
 
   /**
@@ -241,8 +241,8 @@ final class KafkaTestkitTestcontainersSettings private (
       containerLogging: Boolean = containerLogging,
       clusterStartTimeout: FiniteDuration = clusterStartTimeout,
       readinessCheckTimeout: FiniteDuration = readinessCheckTimeout,
-      configureKafka: Vector[AlpakkaKafkaContainer] => Unit = configureKafka,
-      configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[AlpakkaKafkaContainer]] =
+      configureKafka: Vector[PekkoConnectorsKafkaContainer] => Unit = configureKafka,
+      configureKafkaConsumer: java.util.function.Consumer[java.util.Collection[PekkoConnectorsKafkaContainer]] =
         configureKafkaConsumer,
       configureZooKeeper: GenericContainer[_] => Unit = configureZooKeeper,
       configureZooKeeperConsumer: java.util.function.Consumer[GenericContainer[_]] = configureZooKeeperConsumer,
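
The configureKafka hooks above now take the renamed container type; a sketch of the Scala variant, assuming the companion's apply(system) factory from the Alpakka original (the KAFKA_LOG_RETENTION_MS override is only an example of a per-broker tweak):

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings

    object SettingsSketch extends App {
      implicit val system: ActorSystem = ActorSystem("settings-sketch")
      // The hook runs against every PekkoConnectorsKafkaContainer before the cluster starts.
      val settings = KafkaTestkitTestcontainersSettings(system)
        .withConfigureKafka(_.foreach(_.withEnv("KAFKA_LOG_RETENTION_MS", "60000")))
      system.terminate()
    }
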
diff --git a/testkit/src/main/scala/akka/kafka/testkit/ProducerResultFactory.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/ProducerResultFactory.scala
similarity index 91%
rename from testkit/src/main/scala/akka/kafka/testkit/ProducerResultFactory.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/ProducerResultFactory.scala
index faf82be7..174159a6 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/ProducerResultFactory.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/ProducerResultFactory.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit
+package org.apache.pekko.kafka.testkit
 
-import akka.annotation.ApiMayChange
-import akka.kafka.ProducerMessage
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.ProducerMessage
 import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }
 import org.apache.kafka.common.TopicPartition
 
@@ -14,7 +14,7 @@ import scala.jdk.CollectionConverters._
 import scala.collection.immutable
 
 /**
- * Factory methods to create instances that normally are emitted by [[akka.kafka.scaladsl.Producer]] and [[akka.kafka.javadsl.Producer]] flows.
+ * Factory methods to create instances that normally are emitted by [[org.apache.pekko.kafka.scaladsl.Producer]] and [[org.apache.pekko.kafka.javadsl.Producer]] flows.
  */
 @ApiMayChange
 object ProducerResultFactory {
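
A matching sketch for the producer side, assuming the single-argument result(...) overload (which fabricates the RecordMetadata) carried over from Alpakka:

    import org.apache.kafka.clients.producer.ProducerRecord
    import org.apache.pekko.kafka.ProducerMessage
    import org.apache.pekko.kafka.testkit.ProducerResultFactory

    object ProducerResultSketch extends App {
      // Fabricate the Result a producer flow would emit for one message.
      val msg = ProducerMessage.Message(new ProducerRecord("topic", "key", "value"), passThrough = ())
      val result = ProducerResultFactory.result(msg)
      println(result.metadata.offset())
    }
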
diff --git a/testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKit.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKit.scala
similarity index 96%
rename from testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKit.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKit.scala
index d6cbb363..99fa217d 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKit.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKit.scala
@@ -3,16 +3,16 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal
+package org.apache.pekko.kafka.testkit.internal
 
 import java.time.Duration
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicInteger
 import java.util.Arrays
 
-import akka.actor.ActorSystem
-import akka.kafka.testkit.KafkaTestkitSettings
-import akka.kafka.{ CommitterSettings, ConsumerSettings, ProducerSettings }
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.testkit.KafkaTestkitSettings
+import org.apache.pekko.kafka.{ CommitterSettings, ConsumerSettings, ProducerSettings }
 import org.apache.kafka.clients.admin.{ Admin, AdminClientConfig, NewTopic }
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.common.serialization.{ Deserializer, Serializer, StringDeserializer, StringSerializer }
diff --git a/testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKitChecks.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKitChecks.scala
similarity index 98%
rename from testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKitChecks.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKitChecks.scala
index 1a0e4f83..f48d0f77 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/internal/KafkaTestKitChecks.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/KafkaTestKitChecks.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal
+package org.apache.pekko.kafka.testkit.internal
 
 import java.util.Collections
 import java.util.concurrent.TimeUnit
diff --git a/testkit/src/main/scala/akka/kafka/testkit/internal/TestFrameworkInterface.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestFrameworkInterface.scala
similarity index 92%
rename from testkit/src/main/scala/akka/kafka/testkit/internal/TestFrameworkInterface.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestFrameworkInterface.scala
index 34a50443..8da9fd53 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/internal/TestFrameworkInterface.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestFrameworkInterface.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal
+package org.apache.pekko.kafka.testkit.internal
 
 import org.scalatest.{ BeforeAndAfterAll, Suite }
 
diff --git a/testkit/src/main/scala/akka/kafka/testkit/internal/TestcontainersKafka.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestcontainersKafka.scala
similarity index 93%
rename from testkit/src/main/scala/akka/kafka/testkit/internal/TestcontainersKafka.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestcontainersKafka.scala
index 42094415..e65cca65 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/internal/TestcontainersKafka.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/internal/TestcontainersKafka.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.internal
+package org.apache.pekko.kafka.testkit.internal
 
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.KafkaSpec
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.KafkaSpec
+import org.apache.pekko.util.JavaDurationConverters._
 import org.testcontainers.containers.GenericContainer
 import org.testcontainers.utility.DockerImageName
 
@@ -39,7 +39,7 @@ object TestcontainersKafka {
       kafkaBootstrapServersInternal
     }
 
-    def brokerContainers: Vector[AlpakkaKafkaContainer] = {
+    def brokerContainers: Vector[PekkoConnectorsKafkaContainer] = {
       requireStarted()
       cluster.getBrokers.asScala.toVector
     }
diff --git a/testkit/src/main/scala/akka/kafka/testkit/javadsl/ConsumerControlFactory.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/javadsl/ConsumerControlFactory.scala
similarity index 80%
rename from testkit/src/main/scala/akka/kafka/testkit/javadsl/ConsumerControlFactory.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/javadsl/ConsumerControlFactory.scala
index 23a54522..8649dfcd 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/javadsl/ConsumerControlFactory.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/javadsl/ConsumerControlFactory.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.javadsl
+package org.apache.pekko.kafka.testkit.javadsl
 
 import java.util.concurrent.{ CompletableFuture, CompletionStage, Executor }
 
-import akka.Done
-import akka.annotation.ApiMayChange
-import akka.kafka.javadsl.Consumer
-import akka.stream.javadsl.{ Flow, Keep, Source }
-import akka.stream.{ scaladsl, KillSwitch, KillSwitches }
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.javadsl.Consumer
+import org.apache.pekko.stream.javadsl.{ Flow, Keep, Source }
+import org.apache.pekko.stream.{ scaladsl, KillSwitch, KillSwitches }
 import org.apache.kafka.common.{ Metric, MetricName }
 
 /**
- * Helper factory to create [[akka.kafka.javadsl.Consumer.Control]] instances when
+ * Helper factory to create [[org.apache.pekko.kafka.javadsl.Consumer.Control]] instances when
  * testing without a Kafka broker.
  */
 @ApiMayChange
diff --git a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/ConsumerControlFactory.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ConsumerControlFactory.scala
similarity index 76%
rename from testkit/src/main/scala/akka/kafka/testkit/scaladsl/ConsumerControlFactory.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ConsumerControlFactory.scala
index 581e5f1d..e92186f7 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/ConsumerControlFactory.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ConsumerControlFactory.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.scaladsl
+package org.apache.pekko.kafka.testkit.scaladsl
 
-import akka.Done
-import akka.annotation.ApiMayChange
-import akka.kafka.scaladsl.Consumer
-import akka.stream.scaladsl.{ Flow, Keep, Source }
-import akka.stream.{ KillSwitch, KillSwitches }
+import org.apache.pekko.Done
+import org.apache.pekko.annotation.ApiMayChange
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.stream.scaladsl.{ Flow, Keep, Source }
+import org.apache.pekko.stream.{ KillSwitch, KillSwitches }
 import org.apache.kafka.common.{ Metric, MetricName }
 
 import scala.concurrent.{ Future, Promise }
 
 /**
- * Helper factory to create [[akka.kafka.scaladsl.Consumer.Control]] instances when
+ * Helper factory to create [[org.apache.pekko.kafka.scaladsl.Consumer.Control]] instances when
  * testing without a Kafka broker.
  */
 @ApiMayChange
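
Since only the package changed, a hedged usage sketch (the controlFlow() factory that attaches a KillSwitch-backed Control is assumed unchanged from the Alpakka original):

    import org.apache.pekko.actor.ActorSystem
    import org.apache.pekko.kafka.testkit.scaladsl.ConsumerControlFactory
    import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }

    object ControlSketch extends App {
      implicit val system: ActorSystem = ActorSystem("control-sketch")
      // Any Source gains a Consumer.Control materialized value, no broker needed.
      val (control, done) = Source(1 to 10)
        .viaMat(ConsumerControlFactory.controlFlow())(Keep.right)
        .toMat(Sink.ignore)(Keep.both)
        .run()
      control.shutdown()
      system.terminate()
    }
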
diff --git a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/KafkaSpec.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/KafkaSpec.scala
similarity index 91%
rename from testkit/src/main/scala/akka/kafka/testkit/scaladsl/KafkaSpec.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/KafkaSpec.scala
index 228a6310..0e7d76e7 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/KafkaSpec.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/KafkaSpec.scala
@@ -3,24 +3,24 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.scaladsl
+package org.apache.pekko.kafka.testkit.scaladsl
 
 import java.time.Duration
 import java.util
 import java.util.concurrent.TimeUnit
 
-import akka.Done
-import akka.actor.ActorSystem
-import akka.event.LoggingAdapter
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.{ Consumer, Producer }
-import akka.kafka.testkit.internal.{ KafkaTestKit, KafkaTestKitChecks }
-import akka.stream.{ Materializer, SystemMaterializer }
-import akka.stream.scaladsl.{ Keep, Source }
-import akka.stream.testkit.TestSubscriber
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestKit
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
+import org.apache.pekko.kafka.testkit.internal.{ KafkaTestKit, KafkaTestKitChecks }
+import org.apache.pekko.stream.{ Materializer, SystemMaterializer }
+import org.apache.pekko.stream.scaladsl.{ Keep, Source }
+import org.apache.pekko.stream.testkit.TestSubscriber
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestKit
 import org.apache.kafka.clients.admin._
 import org.apache.kafka.clients.producer.{ Producer => KProducer, ProducerRecord }
 import org.apache.kafka.common.ConsumerGroupState
@@ -47,7 +47,7 @@ abstract class KafkaSpec(_kafkaPort: Int, val zooKeeperPort: Int, actorSystem: A
 
   implicit val ec: ExecutionContext = system.dispatcher
   implicit val mat: Materializer = SystemMaterializer(system).materializer
-  implicit val scheduler: akka.actor.Scheduler = system.scheduler
+  implicit val scheduler: org.apache.pekko.actor.Scheduler = system.scheduler
 
   var testProducer: KProducer[String, String] = _
 
diff --git a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala
similarity index 74%
rename from testkit/src/main/scala/akka/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala
index cfb77dcb..a4142cc2 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/ScalatestKafkaSpec.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.scaladsl
+package org.apache.pekko.kafka.testkit.scaladsl
 
-import akka.kafka.testkit.internal.TestFrameworkInterface
+import org.apache.pekko.kafka.testkit.internal.TestFrameworkInterface
 import org.scalatest.Suite
 
 abstract class ScalatestKafkaSpec(kafkaPort: Int)
diff --git a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala
similarity index 83%
rename from testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala
index cc5f09e5..4ba17241 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaLike.scala
@@ -3,10 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.scaladsl
+package org.apache.pekko.kafka.testkit.scaladsl
 
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.internal.{ AlpakkaKafkaContainer, SchemaRegistryContainer, TestcontainersKafka }
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.internal.{
+  PekkoConnectorsKafkaContainer,
+  SchemaRegistryContainer,
+  TestcontainersKafka
+}
 import org.testcontainers.containers.GenericContainer
 
 /**
@@ -19,7 +23,7 @@ import org.testcontainers.containers.GenericContainer
 trait TestcontainersKafkaLike extends TestcontainersKafka.Spec {
   override def kafkaPort: Int = TestcontainersKafka.Singleton.kafkaPort
   override def bootstrapServers: String = TestcontainersKafka.Singleton.bootstrapServers
-  override def brokerContainers: Vector[AlpakkaKafkaContainer] = TestcontainersKafka.Singleton.brokerContainers
+  override def brokerContainers: Vector[PekkoConnectorsKafkaContainer] = TestcontainersKafka.Singleton.brokerContainers
   override def zookeeperContainer: GenericContainer[_] = TestcontainersKafka.Singleton.zookeeperContainer
   override def schemaRegistryContainer: Option[SchemaRegistryContainer] =
     TestcontainersKafka.Singleton.schemaRegistryContainer
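
A sketch of mixing the trait into a test; SpecBase here is the tests-module helper imported by the failover specs further down, and the spec body is illustrative:

    import org.apache.pekko.kafka.scaladsl.SpecBase
    import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike

    class SingletonClusterSpec extends SpecBase with TestcontainersKafkaLike {
      "the shared testcontainers cluster" must {
        "expose bootstrap servers" in {
          assert(bootstrapServers.nonEmpty)
        }
      }
    }
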
diff --git a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala
similarity index 83%
rename from testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala
rename to testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala
index 507ff433..cc19794c 100644
--- a/testkit/src/main/scala/akka/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala
+++ b/testkit/src/main/scala/org/apache/pekko/kafka/testkit/scaladsl/TestcontainersKafkaPerClassLike.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.testkit.scaladsl
+package org.apache.pekko.kafka.testkit.scaladsl
 
-import akka.kafka.testkit.internal.TestcontainersKafka
+import org.apache.pekko.kafka.testkit.internal.TestcontainersKafka
 
 /**
  * Uses [[https://www.testcontainers.org/ Testcontainers]] to start a Kafka broker in a Docker container once per class.
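
The per-class variant is what the failover specs below build on; a sketch of sizing the cluster per spec (withNumBrokers and withInternalTopicsReplicationFactor are assumed carried over from the Alpakka settings class, and this mirrors how those specs configure themselves):

    import org.apache.pekko.kafka.scaladsl.SpecBase
    import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
    import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike

    class ThreeBrokerSpec extends SpecBase with TestcontainersKafkaPerClassLike {
      override val testcontainersSettings = KafkaTestkitTestcontainersSettings(system)
        .withNumBrokers(3)
        .withInternalTopicsReplicationFactor(2)
      // failover tests against this class's own cluster go here
    }
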
diff --git a/tests/src/it/resources/logback-test.xml b/tests/src/it/resources/logback-test.xml
index 1aaa8ad2..c71cf40d 100644
--- a/tests/src/it/resources/logback-test.xml
+++ b/tests/src/it/resources/logback-test.xml
@@ -18,15 +18,15 @@
         </encoder>
     </appender>
 
-    <appender name="CapturingAppender" class="akka.kafka.tests.CapturingAppender"/>
+    <appender name="CapturingAppender" class="org.apache.pekko.kafka.tests.CapturingAppender"/>
 
-    <logger name="akka.kafka.tests.CapturingAppenderDelegate">
+    <logger name="org.apache.pekko.kafka.tests.CapturingAppenderDelegate">
         <appender-ref ref="STDOUT"/>
     </logger>
 
-    <logger name="akka" level="DEBUG"/>
-    <logger name="akka.kafka" level="DEBUG"/>
-    <logger name="akka.kafka.test.testcontainers.logs" level="INFO" />
+    <logger name="org.apache.pekko" level="DEBUG"/>
+    <logger name="org.apache.pekko.kafka" level="DEBUG"/>
+    <logger name="org.apache.pekko.kafka.test.testcontainers.logs" level="INFO" />
     <logger name="docs.scaladsl" levle="DEBUG"/>
 
     <logger name="org.apache.zookeeper" level="WARN"/>
diff --git a/tests/src/it/scala/akka/kafka/IntegrationTests.scala b/tests/src/it/scala/org/apache/pekko/kafka/IntegrationTests.scala
similarity index 93%
rename from tests/src/it/scala/akka/kafka/IntegrationTests.scala
rename to tests/src/it/scala/org/apache/pekko/kafka/IntegrationTests.scala
index 8b610b8c..356677f2 100644
--- a/tests/src/it/scala/akka/kafka/IntegrationTests.scala
+++ b/tests/src/it/scala/org/apache/pekko/kafka/IntegrationTests.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.NotUsed
-import akka.stream.scaladsl.Flow
+import org.apache.pekko.NotUsed
+import org.apache.pekko.stream.scaladsl.Flow
 import org.apache.kafka.common.TopicPartition
 import org.slf4j.Logger
 import org.testcontainers.containers.GenericContainer
diff --git a/tests/src/it/scala/akka/kafka/PartitionedSourceFailoverSpec.scala b/tests/src/it/scala/org/apache/pekko/kafka/PartitionedSourceFailoverSpec.scala
similarity index 90%
rename from tests/src/it/scala/akka/kafka/PartitionedSourceFailoverSpec.scala
rename to tests/src/it/scala/org/apache/pekko/kafka/PartitionedSourceFailoverSpec.scala
index 8c45053c..1254a001 100644
--- a/tests/src/it/scala/akka/kafka/PartitionedSourceFailoverSpec.scala
+++ b/tests/src/it/scala/org/apache/pekko/kafka/PartitionedSourceFailoverSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.Done
-import akka.kafka.scaladsl.{ Consumer, Producer, SpecBase }
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer, SpecBase }
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.{ ProducerConfig, ProducerRecord }
 import org.apache.kafka.common.config.TopicConfig
diff --git a/tests/src/it/scala/akka/kafka/PlainSourceFailoverSpec.scala b/tests/src/it/scala/org/apache/pekko/kafka/PlainSourceFailoverSpec.scala
similarity index 89%
rename from tests/src/it/scala/akka/kafka/PlainSourceFailoverSpec.scala
rename to tests/src/it/scala/org/apache/pekko/kafka/PlainSourceFailoverSpec.scala
index e4389f05..7fb89f0b 100644
--- a/tests/src/it/scala/akka/kafka/PlainSourceFailoverSpec.scala
+++ b/tests/src/it/scala/org/apache/pekko/kafka/PlainSourceFailoverSpec.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.kafka.scaladsl.{ Consumer, Producer, SpecBase }
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer, SpecBase }
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.{ ProducerConfig, ProducerRecord }
 import org.apache.kafka.common.config.TopicConfig
diff --git a/tests/src/it/scala/akka/kafka/TransactionsPartitionedSourceSpec.scala b/tests/src/it/scala/org/apache/pekko/kafka/TransactionsPartitionedSourceSpec.scala
similarity index 91%
rename from tests/src/it/scala/akka/kafka/TransactionsPartitionedSourceSpec.scala
rename to tests/src/it/scala/org/apache/pekko/kafka/TransactionsPartitionedSourceSpec.scala
index 0f8f199c..c56040eb 100644
--- a/tests/src/it/scala/akka/kafka/TransactionsPartitionedSourceSpec.scala
+++ b/tests/src/it/scala/org/apache/pekko/kafka/TransactionsPartitionedSourceSpec.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.Done
-import akka.kafka.scaladsl.SpecBase
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream._
-import akka.stream.scaladsl.{ Keep, RestartSource, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.scaladsl.SpecBase
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.scaladsl.{ Keep, RestartSource, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.scalatest.concurrent.PatienceConfiguration.Interval
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.Ignore
diff --git a/tests/src/it/scala/akka/kafka/TransactionsSourceSpec.scala b/tests/src/it/scala/org/apache/pekko/kafka/TransactionsSourceSpec.scala
similarity index 93%
rename from tests/src/it/scala/akka/kafka/TransactionsSourceSpec.scala
rename to tests/src/it/scala/org/apache/pekko/kafka/TransactionsSourceSpec.scala
index a0553d45..015f1d30 100644
--- a/tests/src/it/scala/akka/kafka/TransactionsSourceSpec.scala
+++ b/tests/src/it/scala/org/apache/pekko/kafka/TransactionsSourceSpec.scala
@@ -3,18 +3,18 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.Done
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.{ Consumer, SpecBase, Transactional }
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream._
-import akka.stream.scaladsl.{ Flow, Keep, RestartSource, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.{ Consumer, SpecBase, Transactional }
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.scaladsl.{ Flow, Keep, RestartSource, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.scalatest.concurrent.PatienceConfiguration.Interval
 import org.scalatest.concurrent.ScalaFutures
diff --git a/tests/src/main/scala/akka/kafka/KafkaPorts.scala b/tests/src/main/scala/org/apache/pekko/kafka/KafkaPorts.scala
similarity index 92%
rename from tests/src/main/scala/akka/kafka/KafkaPorts.scala
rename to tests/src/main/scala/org/apache/pekko/kafka/KafkaPorts.scala
index 3e24db46..01dba327 100644
--- a/tests/src/main/scala/akka/kafka/KafkaPorts.scala
+++ b/tests/src/main/scala/org/apache/pekko/kafka/KafkaPorts.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 /**
  * Ports to use for Kafka and Zookeeper throughout integration tests.
diff --git a/tests/src/test/java/docs/javadsl/AssignmentTest.java b/tests/src/test/java/docs/javadsl/AssignmentTest.java
index d8935da9..2d3c410b 100644
--- a/tests/src/test/java/docs/javadsl/AssignmentTest.java
+++ b/tests/src/test/java/docs/javadsl/AssignmentTest.java
@@ -5,22 +5,22 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.kafka.AutoSubscription;
-import akka.kafka.ManualSubscription;
-import akka.kafka.ProducerMessage;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.AutoSubscription;
+import org.apache.pekko.kafka.ManualSubscription;
+import org.apache.pekko.kafka.ProducerMessage;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
 // #testkit
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
 // #testkit
-import akka.kafka.javadsl.Producer;
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
 // #testkit
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.testkit.javadsl.TestKit;
 // #testkit
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerRecord;
diff --git a/tests/src/test/java/docs/javadsl/AtLeastOnceTest.java b/tests/src/test/java/docs/javadsl/AtLeastOnceTest.java
index 44bd6988..e2583036 100644
--- a/tests/src/test/java/docs/javadsl/AtLeastOnceTest.java
+++ b/tests/src/test/java/docs/javadsl/AtLeastOnceTest.java
@@ -8,13 +8,13 @@ package docs.javadsl;
 import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 
-import akka.NotUsed;
-import akka.actor.ActorSystem;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
-import akka.stream.javadsl.Keep;
-import akka.stream.javadsl.Sink;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.NotUsed;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.stream.javadsl.Keep;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import org.junit.*;
 import java.util.Arrays;
 import java.util.List;
@@ -27,14 +27,14 @@ import java.util.stream.Collectors;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 
 // #oneToMany #oneToConditional
-import akka.Done;
-import akka.japi.Pair;
-import akka.kafka.*;
-import akka.kafka.ConsumerMessage.CommittableOffset;
-import akka.kafka.ProducerMessage.Envelope;
-import akka.kafka.javadsl.Committer;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
+import org.apache.pekko.Done;
+import org.apache.pekko.japi.Pair;
+import org.apache.pekko.kafka.*;
+import org.apache.pekko.kafka.ConsumerMessage.CommittableOffset;
+import org.apache.pekko.kafka.ProducerMessage.Envelope;
+import org.apache.pekko.kafka.javadsl.Committer;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
 import org.apache.kafka.clients.producer.ProducerRecord;
 
 // #oneToMany #oneToConditional
diff --git a/tests/src/test/java/docs/javadsl/ClusterShardingExample.java b/tests/src/test/java/docs/javadsl/ClusterShardingExample.java
index 10c6bac2..ac96a663 100644
--- a/tests/src/test/java/docs/javadsl/ClusterShardingExample.java
+++ b/tests/src/test/java/docs/javadsl/ClusterShardingExample.java
@@ -5,24 +5,24 @@
 
 package docs.javadsl;
 
-import akka.NotUsed;
-import akka.actor.typed.ActorSystem;
-import akka.actor.typed.Behavior;
-import akka.actor.typed.javadsl.Adapter;
-import akka.actor.typed.javadsl.Behaviors;
-import akka.cluster.sharding.external.ExternalShardAllocationStrategy;
-import akka.cluster.sharding.typed.javadsl.ClusterSharding;
-import akka.cluster.sharding.typed.javadsl.Entity;
-import akka.cluster.sharding.typed.javadsl.EntityTypeKey;
-import akka.kafka.AutoSubscription;
-import akka.kafka.ConsumerRebalanceEvent;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.Subscriptions;
-import akka.kafka.cluster.sharding.KafkaClusterSharding;
-import akka.kafka.javadsl.Consumer;
-import akka.stream.javadsl.Flow;
-import akka.stream.javadsl.Sink;
-import akka.util.Timeout;
+import org.apache.pekko.NotUsed;
+import org.apache.pekko.actor.typed.ActorSystem;
+import org.apache.pekko.actor.typed.Behavior;
+import org.apache.pekko.actor.typed.javadsl.Adapter;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.cluster.sharding.external.ExternalShardAllocationStrategy;
+import org.apache.pekko.cluster.sharding.typed.javadsl.ClusterSharding;
+import org.apache.pekko.cluster.sharding.typed.javadsl.Entity;
+import org.apache.pekko.cluster.sharding.typed.javadsl.EntityTypeKey;
+import org.apache.pekko.kafka.AutoSubscription;
+import org.apache.pekko.kafka.ConsumerRebalanceEvent;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.cluster.sharding.KafkaClusterSharding;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.stream.javadsl.Flow;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.util.Timeout;
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.kafka.common.serialization.StringDeserializer;
 
@@ -84,7 +84,7 @@ public class ClusterShardingExample {
     // #setup-cluster-sharding
 
     // #rebalance-listener
-    akka.actor.typed.ActorRef<ConsumerRebalanceEvent> rebalanceListener =
+    org.apache.pekko.actor.typed.ActorRef<ConsumerRebalanceEvent> rebalanceListener =
         KafkaClusterSharding.get(system).rebalanceListener(typeKey);
 
     ConsumerSettings<String, byte[]> consumerSettings =
diff --git a/tests/src/test/java/docs/javadsl/ConsumerExampleTest.java b/tests/src/test/java/docs/javadsl/ConsumerExampleTest.java
index 983acbdd..b2f07c30 100644
--- a/tests/src/test/java/docs/javadsl/ConsumerExampleTest.java
+++ b/tests/src/test/java/docs/javadsl/ConsumerExampleTest.java
@@ -5,32 +5,32 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.NotUsed;
-import akka.actor.AbstractLoggingActor;
-import akka.actor.ActorRef;
-import akka.actor.ActorSystem;
-import akka.actor.Props;
-import akka.actor.typed.Behavior;
-import akka.actor.typed.javadsl.ActorContext;
-import akka.actor.typed.javadsl.Behaviors;
+import org.apache.pekko.Done;
+import org.apache.pekko.NotUsed;
+import org.apache.pekko.actor.AbstractLoggingActor;
+import org.apache.pekko.actor.ActorRef;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.actor.Props;
+import org.apache.pekko.actor.typed.Behavior;
+import org.apache.pekko.actor.typed.javadsl.ActorContext;
+import org.apache.pekko.actor.typed.javadsl.Behaviors;
 // #withTypedRebalanceListenerActor
 // #consumerActorTyped
 // adds support for actors to a classic actor system and context
-import akka.actor.typed.javadsl.Adapter;
+import org.apache.pekko.actor.typed.javadsl.Adapter;
 // #consumerActorTyped
 // #withTypedRebalanceListenerActor
-import akka.japi.Pair;
-import akka.kafka.*;
-import akka.kafka.javadsl.Committer;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.kafka.javadsl.PartitionAssignmentHandler;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaTest;
-import akka.kafka.tests.javadsl.LogCapturingExtension;
-import akka.stream.RestartSettings;
-import akka.stream.javadsl.*;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.japi.Pair;
+import org.apache.pekko.kafka.*;
+import org.apache.pekko.kafka.javadsl.Committer;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.kafka.javadsl.PartitionAssignmentHandler;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaTest;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingExtension;
+import org.apache.pekko.stream.RestartSettings;
+import org.apache.pekko.stream.javadsl.*;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import com.typesafe.config.Config;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -538,10 +538,10 @@ class ConsumerExampleTest extends TestcontainersKafkaTest {
               Behavior<ConsumerRebalanceEvent> listener =
                   Behaviors.setup(ctx -> rebalanceListener.apply(ctx));
 
-              akka.actor.typed.ActorRef<ConsumerRebalanceEvent> typedRef =
+              org.apache.pekko.actor.typed.ActorRef<ConsumerRebalanceEvent> typedRef =
                   guardianCtx.spawn(listener, "rebalance-listener");
 
-              akka.actor.ActorRef classicRef = Adapter.toClassic(typedRef);
+              org.apache.pekko.actor.ActorRef classicRef = Adapter.toClassic(typedRef);
 
               Subscription subscription =
                   Subscriptions.topics(topic)
@@ -565,8 +565,9 @@ class ConsumerExampleTest extends TestcontainersKafkaTest {
               return Behaviors.stopped();
             });
 
-    akka.actor.typed.ActorSystem<Object> typed =
-        akka.actor.typed.ActorSystem.create(guardian, "typed-rebalance-listener-example");
+    org.apache.pekko.actor.typed.ActorSystem<Object> typed =
+        org.apache.pekko.actor.typed.ActorSystem.create(
+            guardian, "typed-rebalance-listener-example");
     assertDone(typed.getWhenTerminated());
   }
 
diff --git a/tests/src/test/java/docs/javadsl/ConsumerSettingsTest.java b/tests/src/test/java/docs/javadsl/ConsumerSettingsTest.java
index ce4e4fa6..10431ec0 100644
--- a/tests/src/test/java/docs/javadsl/ConsumerSettingsTest.java
+++ b/tests/src/test/java/docs/javadsl/ConsumerSettingsTest.java
@@ -5,13 +5,13 @@
 
 package docs.javadsl;
 
-import akka.actor.ActorSystem;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.ConsumerSettingsSpec$;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.ConsumerSettingsSpec$;
 // #discovery-settings
-import akka.kafka.javadsl.DiscoverySupport;
+import org.apache.pekko.kafka.javadsl.DiscoverySupport;
 // #discovery-settings
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import org.apache.kafka.common.serialization.StringDeserializer;
diff --git a/tests/src/test/java/docs/javadsl/FetchMetadataTest.java b/tests/src/test/java/docs/javadsl/FetchMetadataTest.java
index 31d14b7a..775de496 100644
--- a/tests/src/test/java/docs/javadsl/FetchMetadataTest.java
+++ b/tests/src/test/java/docs/javadsl/FetchMetadataTest.java
@@ -6,13 +6,13 @@
 package docs.javadsl;
 
 // #metadata
-import akka.actor.ActorRef;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.KafkaConsumerActor;
-import akka.kafka.Metadata;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
-import akka.pattern.Patterns;
+import org.apache.pekko.actor.ActorRef;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.KafkaConsumerActor;
+import org.apache.pekko.kafka.Metadata;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.pattern.Patterns;
 import java.time.Duration;
 import java.util.List;
 import java.util.Optional;
@@ -21,9 +21,9 @@ import java.util.stream.Collectors;
 import org.apache.kafka.common.PartitionInfo;
 
 // #metadata
-import akka.actor.ActorSystem;
+import org.apache.pekko.actor.ActorSystem;
 import java.util.concurrent.TimeUnit;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import org.junit.AfterClass;
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/tests/src/test/java/docs/javadsl/MetadataClientTest.java b/tests/src/test/java/docs/javadsl/MetadataClientTest.java
index 49d2ba52..fc82f8dd 100644
--- a/tests/src/test/java/docs/javadsl/MetadataClientTest.java
+++ b/tests/src/test/java/docs/javadsl/MetadataClientTest.java
@@ -6,14 +6,14 @@
 package docs.javadsl;
 
 // #metadataClient
-import akka.actor.ActorSystem;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.javadsl.MetadataClient;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
-import akka.testkit.javadsl.TestKit;
-import akka.util.Timeout;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.javadsl.MetadataClient;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.testkit.javadsl.TestKit;
+import org.apache.pekko.util.Timeout;
 // #metadataClient
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
 import org.apache.kafka.common.PartitionInfo;
 import org.apache.kafka.common.TopicPartition;
 import org.hamcrest.core.IsInstanceOf;
diff --git a/tests/src/test/java/docs/javadsl/ProducerSettingsTest.java b/tests/src/test/java/docs/javadsl/ProducerSettingsTest.java
index 3a06bf76..37e9bba9 100644
--- a/tests/src/test/java/docs/javadsl/ProducerSettingsTest.java
+++ b/tests/src/test/java/docs/javadsl/ProducerSettingsTest.java
@@ -5,13 +5,13 @@
 
 package docs.javadsl;
 
-import akka.actor.ActorSystem;
-import akka.kafka.ProducerSettings;
-import akka.kafka.ProducerSettingsSpec$;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.ProducerSettingsSpec$;
 // #discovery-settings
-import akka.kafka.javadsl.DiscoverySupport;
+import org.apache.pekko.kafka.javadsl.DiscoverySupport;
 // #discovery-settings
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import org.apache.kafka.common.serialization.StringSerializer;
diff --git a/tests/src/test/java/docs/javadsl/ProducerTest.java b/tests/src/test/java/docs/javadsl/ProducerTest.java
index bc257a98..1eada873 100644
--- a/tests/src/test/java/docs/javadsl/ProducerTest.java
+++ b/tests/src/test/java/docs/javadsl/ProducerTest.java
@@ -5,19 +5,19 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.kafka.*;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.*;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
 // #testkit
-import akka.kafka.testkit.javadsl.TestcontainersKafkaTest;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaTest;
 // #testkit
-import akka.kafka.tests.javadsl.LogCapturingExtension;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingExtension;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
 // #testkit
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.testkit.javadsl.TestKit;
 // #testkit
 import com.typesafe.config.Config;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
diff --git a/tests/src/test/java/docs/javadsl/SchemaRegistrySerializationTest.java b/tests/src/test/java/docs/javadsl/SchemaRegistrySerializationTest.java
index 542939fd..e2953977 100644
--- a/tests/src/test/java/docs/javadsl/SchemaRegistrySerializationTest.java
+++ b/tests/src/test/java/docs/javadsl/SchemaRegistrySerializationTest.java
@@ -5,18 +5,18 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.ProducerSettings;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import docs.scaladsl.SampleAvroClass;
 // #imports
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
diff --git a/tests/src/test/java/docs/javadsl/SendProducerTest.java b/tests/src/test/java/docs/javadsl/SendProducerTest.java
index ccfca7ec..823594b7 100644
--- a/tests/src/test/java/docs/javadsl/SendProducerTest.java
+++ b/tests/src/test/java/docs/javadsl/SendProducerTest.java
@@ -5,13 +5,13 @@
 
 package docs.javadsl;
 
-import akka.actor.ActorSystem;
-import akka.kafka.ProducerMessage;
-import akka.kafka.ProducerSettings;
-import akka.kafka.javadsl.SendProducer;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaTest;
-import akka.kafka.tests.javadsl.LogCapturingExtension;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ProducerMessage;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.javadsl.SendProducer;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaTest;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingExtension;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.clients.producer.RecordMetadata;
diff --git a/tests/src/test/java/docs/javadsl/SerializationTest.java b/tests/src/test/java/docs/javadsl/SerializationTest.java
index c8c580e5..57de7c48 100644
--- a/tests/src/test/java/docs/javadsl/SerializationTest.java
+++ b/tests/src/test/java/docs/javadsl/SerializationTest.java
@@ -5,19 +5,19 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.actor.ActorSystem;
-import akka.kafka.ConsumerSettings;
-import akka.kafka.ProducerSettings;
-import akka.kafka.Subscriptions;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Producer;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaTest;
-import akka.kafka.tests.javadsl.LogCapturingExtension;
-import akka.stream.*;
-import akka.stream.javadsl.Sink;
-import akka.stream.javadsl.Source;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.Done;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.ConsumerSettings;
+import org.apache.pekko.kafka.ProducerSettings;
+import org.apache.pekko.kafka.Subscriptions;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Producer;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaTest;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingExtension;
+import org.apache.pekko.stream.*;
+import org.apache.pekko.stream.javadsl.Sink;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.testkit.javadsl.TestKit;
 // #jackson-imports
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
diff --git a/tests/src/test/java/docs/javadsl/TestkitSamplesTest.java b/tests/src/test/java/docs/javadsl/TestkitSamplesTest.java
index a8dc9d47..0833b1b1 100644
--- a/tests/src/test/java/docs/javadsl/TestkitSamplesTest.java
+++ b/tests/src/test/java/docs/javadsl/TestkitSamplesTest.java
@@ -5,20 +5,20 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.NotUsed;
-import akka.actor.ActorSystem;
-import akka.japi.Pair;
-import akka.kafka.CommitterSettings;
-import akka.kafka.ConsumerMessage;
-import akka.kafka.ProducerMessage;
-import akka.kafka.javadsl.Committer;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
-import akka.stream.javadsl.Flow;
-import akka.stream.javadsl.Keep;
-import akka.stream.javadsl.Source;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.Done;
+import org.apache.pekko.NotUsed;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.japi.Pair;
+import org.apache.pekko.kafka.CommitterSettings;
+import org.apache.pekko.kafka.ConsumerMessage;
+import org.apache.pekko.kafka.ProducerMessage;
+import org.apache.pekko.kafka.javadsl.Committer;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.stream.javadsl.Flow;
+import org.apache.pekko.stream.javadsl.Keep;
+import org.apache.pekko.stream.javadsl.Source;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.junit.AfterClass;
@@ -29,9 +29,9 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.CoreMatchers.*;
 
 // #factories
-import akka.kafka.testkit.ConsumerResultFactory;
-import akka.kafka.testkit.ProducerResultFactory;
-import akka.kafka.testkit.javadsl.ConsumerControlFactory;
+import org.apache.pekko.kafka.testkit.ConsumerResultFactory;
+import org.apache.pekko.kafka.testkit.ProducerResultFactory;
+import org.apache.pekko.kafka.testkit.javadsl.ConsumerControlFactory;
 // #factories
 
 import java.util.Arrays;
diff --git a/tests/src/test/java/docs/javadsl/TestkitTestcontainersTest.java b/tests/src/test/java/docs/javadsl/TestkitTestcontainersTest.java
index 3b4ec9ab..dc1927e0 100644
--- a/tests/src/test/java/docs/javadsl/TestkitTestcontainersTest.java
+++ b/tests/src/test/java/docs/javadsl/TestkitTestcontainersTest.java
@@ -6,9 +6,9 @@
 package docs.javadsl;
 
 // #testcontainers-settings
-import akka.actor.ActorSystem;
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaTest;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaTest;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.TestInstance;
 
diff --git a/tests/src/test/java/docs/javadsl/TransactionsExampleTest.java b/tests/src/test/java/docs/javadsl/TransactionsExampleTest.java
index 08d7df87..06e0f979 100644
--- a/tests/src/test/java/docs/javadsl/TransactionsExampleTest.java
+++ b/tests/src/test/java/docs/javadsl/TransactionsExampleTest.java
@@ -5,17 +5,17 @@
 
 package docs.javadsl;
 
-import akka.Done;
-import akka.NotUsed;
-import akka.actor.ActorSystem;
-import akka.kafka.*;
-import akka.kafka.javadsl.Consumer;
-import akka.kafka.javadsl.Transactional;
-import akka.kafka.testkit.javadsl.TestcontainersKafkaJunit4Test;
-import akka.kafka.tests.javadsl.LogCapturingJunit4;
-import akka.stream.RestartSettings;
-import akka.stream.javadsl.*;
-import akka.testkit.javadsl.TestKit;
+import org.apache.pekko.Done;
+import org.apache.pekko.NotUsed;
+import org.apache.pekko.actor.ActorSystem;
+import org.apache.pekko.kafka.*;
+import org.apache.pekko.kafka.javadsl.Consumer;
+import org.apache.pekko.kafka.javadsl.Transactional;
+import org.apache.pekko.kafka.testkit.TestcontainersKafkaJunit4Test;
+import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4;
+import org.apache.pekko.stream.RestartSettings;
+import org.apache.pekko.stream.javadsl.*;
+import org.apache.pekko.testkit.javadsl.TestKit;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.junit.AfterClass;
diff --git a/tests/src/test/resources/application.conf b/tests/src/test/resources/application.conf
index fb15a0f0..7bfceefc 100644
--- a/tests/src/test/resources/application.conf
+++ b/tests/src/test/resources/application.conf
@@ -1,7 +1,7 @@
-akka {
-  loggers = ["akka.event.slf4j.Slf4jLogger"]
+pekko {
+  loggers = ["org.apache.pekko.event.slf4j.Slf4jLogger"]
   loglevel = "DEBUG"
-  logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
+  logging-filter = "org.apache.pekko.event.slf4j.Slf4jLoggingFilter"
   logger-startup-timeout = 15s
 
   actor {
@@ -14,9 +14,18 @@ akka {
     timefactor = ${?AKKA_TEST_TIMEFACTOR}
     single-expect-default = 10s
   }
+}
 
-  kafka.consumer {
-    stop-timeout = 10ms
+akka {
+  kafka {
+    consumer {
+      stop-timeout = 10ms
+    }
+    testkit.testcontainers {
+      # enabled for all tests because the cluster is only started once per test run
+      use-schema-registry = true
+      container-logging = true
+    }
   }
 }
 
@@ -30,9 +39,3 @@ our-kafka-consumer: ${akka.kafka.consumer} {
   }
 }
 # #consumer-config-inheritance
-
-akka.kafka.testkit.testcontainers {
-  # enabled for all tests because the cluster is only started once per test run
-  use-schema-registry = true
-  container-logging = true
-}
diff --git a/tests/src/test/resources/logback-test.xml b/tests/src/test/resources/logback-test.xml
index eaced1d3..87d58a10 100644
--- a/tests/src/test/resources/logback-test.xml
+++ b/tests/src/test/resources/logback-test.xml
@@ -13,15 +13,15 @@
         </encoder>
     </appender>
 
-    <appender name="CapturingAppender" class="akka.kafka.tests.CapturingAppender"/>
+    <appender name="CapturingAppender" class="org.apache.pekko.kafka.tests.CapturingAppender"/>
 
-    <logger name="akka.kafka.tests.CapturingAppenderDelegate">
+    <logger name="org.apache.pekko.kafka.tests.CapturingAppenderDelegate">
         <appender-ref ref="STDOUT"/>
     </logger>
 
-    <logger name="akka" level="DEBUG"/>
-    <logger name="akka.actor.TimerScheduler" level="INFO"/>
-    <logger name="akka.kafka" level="DEBUG"/>
+    <logger name="org.apache.pekko" level="DEBUG"/>
+    <logger name="org.apache.pekko.actor.TimerScheduler" level="INFO"/>
+    <logger name="org.apache.pekko.kafka" level="DEBUG"/>
 
     <logger name="org.apache.zookeeper" level="WARN"/>
     <logger name="org.I0Itec.zkclient" level="WARN"/>
diff --git a/tests/src/test/scala/docs/scaladsl/AssignmentSpec.scala b/tests/src/test/scala/docs/scaladsl/AssignmentSpec.scala
index 05c53587..c49972f8 100644
--- a/tests/src/test/scala/docs/scaladsl/AssignmentSpec.scala
+++ b/tests/src/test/scala/docs/scaladsl/AssignmentSpec.scala
@@ -5,12 +5,12 @@
 
 package docs.scaladsl
 
-import akka.Done
-import akka.kafka.Subscriptions
-import akka.kafka.scaladsl.{ Consumer, Producer, SpecBase }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.Subscriptions
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer, SpecBase }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.TopicPartition
 
diff --git a/tests/src/test/scala/docs/scaladsl/AtLeastOnce.scala b/tests/src/test/scala/docs/scaladsl/AtLeastOnce.scala
index a1838bc8..7d9ef129 100644
--- a/tests/src/test/scala/docs/scaladsl/AtLeastOnce.scala
+++ b/tests/src/test/scala/docs/scaladsl/AtLeastOnce.scala
@@ -6,15 +6,15 @@
 package docs.scaladsl
 
 // #oneToMany
-import akka.{ Done, NotUsed }
-import akka.kafka.ConsumerMessage.{ CommittableOffset, CommittableOffsetBatch }
-import akka.kafka.ProducerMessage.Envelope
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.{ ProducerMessage, Subscriptions }
-import akka.kafka.scaladsl.{ Committer, Consumer, Producer }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.scaladsl.{ Keep, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.{ Done, NotUsed }
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableOffset, CommittableOffsetBatch }
+import org.apache.pekko.kafka.ProducerMessage.Envelope
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.{ ProducerMessage, Subscriptions }
+import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer, Producer }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.collection.immutable
diff --git a/tests/src/test/scala/docs/scaladsl/ClusterShardingExample.scala b/tests/src/test/scala/docs/scaladsl/ClusterShardingExample.scala
index 0bff4189..1a0ed5db 100644
--- a/tests/src/test/scala/docs/scaladsl/ClusterShardingExample.scala
+++ b/tests/src/test/scala/docs/scaladsl/ClusterShardingExample.scala
@@ -5,17 +5,17 @@
 
 package docs.scaladsl
 
-import akka.NotUsed
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.typed.scaladsl.adapter._
-import akka.actor.typed.{ ActorSystem, Behavior }
-import akka.cluster.sharding.external.ExternalShardAllocationStrategy
-import akka.cluster.sharding.typed.ClusterShardingSettings
-import akka.cluster.sharding.typed.scaladsl.{ ClusterSharding, Entity, EntityTypeKey }
-import akka.kafka.cluster.sharding.KafkaClusterSharding
-import akka.kafka.scaladsl.Consumer
-import akka.kafka.{ ConsumerRebalanceEvent, ConsumerSettings, Subscriptions }
-import akka.stream.scaladsl.{ Flow, Sink }
+import org.apache.pekko.NotUsed
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.typed.scaladsl.adapter._
+import org.apache.pekko.actor.typed.{ ActorSystem, Behavior }
+import org.apache.pekko.cluster.sharding.external.ExternalShardAllocationStrategy
+import org.apache.pekko.cluster.sharding.typed.ClusterShardingSettings
+import org.apache.pekko.cluster.sharding.typed.scaladsl.{ ClusterSharding, Entity, EntityTypeKey }
+import org.apache.pekko.kafka.cluster.sharding.KafkaClusterSharding
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.kafka.{ ConsumerRebalanceEvent, ConsumerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl.{ Flow, Sink }
 import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, StringDeserializer }
 
 import scala.concurrent.Future
@@ -70,12 +70,12 @@ object ClusterShardingExample {
 
   // #rebalance-listener
   // obtain an Akka classic ActorRef that will handle consumer group rebalance events
-  val rebalanceListener: akka.actor.typed.ActorRef[ConsumerRebalanceEvent] =
+  val rebalanceListener: org.apache.pekko.actor.typed.ActorRef[ConsumerRebalanceEvent] =
     KafkaClusterSharding(system.toClassic).rebalanceListener(typeKey)
 
   // convert the rebalance listener to a classic ActorRef until Alpakka Kafka supports Akka Typed
-  import akka.actor.typed.scaladsl.adapter._
-  val rebalanceListenerClassic: akka.actor.ActorRef = rebalanceListener.toClassic
+  import org.apache.pekko.actor.typed.scaladsl.adapter._
+  val rebalanceListenerClassic: org.apache.pekko.actor.ActorRef = rebalanceListener.toClassic
 
   val consumerSettings =
     ConsumerSettings(system.toClassic, new StringDeserializer, new ByteArrayDeserializer)
diff --git a/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala b/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala
index 056207f1..a22abd85 100644
--- a/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala
+++ b/tests/src/test/scala/docs/scaladsl/ConsumerExample.scala
@@ -7,17 +7,17 @@ package docs.scaladsl
 
 import java.util.concurrent.atomic.{ AtomicLong, AtomicReference }
 
-import akka.Done
-import akka.actor.typed.Behavior
-import akka.actor.typed.scaladsl.Behaviors
-import akka.actor.{ Actor, ActorLogging, Props }
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.scaladsl._
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.RestartSettings
-import akka.stream.scaladsl.{ Keep, RestartSource, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.actor.typed.Behavior
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.actor.{ Actor, ActorLogging, Props }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.scaladsl._
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.RestartSettings
+import org.apache.pekko.stream.scaladsl.{ Keep, RestartSource, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord }
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.TopicPartition
@@ -336,7 +336,7 @@ class ConsumerExample extends DocsSpecBase with TestcontainersKafkaLike {
     val revokedPromise = Promise[Done]()
     // format: off
     //#withRebalanceListenerActor
-    import akka.kafka.{TopicPartitionsAssigned, TopicPartitionsRevoked}
+    import org.apache.pekko.kafka.{TopicPartitionsAssigned, TopicPartitionsRevoked}
 
     class RebalanceListener extends Actor with ActorLogging {
       def receive: Receive = {
@@ -385,7 +385,7 @@ class ConsumerExample extends DocsSpecBase with TestcontainersKafkaLike {
 
     // format: off
     //#withTypedRebalanceListenerActor
-    import akka.kafka.{TopicPartitionsAssigned, TopicPartitionsRevoked}
+    import org.apache.pekko.kafka.{TopicPartitionsAssigned, TopicPartitionsRevoked}
     
     def rebalanceListener(): Behavior[ConsumerRebalanceEvent] = Behaviors.receive {
       case (context, TopicPartitionsAssigned(subscription, topicPartitions)) =>
@@ -407,13 +407,13 @@ class ConsumerExample extends DocsSpecBase with TestcontainersKafkaLike {
     val guardian = Behaviors.setup[Nothing] { context =>
     //#withTypedRebalanceListenerActor
     
-    val typedRef: akka.actor.typed.ActorRef[ConsumerRebalanceEvent] =
+    val typedRef: org.apache.pekko.actor.typed.ActorRef[ConsumerRebalanceEvent] =
       context.spawn(rebalanceListener(), "rebalance-listener")
 
     // adds support for actors to a classic actor system and context
-    import akka.actor.typed.scaladsl.adapter._
+    import org.apache.pekko.actor.typed.scaladsl.adapter._
       
-    val classicRef: akka.actor.ActorRef = typedRef.toClassic  
+    val classicRef: org.apache.pekko.actor.ActorRef = typedRef.toClassic  
 
     val subscription = Subscriptions
       .topics(topic)
@@ -440,7 +440,7 @@ class ConsumerExample extends DocsSpecBase with TestcontainersKafkaLike {
     }
 
     // fixme: get typed system from existing `system`
-    val typed = akka.actor.typed.ActorSystem[Nothing](guardian, "typed-rebalance-listener-example")
+    val typed = org.apache.pekko.actor.typed.ActorSystem[Nothing](guardian, "typed-rebalance-listener-example")
     typed.whenTerminated.futureValue shouldBe Done
   }
 
diff --git a/tests/src/test/scala/docs/scaladsl/DocsSpecBase.scala b/tests/src/test/scala/docs/scaladsl/DocsSpecBase.scala
index a5add9b1..6a0597ab 100644
--- a/tests/src/test/scala/docs/scaladsl/DocsSpecBase.scala
+++ b/tests/src/test/scala/docs/scaladsl/DocsSpecBase.scala
@@ -5,10 +5,10 @@
 
 package docs.scaladsl
 
-import akka.NotUsed
-import akka.kafka.testkit.scaladsl.KafkaSpec
-import akka.kafka.testkit.internal.TestFrameworkInterface
-import akka.stream.scaladsl.Flow
+import org.apache.pekko.NotUsed
+import org.apache.pekko.kafka.testkit.scaladsl.KafkaSpec
+import org.apache.pekko.kafka.testkit.internal.TestFrameworkInterface
+import org.apache.pekko.stream.scaladsl.Flow
 import org.scalatest.Suite
 import org.scalatest.concurrent.{ Eventually, IntegrationPatience, ScalaFutures }
 import org.scalatest.flatspec.AnyFlatSpecLike
diff --git a/tests/src/test/scala/docs/scaladsl/FetchMetadata.scala b/tests/src/test/scala/docs/scaladsl/FetchMetadata.scala
index eedca8c7..fe52b5de 100644
--- a/tests/src/test/scala/docs/scaladsl/FetchMetadata.scala
+++ b/tests/src/test/scala/docs/scaladsl/FetchMetadata.scala
@@ -5,17 +5,17 @@
 
 package docs.scaladsl
 
-import akka.kafka.scaladsl.MetadataClient
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.scaladsl.MetadataClient
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
 import org.scalatest.TryValues
 import org.scalatest.time.{ Seconds, Span }
 
 // #metadata
 // #metadataClient
-import akka.actor.ActorRef
-import akka.kafka.{ KafkaConsumerActor, Metadata }
-import akka.pattern.ask
-import akka.util.Timeout
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.kafka.{ KafkaConsumerActor, Metadata }
+import org.apache.pekko.pattern.ask
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.common.TopicPartition
 
 import scala.concurrent.Future
diff --git a/tests/src/test/scala/docs/scaladsl/PartitionExamples.scala b/tests/src/test/scala/docs/scaladsl/PartitionExamples.scala
index c009d27d..9b7e5225 100644
--- a/tests/src/test/scala/docs/scaladsl/PartitionExamples.scala
+++ b/tests/src/test/scala/docs/scaladsl/PartitionExamples.scala
@@ -5,14 +5,14 @@
 
 package docs.scaladsl
 
-import akka.actor.ActorRef
-import akka.actor.typed.scaladsl.Behaviors
-import akka.kafka.scaladsl.Consumer
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.kafka.{ KafkaConsumerActor, Subscriptions }
-import akka.stream.scaladsl.{ Keep, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.actor.typed.scaladsl.Behaviors
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.kafka.{ KafkaConsumerActor, Subscriptions }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.common.{ Metric, MetricName, TopicPartition }
 
 import scala.annotation.nowarn
@@ -86,7 +86,7 @@ class PartitionExamples extends DocsSpecBase with TestcontainersKafkaPerClassLik
     val _ = Behaviors.setup[Nothing] { context =>
       // #consumerActorTyped
       // adds support for actors to a classic actor system and context
-      import akka.actor.typed.scaladsl.adapter._
+      import org.apache.pekko.actor.typed.scaladsl.adapter._
 
       // Consumer is represented by actor
       // #consumerActorTyped
diff --git a/tests/src/test/scala/docs/scaladsl/ProducerExample.scala b/tests/src/test/scala/docs/scaladsl/ProducerExample.scala
index cd797830..5899fbbc 100644
--- a/tests/src/test/scala/docs/scaladsl/ProducerExample.scala
+++ b/tests/src/test/scala/docs/scaladsl/ProducerExample.scala
@@ -5,13 +5,13 @@
 
 package docs.scaladsl
 
-import akka.Done
-import akka.kafka.ProducerMessage.MultiResultPart
-import akka.kafka.scaladsl.{ Consumer, Producer }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.kafka.{ ProducerMessage, ProducerSettings, Subscriptions }
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.ProducerMessage.MultiResultPart
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.{ ProducerMessage, ProducerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.serialization.StringSerializer
 
diff --git a/tests/src/test/scala/docs/scaladsl/SchemaRegistrySerializationSpec.scala b/tests/src/test/scala/docs/scaladsl/SchemaRegistrySerializationSpec.scala
index 2f708884..307562f1 100644
--- a/tests/src/test/scala/docs/scaladsl/SchemaRegistrySerializationSpec.scala
+++ b/tests/src/test/scala/docs/scaladsl/SchemaRegistrySerializationSpec.scala
@@ -7,14 +7,14 @@ package docs.scaladsl
 
 import java.nio.charset.StandardCharsets
 
-import akka.Done
-import akka.kafka._
-import akka.kafka.scaladsl._
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.Done
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl._
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
 import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig
 import org.apache.avro.specific.SpecificRecordBase
 import org.apache.avro.util.Utf8
diff --git a/tests/src/test/scala/docs/scaladsl/SendProducerSpec.scala b/tests/src/test/scala/docs/scaladsl/SendProducerSpec.scala
index 1c544760..d7b7730c 100644
--- a/tests/src/test/scala/docs/scaladsl/SendProducerSpec.scala
+++ b/tests/src/test/scala/docs/scaladsl/SendProducerSpec.scala
@@ -5,12 +5,12 @@
 
 package docs.scaladsl
 
-import akka.Done
-import akka.kafka.ProducerMessage.MultiResult
-import akka.kafka.scaladsl.{ Consumer, SendProducer }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.kafka.{ ConsumerSettings, ProducerMessage, Subscriptions }
-import akka.stream.scaladsl.{ Keep, Sink }
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.ProducerMessage.MultiResult
+import org.apache.pekko.kafka.scaladsl.{ Consumer, SendProducer }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.{ ConsumerSettings, ProducerMessage, Subscriptions }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink }
 import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }
 
 import scala.collection.immutable
diff --git a/tests/src/test/scala/docs/scaladsl/SerializationSpec.scala b/tests/src/test/scala/docs/scaladsl/SerializationSpec.scala
index 11592800..7d2055fb 100644
--- a/tests/src/test/scala/docs/scaladsl/SerializationSpec.scala
+++ b/tests/src/test/scala/docs/scaladsl/SerializationSpec.scala
@@ -5,14 +5,14 @@
 
 package docs.scaladsl
 
-import akka.Done
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.scaladsl._
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.{ ActorAttributes, Supervision }
-import akka.stream.scaladsl.{ Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.scaladsl._
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.{ ActorAttributes, Supervision }
+import org.apache.pekko.stream.scaladsl.{ Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.serialization._
 
diff --git a/tests/src/test/scala/docs/scaladsl/TestkitSamplesSpec.scala b/tests/src/test/scala/docs/scaladsl/TestkitSamplesSpec.scala
index 951a816f..f39d9503 100644
--- a/tests/src/test/scala/docs/scaladsl/TestkitSamplesSpec.scala
+++ b/tests/src/test/scala/docs/scaladsl/TestkitSamplesSpec.scala
@@ -5,14 +5,14 @@
 
 package docs.scaladsl
 
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.CommittableOffset
-import akka.kafka.scaladsl.{ Committer, Consumer }
-import akka.kafka.{ CommitterSettings, ConsumerMessage, ProducerMessage }
-import akka.stream.scaladsl.{ Flow, Keep, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.testkit.TestKit
-import akka.{ Done, NotUsed }
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.CommittableOffset
+import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
+import org.apache.pekko.kafka.{ CommitterSettings, ConsumerMessage, ProducerMessage }
+import org.apache.pekko.stream.scaladsl.{ Flow, Keep, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.testkit.TestKit
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.scalatest.BeforeAndAfterAll
@@ -39,8 +39,8 @@ class TestkitSamplesSpec
     val committerSettings = CommitterSettings(system)
 
     // #factories
-    import akka.kafka.testkit.scaladsl.ConsumerControlFactory
-    import akka.kafka.testkit.{ ConsumerResultFactory, ProducerResultFactory }
+    import org.apache.pekko.kafka.testkit.scaladsl.ConsumerControlFactory
+    import org.apache.pekko.kafka.testkit.{ ConsumerResultFactory, ProducerResultFactory }
 
     // create elements emitted by the mocked Consumer
     val elements = (0 to 10).map { i =>
diff --git a/tests/src/test/scala/docs/scaladsl/TransactionsExample.scala b/tests/src/test/scala/docs/scaladsl/TransactionsExample.scala
index 25c65660..1c84f3e8 100644
--- a/tests/src/test/scala/docs/scaladsl/TransactionsExample.scala
+++ b/tests/src/test/scala/docs/scaladsl/TransactionsExample.scala
@@ -7,14 +7,21 @@ package docs.scaladsl
 
 import java.util.concurrent.atomic.AtomicReference
 
-import akka.Done
-import akka.kafka.scaladsl.Consumer.{ Control, DrainingControl }
-import akka.kafka.scaladsl.{ Consumer, Transactional }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.kafka.{ ConsumerSettings, ProducerMessage, ProducerSettings, Repeated, Subscriptions, TransactionsOps }
-import akka.stream.RestartSettings
-import akka.stream.scaladsl.{ Keep, RestartSource, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.scaladsl.Consumer.{ Control, DrainingControl }
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Transactional }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.{
+  ConsumerSettings,
+  ProducerMessage,
+  ProducerSettings,
+  Repeated,
+  Subscriptions,
+  TransactionsOps
+}
+import org.apache.pekko.stream.RestartSettings
+import org.apache.pekko.stream.scaladsl.{ Keep, RestartSource, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.concurrent.Await
diff --git a/tests/src/test/scala/akka/kafka/ConfigSettingsSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/ConfigSettingsSpec.scala
similarity index 87%
rename from tests/src/test/scala/akka/kafka/ConfigSettingsSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/ConfigSettingsSpec.scala
index ae32f999..2209d29e 100644
--- a/tests/src/test/scala/akka/kafka/ConfigSettingsSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/ConfigSettingsSpec.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.kafka.internal.ConfigSettings
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.internal.ConfigSettings
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import com.typesafe.config.ConfigFactory
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpec
diff --git a/tests/src/test/scala/akka/kafka/ConsumerSettingsSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/ConsumerSettingsSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/ConsumerSettingsSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/ConsumerSettingsSpec.scala
index 2000d918..f0f08c7c 100644
--- a/tests/src/test/scala/akka/kafka/ConsumerSettingsSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/ConsumerSettingsSpec.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.actor.ActorSystem
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.testkit.TestKit
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.common.config.SslConfigs
 import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, StringDeserializer }
@@ -207,7 +207,7 @@ class ConsumerSettingsSpec
       .resolve()
 
     "read bootstrap servers from config" in {
-      import akka.kafka.scaladsl.DiscoverySupport
+      import org.apache.pekko.kafka.scaladsl.DiscoverySupport
       implicit val actorSystem = ActorSystem("test", config)
 
       DiscoverySupport.bootstrapServers(config.getConfig("discovery-consumer")).futureValue shouldBe "cat:1233,dog:1234"
@@ -220,7 +220,7 @@ class ConsumerSettingsSpec
       implicit val executionContext: ExecutionContext = actorSystem.dispatcher
 
       // #discovery-settings
-      import akka.kafka.scaladsl.DiscoverySupport
+      import org.apache.pekko.kafka.scaladsl.DiscoverySupport
 
       val consumerConfig = config.getConfig("discovery-consumer")
       val settings = ConsumerSettings(consumerConfig, new StringDeserializer, new StringDeserializer)
@@ -250,8 +250,8 @@ object ConsumerSettingsSpec {
       }
       // #discovery-service
       // #discovery-with-config
-      akka.discovery.method = config
-      akka.discovery.config.services = {
+      pekko.discovery.method = config
+      pekko.discovery.config.services = {
         kafkaService1 = {
           endpoints = [
             { host = "cat", port = 1233 }
diff --git a/tests/src/test/scala/akka/kafka/ProducerSettingsSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/ProducerSettingsSpec.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/ProducerSettingsSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/ProducerSettingsSpec.scala
index abf5b5e4..9da53069 100644
--- a/tests/src/test/scala/akka/kafka/ProducerSettingsSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/ProducerSettingsSpec.scala
@@ -3,11 +3,11 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
-import akka.actor.ActorSystem
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.testkit.TestKit
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.common.config.SslConfigs
 import org.apache.kafka.common.serialization.{ ByteArraySerializer, StringSerializer }
@@ -25,6 +25,7 @@ class ProducerSettingsSpec
 
   "ProducerSettings" must {
 
+    // TODO: Are we going to change configuration?
     "handle serializers defined in config" in {
       val conf = ConfigFactory
         .parseString(
@@ -193,7 +194,7 @@ class ProducerSettingsSpec
       implicit val actorSystem = ActorSystem("test", config)
 
       // #discovery-settings
-      import akka.kafka.scaladsl.DiscoverySupport
+      import org.apache.pekko.kafka.scaladsl.DiscoverySupport
 
       val producerConfig = config.getConfig("discovery-producer")
       val settings = ProducerSettings(producerConfig, new StringSerializer, new StringSerializer)
@@ -210,7 +211,7 @@ class ProducerSettingsSpec
     "fail if using non-async creation with enrichAsync" in {
       implicit val actorSystem = ActorSystem("test", config)
 
-      import akka.kafka.scaladsl.DiscoverySupport
+      import org.apache.pekko.kafka.scaladsl.DiscoverySupport
 
       val producerConfig = config.getConfig("discovery-producer")
       val settings = ProducerSettings(producerConfig, new StringSerializer, new StringSerializer)
@@ -235,8 +236,8 @@ object ProducerSettingsSpec {
           resolve-timeout = 10 ms
         }
         // #discovery-service
-        akka.discovery.method = config
-        akka.discovery.config.services = {
+        pekko.discovery.method = config
+        pekko.discovery.config.services = {
           kafkaService1 = {
             endpoints = [
               { host = "cat", port = 1233 }
diff --git a/tests/src/test/scala/akka/kafka/Repeated.scala b/tests/src/test/scala/org/apache/pekko/kafka/Repeated.scala
similarity index 97%
rename from tests/src/test/scala/akka/kafka/Repeated.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/Repeated.scala
index eac51cd0..d0fd04f8 100644
--- a/tests/src/test/scala/akka/kafka/Repeated.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/Repeated.scala
@@ -3,7 +3,7 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 
 import org.scalatest._
 
diff --git a/tests/src/test/scala/akka/kafka/TransactionsOps.scala b/tests/src/test/scala/org/apache/pekko/kafka/TransactionsOps.scala
similarity index 93%
rename from tests/src/test/scala/akka/kafka/TransactionsOps.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/TransactionsOps.scala
index d3b7b199..a9806de4 100644
--- a/tests/src/test/scala/akka/kafka/TransactionsOps.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/TransactionsOps.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka
+package org.apache.pekko.kafka
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.{ Done, NotUsed }
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.PartitionOffset
-import akka.kafka.ProducerMessage.MultiMessage
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.{ Consumer, Producer, Transactional }
-import akka.stream.Materializer
-import akka.stream.scaladsl.{ Flow, Sink, Source }
-import akka.stream.testkit.TestSubscriber
-import akka.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.{ Done, NotUsed }
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.PartitionOffset
+import org.apache.pekko.kafka.ProducerMessage.MultiMessage
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.{ Consumer, Producer, Transactional }
+import org.apache.pekko.stream.Materializer
+import org.apache.pekko.stream.scaladsl.{ Flow, Sink, Source }
+import org.apache.pekko.stream.testkit.TestSubscriber
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.{ ProducerConfig, ProducerRecord }
 import org.scalatest.TestSuite
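
TransactionsOps assembles consume-transform-produce streams for the transactional tests; the underlying Transactional API is unchanged apart from the package. A rough sketch, assuming consumer and producer settings and an implicit ActorSystem in scope, with placeholder topic and transactional ids:

    import org.apache.pekko.kafka.{ ProducerMessage, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
    import org.apache.pekko.kafka.scaladsl.Transactional
    import org.apache.kafka.clients.producer.ProducerRecord

    val control =
      Transactional
        .source(consumerSettings, Subscriptions.topics("source-topic"))
        .map { msg =>
          // The partition offset travels with the record and is committed
          // inside the same Kafka transaction as the produced message.
          ProducerMessage.single(
            new ProducerRecord("sink-topic", msg.record.key, msg.record.value),
            msg.partitionOffset)
        }
        .toMat(Transactional.sink(producerSettings, "transactional-id"))(DrainingControl.apply)
        .run()
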
diff --git a/tests/src/test/scala/akka/kafka/internal/CommitCollectorStageSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommitCollectorStageSpec.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/internal/CommitCollectorStageSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/CommitCollectorStageSpec.scala
index 24c68561..6bbf8ddc 100644
--- a/tests/src/test/scala/akka/kafka/internal/CommitCollectorStageSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommitCollectorStageSpec.scala
@@ -3,23 +3,23 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicLong
-import akka.Done
-import akka.actor.ActorSystem
-import akka.event.LoggingAdapter
-import akka.kafka.ConsumerMessage.{ Committable, CommittableOffset, CommittableOffsetBatch }
-import akka.kafka.scaladsl.{ Committer, Consumer }
-import akka.kafka.testkit.ConsumerResultFactory
-import akka.kafka.testkit.scaladsl.{ ConsumerControlFactory, Slf4jToAkkaLoggingAdapter }
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ CommitWhen, CommitterSettings, Repeated }
-import akka.stream.scaladsl.Keep
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.{ TestSink, TestSource }
-import akka.stream.testkit.{ TestPublisher, TestSubscriber }
-import akka.testkit.TestKit
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.kafka.ConsumerMessage.{ Committable, CommittableOffset, CommittableOffsetBatch }
+import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
+import org.apache.pekko.kafka.testkit.ConsumerResultFactory
+import org.apache.pekko.kafka.testkit.scaladsl.{ ConsumerControlFactory, Slf4jToAkkaLoggingAdapter }
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ CommitWhen, CommitterSettings, Repeated }
+import org.apache.pekko.stream.scaladsl.Keep
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.{ TestSink, TestSource }
+import org.apache.pekko.stream.testkit.{ TestPublisher, TestSubscriber }
+import org.apache.pekko.testkit.TestKit
 import org.apache.kafka.clients.consumer.OffsetAndMetadata
 import org.apache.kafka.common.TopicPartition
 import org.scalatest.concurrent.{ Eventually, IntegrationPatience, ScalaFutures }
@@ -465,7 +465,7 @@ class CommitCollectorStageSpec(_system: ActorSystem)
       promisedCommit.future
     }
 
-    private[akka] val underlying =
+    private[pekko] val underlying =
       new KafkaAsyncConsumerCommitterRef(consumerActor = null, commitSettings.maxInterval)(system.dispatcher) {
 
         override def commitSingle(topicPartition: TopicPartition, offset: OffsetAndMetadata): Future[Done] = {
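
The collector stage under test here backs Committer's offset batching. How it is typically driven, sketched with an assumed `business` processing function (ConsumerRecord => Future[_]) and an implicit ActorSystem named `system`:

    import scala.concurrent.duration._
    import org.apache.pekko.kafka.{ CommitWhen, CommitterSettings, Subscriptions }
    import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
    import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
    import system.dispatcher

    // Commit in batches of up to 100, at least every 10 seconds.
    val committerSettings =
      CommitterSettings(system)
        .withMaxBatch(100L)
        .withMaxInterval(10.seconds)
        .withCommitWhen(CommitWhen.OffsetFirstObserved)

    val control =
      Consumer
        .committableSource(consumerSettings, Subscriptions.topics("topic"))
        .mapAsync(1)(msg => business(msg.record).map(_ => msg.committableOffset))
        .toMat(Committer.sink(committerSettings))(DrainingControl.apply)
        .run()
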
diff --git a/tests/src/test/scala/akka/kafka/internal/CommittingProducerSinkSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/internal/CommittingProducerSinkSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkSpec.scala
index d611691f..c384430c 100644
--- a/tests/src/test/scala/akka/kafka/internal/CommittingProducerSinkSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingProducerSinkSpec.scala
@@ -3,24 +3,24 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicLong
 
-import akka.Done
-import akka.actor.ActorSystem
-import akka.event.LoggingAdapter
-import akka.kafka.internal.KafkaConsumerActor.Internal
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.scaladsl.Producer
-import akka.kafka.testkit.ConsumerResultFactory
-import akka.kafka.testkit.scaladsl.{ ConsumerControlFactory, Slf4jToAkkaLoggingAdapter }
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka._
-import akka.stream.scaladsl.{ Keep, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.{ ActorAttributes, Supervision }
-import akka.testkit.{ TestKit, TestProbe }
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.scaladsl.Producer
+import org.apache.pekko.kafka.testkit.ConsumerResultFactory
+import org.apache.pekko.kafka.testkit.scaladsl.{ ConsumerControlFactory, Slf4jToAkkaLoggingAdapter }
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka._
+import org.apache.pekko.stream.scaladsl.{ Keep, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.{ ActorAttributes, Supervision }
+import org.apache.pekko.testkit.{ TestKit, TestProbe }
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.clients.producer._
 import org.apache.kafka.common.TopicPartition
@@ -460,7 +460,7 @@ class CommittingProducerSinkSpec(_system: ActorSystem)
     eventually {
       producer.history.asScala should have size 2
     }
-    control.drainAndShutdown().failed.futureValue shouldBe an[akka.kafka.CommitTimeoutException]
+    control.drainAndShutdown().failed.futureValue shouldBe an[org.apache.pekko.kafka.CommitTimeoutException]
   }
 
   it should "choose to ignore producer errors" in assertAllStagesStopped {
@@ -582,7 +582,7 @@ class CommittingProducerSinkSpec(_system: ActorSystem)
     eventually {
       producer.history.asScala should have size 2
     }
-    control.drainAndShutdown().failed.futureValue shouldBe an[akka.kafka.CommitTimeoutException]
+    control.drainAndShutdown().failed.futureValue shouldBe an[org.apache.pekko.kafka.CommitTimeoutException]
   }
 
   it should "ignore commit timeout" in assertAllStagesStopped {
diff --git a/tests/src/test/scala/akka/kafka/internal/CommittingWithMockSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingWithMockSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/internal/CommittingWithMockSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingWithMockSpec.scala
index 1a525b97..099c28c6 100644
--- a/tests/src/test/scala/akka/kafka/internal/CommittingWithMockSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/CommittingWithMockSpec.scala
@@ -3,22 +3,22 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.Done
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage._
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.scaladsl.{ Committer, Consumer }
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.stream._
-import akka.stream.scaladsl._
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestKit
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage._
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.scaladsl._
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.TopicPartition
diff --git a/tests/src/test/scala/akka/kafka/internal/ConnectionCheckerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConnectionCheckerSpec.scala
similarity index 88%
rename from tests/src/test/scala/akka/kafka/internal/ConnectionCheckerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConnectionCheckerSpec.scala
index e2b8a966..02f2605b 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConnectionCheckerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConnectionCheckerSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.{ ActorRef, ActorSystem }
-import akka.kafka.Metadata
-import akka.kafka.ConnectionCheckerSettings
-import akka.kafka.KafkaConnectionFailed
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.testkit.TestKit
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.{ ActorRef, ActorSystem }
+import org.apache.pekko.kafka.Metadata
+import org.apache.pekko.kafka.ConnectionCheckerSettings
+import org.apache.pekko.kafka.KafkaConnectionFailed
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.common.errors.TimeoutException
 import org.scalatest.wordspec.AnyWordSpecLike
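
The connection checker is driven purely by configuration; with this migration its reference keys move from the akka. to the pekko. prefix. An illustrative HOCON block (key names as remembered from the Alpakka reference configuration, values are examples only):

    pekko.kafka.consumer.connection-checker {
      # Retry the broker connection up to max-retries times, with the
      # check-interval growing by backoff-factor on each attempt, before
      # failing the stage with KafkaConnectionFailed.
      enable = true
      max-retries = 3
      check-interval = 15s
      backoff-factor = 2.0
    }
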
diff --git a/tests/src/test/scala/akka/kafka/internal/ConsumerDummy.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerDummy.scala
similarity index 98%
rename from tests/src/test/scala/akka/kafka/internal/ConsumerDummy.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerDummy.scala
index dd49bfc1..5f8e5766 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConsumerDummy.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerDummy.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.time.Duration
 import java.util
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.Done
+import org.apache.pekko.Done
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.{ Metric, MetricName, PartitionInfo, TopicPartition }
 import org.slf4j.{ Logger, LoggerFactory }
diff --git a/tests/src/test/scala/akka/kafka/internal/ConsumerMock.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerMock.scala
similarity index 98%
rename from tests/src/test/scala/akka/kafka/internal/ConsumerMock.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerMock.scala
index 95094e88..90f54ac5 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConsumerMock.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerMock.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicBoolean
 
-import akka.testkit.TestKit
-import akka.util.JavaDurationConverters._
+import org.apache.pekko.testkit.TestKit
+import org.apache.pekko.util.JavaDurationConverters._
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.TopicPartition
 import org.mockito.Mockito._
diff --git a/tests/src/test/scala/akka/kafka/internal/ConsumerProgressTrackingSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerProgressTrackingSpec.scala
similarity index 98%
rename from tests/src/test/scala/akka/kafka/internal/ConsumerProgressTrackingSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerProgressTrackingSpec.scala
index 0f67304b..08dc690a 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConsumerProgressTrackingSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerProgressTrackingSpec.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import org.apache.kafka.clients.consumer.{ Consumer, ConsumerRecord, ConsumerRecords, OffsetAndMetadata }
 import org.apache.kafka.common.TopicPartition
 import org.mockito.Mockito
diff --git a/tests/src/test/scala/akka/kafka/internal/ConsumerResetProtectionSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerResetProtectionSpec.scala
similarity index 94%
rename from tests/src/test/scala/akka/kafka/internal/ConsumerResetProtectionSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerResetProtectionSpec.scala
index 598da943..e48ae977 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConsumerResetProtectionSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerResetProtectionSpec.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.actor.ActorSystem
-import akka.event.LoggingAdapter
-import akka.kafka.OffsetResetProtectionSettings
-import akka.kafka.internal.KafkaConsumerActor.Internal.Seek
-import akka.kafka.testkit.scaladsl.Slf4jToAkkaLoggingAdapter
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.testkit.{ ImplicitSender, TestKit }
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.event.LoggingAdapter
+import org.apache.pekko.kafka.OffsetResetProtectionSettings
+import org.apache.pekko.kafka.internal.KafkaConsumerActor.Internal.Seek
+import org.apache.pekko.kafka.testkit.scaladsl.Slf4jToAkkaLoggingAdapter
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.testkit.{ ImplicitSender, TestKit }
 import org.apache.kafka.clients.consumer.{ ConsumerRecord, ConsumerRecords }
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.header.internals.RecordHeaders
diff --git a/tests/src/test/scala/akka/kafka/internal/ConsumerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerSpec.scala
similarity index 93%
rename from tests/src/test/scala/akka/kafka/internal/ConsumerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerSpec.scala
index 1e1f3a0e..37f66b39 100644
--- a/tests/src/test/scala/akka/kafka/internal/ConsumerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ConsumerSpec.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
-
-import akka.Done
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage._
-import akka.kafka.scaladsl.Consumer
-import akka.kafka.scaladsl.Consumer.Control
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ CommitTimeoutException, ConsumerSettings, Repeated, Subscriptions }
-import akka.stream.scaladsl._
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestKit
+package org.apache.pekko.kafka.internal
+
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage._
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.kafka.scaladsl.Consumer.Control
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ CommitTimeoutException, ConsumerSettings, Repeated, Subscriptions }
+import org.apache.pekko.stream.scaladsl._
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.serialization.StringDeserializer
diff --git a/tests/src/test/scala/akka/kafka/internal/EnhancedConfigSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/EnhancedConfigSpec.scala
similarity index 90%
rename from tests/src/test/scala/akka/kafka/internal/EnhancedConfigSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/EnhancedConfigSpec.scala
index e9f10f2f..ddc1dce0 100644
--- a/tests/src/test/scala/akka/kafka/internal/EnhancedConfigSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/EnhancedConfigSpec.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import com.typesafe.config.ConfigFactory
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpec
diff --git a/tests/src/test/scala/akka/kafka/internal/OffsetAggregationSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/OffsetAggregationSpec.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/internal/OffsetAggregationSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/OffsetAggregationSpec.scala
index 6989d011..ea8e5817 100644
--- a/tests/src/test/scala/akka/kafka/internal/OffsetAggregationSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/OffsetAggregationSpec.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import org.apache.kafka.clients.consumer.OffsetAndMetadata
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.requests.OffsetFetchResponse
diff --git a/tests/src/test/scala/akka/kafka/internal/PartitionedSourceSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/PartitionedSourceSpec.scala
similarity index 97%
rename from tests/src/test/scala/akka/kafka/internal/PartitionedSourceSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/PartitionedSourceSpec.scala
index 0d770dd9..1e1537b4 100644
--- a/tests/src/test/scala/akka/kafka/internal/PartitionedSourceSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/PartitionedSourceSpec.scala
@@ -3,22 +3,22 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.atomic.AtomicReference
 import java.util.concurrent.{ CountDownLatch, TimeUnit }
 import java.util.function.UnaryOperator
 
-import akka.Done
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage._
-import akka.kafka.scaladsl.Consumer
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ ConsumerSettings, Subscriptions }
-import akka.stream.scaladsl._
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestKit
+import org.apache.pekko.Done
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage._
+import org.apache.pekko.kafka.scaladsl.Consumer
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ ConsumerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl._
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestKit
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.clients.consumer._
 import org.apache.kafka.common.TopicPartition
diff --git a/tests/src/test/scala/akka/kafka/internal/ProducerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/ProducerSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/internal/ProducerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/ProducerSpec.scala
index 6b7a7d98..6af86887 100644
--- a/tests/src/test/scala/akka/kafka/internal/ProducerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/ProducerSpec.scala
@@ -3,21 +3,21 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.util.concurrent.CompletableFuture
-import akka.actor.ActorSystem
-import akka.kafka.ConsumerMessage.{ GroupTopicPartition, PartitionOffset, PartitionOffsetCommittedMarker }
-import akka.kafka.ProducerMessage._
-import akka.kafka.scaladsl.Producer
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ ConsumerMessage, ProducerMessage, ProducerSettings }
-import akka.stream.scaladsl.{ Flow, Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.{ TestSink, TestSource }
-import akka.stream.{ ActorAttributes, Supervision }
-import akka.testkit.TestKit
-import akka.{ Done, NotUsed }
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ConsumerMessage.{ GroupTopicPartition, PartitionOffset, PartitionOffsetCommittedMarker }
+import org.apache.pekko.kafka.ProducerMessage._
+import org.apache.pekko.kafka.scaladsl.Producer
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ ConsumerMessage, ProducerMessage, ProducerSettings }
+import org.apache.pekko.stream.scaladsl.{ Flow, Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.{ TestSink, TestSource }
+import org.apache.pekko.stream.{ ActorAttributes, Supervision }
+import org.apache.pekko.testkit.TestKit
+import org.apache.pekko.{ Done, NotUsed }
 import com.typesafe.config.ConfigFactory
 import org.apache.kafka.clients.consumer.{ ConsumerGroupMetadata, OffsetAndMetadata }
 import org.apache.kafka.clients.producer._
diff --git a/tests/src/test/scala/akka/kafka/internal/SubscriptionsSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/internal/SubscriptionsSpec.scala
similarity index 89%
rename from tests/src/test/scala/akka/kafka/internal/SubscriptionsSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/internal/SubscriptionsSpec.scala
index b04045d5..18f66004 100644
--- a/tests/src/test/scala/akka/kafka/internal/SubscriptionsSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/internal/SubscriptionsSpec.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.internal
+package org.apache.pekko.kafka.internal
 
 import java.net.URLEncoder
 
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ Subscription, Subscriptions }
-import akka.util.ByteString
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ Subscription, Subscriptions }
+import org.apache.pekko.util.ByteString
 import org.apache.kafka.common.TopicPartition
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpec
diff --git a/tests/src/test/scala/akka/kafka/javadsl/ControlSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/javadsl/ControlSpec.scala
similarity index 91%
rename from tests/src/test/scala/akka/kafka/javadsl/ControlSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/javadsl/ControlSpec.scala
index 7edff1ad..546faf22 100644
--- a/tests/src/test/scala/akka/kafka/javadsl/ControlSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/javadsl/ControlSpec.scala
@@ -3,15 +3,15 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.javadsl
+package org.apache.pekko.kafka.javadsl
 
 import java.util
 import java.util.concurrent.{ CompletionStage, Executor, Executors }
 import java.util.concurrent.atomic.AtomicBoolean
 
-import akka.Done
-import akka.kafka.internal.ConsumerControlAsJava
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.internal.ConsumerControlAsJava
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import org.apache.kafka.common.{ Metric, MetricName }
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.matchers.should.Matchers
@@ -24,7 +24,7 @@ import scala.language.reflectiveCalls
 object ControlSpec {
   def createControl(stopFuture: Future[Done] = Future.successful(Done),
       shutdownFuture: Future[Done] = Future.successful(Done)) = {
-    val control = new akka.kafka.scaladsl.ControlSpec.ControlImpl(stopFuture, shutdownFuture)
+    val control = new org.apache.pekko.kafka.scaladsl.ControlSpec.ControlImpl(stopFuture, shutdownFuture)
     val wrapped = new ConsumerControlAsJava(control)
     new Consumer.Control {
       def shutdownCalled: AtomicBoolean = control.shutdownCalled
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/CommittableSinkSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittableSinkSpec.scala
similarity index 90%
rename from tests/src/test/scala/akka/kafka/scaladsl/CommittableSinkSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittableSinkSpec.scala
index ffbbf818..47183eb9 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/CommittableSinkSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittableSinkSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
-
-import akka.Done
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+package org.apache.pekko.kafka.scaladsl
+
+import org.apache.pekko.Done
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.collection.immutable
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/CommittingSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittingSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/scaladsl/CommittingSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittingSpec.scala
index 094f7327..e68cb155 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/CommittingSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/CommittingSpec.scala
@@ -3,23 +3,23 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicInteger
 import java.util.function.IntUnaryOperator
 
-import akka.actor.ActorRef
-import akka.kafka.ConsumerMessage.{ CommittableOffsetBatch, GroupTopicPartition }
-import akka.kafka.ProducerMessage.MultiMessage
-import akka.kafka._
-import akka.kafka.internal.CommittableOffsetBatchImpl
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.RestartSettings
-import akka.stream.scaladsl.{ Keep, RestartSource, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestProbe
-import akka.{ Done, NotUsed }
+import org.apache.pekko.actor.ActorRef
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableOffsetBatch, GroupTopicPartition }
+import org.apache.pekko.kafka.ProducerMessage.MultiMessage
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.internal.CommittableOffsetBatchImpl
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.RestartSettings
+import org.apache.pekko.stream.scaladsl.{ Keep, RestartSource, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestProbe
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.TopicPartition
 import org.scalatest.Inside
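
CommittingSpec's new imports include the stream restart utilities; the pattern they support wraps the whole consume-and-commit stream so it is rebuilt with backoff after a failure. A sketch, assuming consumer and committer settings, a `business` function, and an implicit ActorSystem named `system`:

    import scala.concurrent.duration._
    import org.apache.pekko.kafka.Subscriptions
    import org.apache.pekko.kafka.scaladsl.{ Committer, Consumer }
    import org.apache.pekko.stream.RestartSettings
    import org.apache.pekko.stream.scaladsl.{ RestartSource, Sink }
    import system.dispatcher

    val restartSettings =
      RestartSettings(minBackoff = 1.second, maxBackoff = 10.seconds, randomFactor = 0.2)

    // Rebuild the consumer stream with exponential backoff whenever it fails.
    val done = RestartSource
      .onFailuresWithBackoff(restartSettings) { () =>
        Consumer
          .committableSource(consumerSettings, Subscriptions.topics("topic"))
          .mapAsync(1)(msg => business(msg.record).map(_ => msg.committableOffset))
          .via(Committer.flow(committerSettings))
      }
      .runWith(Sink.ignore)
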
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/ConnectionCheckerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ConnectionCheckerSpec.scala
similarity index 87%
rename from tests/src/test/scala/akka/kafka/scaladsl/ConnectionCheckerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ConnectionCheckerSpec.scala
index 1405c9c8..4ca3b034 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/ConnectionCheckerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ConnectionCheckerSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
-
-import akka.kafka._
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.scaladsl.{ Keep, Sink }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
+package org.apache.pekko.kafka.scaladsl
+
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.common.serialization.StringDeserializer
 
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/ControlSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ControlSpec.scala
similarity index 93%
rename from tests/src/test/scala/akka/kafka/scaladsl/ControlSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ControlSpec.scala
index dfe488bc..c25d353a 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/ControlSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ControlSpec.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicBoolean
 
-import akka.Done
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 import org.apache.kafka.common.{ Metric, MetricName }
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.wordspec.AnyWordSpec
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/IntegrationSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/IntegrationSpec.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/scaladsl/IntegrationSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/IntegrationSpec.scala
index 1fb88f05..671adfaf 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/IntegrationSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/IntegrationSpec.scala
@@ -3,21 +3,21 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicLong
 
-import akka.Done
-import akka.kafka.ConsumerMessage.CommittableOffsetBatch
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.pattern.ask
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestProbe
-import akka.util.Timeout
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.ConsumerMessage.CommittableOffsetBatch
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.pattern.ask
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestProbe
+import org.apache.pekko.util.Timeout
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.{ Metric, MetricName, TopicPartition }
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/MetadataClientSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MetadataClientSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/scaladsl/MetadataClientSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MetadataClientSpec.scala
index be1960cc..77fa817d 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/MetadataClientSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MetadataClientSpec.scala
@@ -3,10 +3,10 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.common.{ PartitionInfo, TopicPartition }
 
 import scala.language.postfixOps
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredConsumerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredConsumerSpec.scala
similarity index 84%
rename from tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredConsumerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredConsumerSpec.scala
index 1de2e720..fb07029e 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredConsumerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredConsumerSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
-
-import akka.actor.ActorSystem
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.kafka.{ ConsumerSettings, Subscriptions }
-import akka.stream.scaladsl.Sink
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.testkit.TestKit
+package org.apache.pekko.kafka.scaladsl
+
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.{ ConsumerSettings, Subscriptions }
+import org.apache.pekko.stream.scaladsl.Sink
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.testkit.TestKit
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.serialization.StringDeserializer
 import org.scalatest.concurrent.{ Eventually, IntegrationPatience, ScalaFutures }
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredProducerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredProducerSpec.scala
similarity index 78%
rename from tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredProducerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredProducerSpec.scala
index ae7bca32..d042fd94 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/MisconfiguredProducerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MisconfiguredProducerSpec.scala
@@ -3,14 +3,14 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.actor.ActorSystem
-import akka.kafka.ProducerSettings
-import akka.kafka.tests.scaladsl.LogCapturing
-import akka.stream.scaladsl.Source
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.testkit.TestKit
+import org.apache.pekko.actor.ActorSystem
+import org.apache.pekko.kafka.ProducerSettings
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.stream.scaladsl.Source
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.testkit.TestKit
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.serialization.StringSerializer
 import org.scalatest.concurrent.{ Eventually, IntegrationPatience, ScalaFutures }
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/MultiConsumerSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MultiConsumerSpec.scala
similarity index 94%
rename from tests/src/test/scala/akka/kafka/scaladsl/MultiConsumerSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MultiConsumerSpec.scala
index 2cd17293..de8fe590 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/MultiConsumerSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/MultiConsumerSpec.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.Done
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 
 import scala.collection.immutable
 import scala.concurrent.duration._
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/PartitionedSourcesSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/PartitionedSourcesSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/scaladsl/PartitionedSourcesSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/PartitionedSourcesSpec.scala
index 87e27c97..8b466d5f 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/PartitionedSourcesSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/PartitionedSourcesSpec.scala
@@ -3,21 +3,21 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.{ CountDownLatch, TimeUnit }
 import java.util.concurrent.atomic.{ AtomicBoolean, AtomicLong }
 import java.util.function.LongBinaryOperator
 
-import akka.Done
-import akka.kafka._
-import akka.kafka.scaladsl.Consumer.DrainingControl
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.{ KillSwitches, OverflowStrategy }
-import akka.stream.scaladsl.{ Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestProbe
+import org.apache.pekko.Done
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.scaladsl.Consumer.DrainingControl
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.{ KillSwitches, OverflowStrategy }
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestProbe
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerRecord }
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.apache.kafka.common.TopicPartition
@@ -553,7 +553,7 @@ class PartitionedSourcesSpec extends SpecBase with TestcontainersKafkaLike with
                   exceptionTriggered.set(true)
                   Future.failed(new RuntimeException("FAIL"))
                 } else {
-                  akka.pattern.after(50.millis, system.scheduler)(Future.successful(m))
+                  org.apache.pekko.pattern.after(50.millis, system.scheduler)(Future.successful(m))
                 }
               }
               .log(s"subsource $tp pre commit")
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/RebalanceExtSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceExtSpec.scala
similarity index 96%
rename from tests/src/test/scala/akka/kafka/scaladsl/RebalanceExtSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceExtSpec.scala
index e73c9ee2..f3f6ab9b 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/RebalanceExtSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceExtSpec.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicInteger
 
-import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
-import akka.kafka._
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream._
-import akka.stream.scaladsl.{ Flow, Keep, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.{ Done, NotUsed }
+import org.apache.pekko.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffset }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream._
+import org.apache.pekko.stream.scaladsl.{ Flow, Keep, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.common.TopicPartition
 import org.scalatest._
@@ -198,7 +198,7 @@ class RebalanceExtSpec extends SpecBase with TestcontainersKafkaLike with Inside
       val topicMetadata: TopicPartitionMetaData =
         createTopicMapsAndPublishMessages(topicCount, partitionCount, perPartitionMessageCount)
       val group1 = createGroupId(1)
-      val consumerSettings1 = consumerSettings(group1, "3", classOf[AlpakkaAssignor].getName)
+      val consumerSettings1 = consumerSettings(group1, "3", classOf[PekkoConnectorsAssignor].getName)
 
       // let producers publish all messages
       Await.result(Future.sequence(topicMetadata.producerTpsAck), remainingOrDefault)
@@ -207,7 +207,7 @@ class RebalanceExtSpec extends SpecBase with TestcontainersKafkaLike with Inside
         topicMetadata.tps.sortBy(a => (a.topic, a.partition))
       val t1p0 = topic1PartitionList(0)
       val t1p1 = topic1PartitionList(1)
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId1 -> Set(t1p0),
           consumerClientId2 -> Set(t1p1)))
@@ -243,7 +243,7 @@ class RebalanceExtSpec extends SpecBase with TestcontainersKafkaLike with Inside
       // consumer-2::SubSource-topic-1-1-1-A:verify messageId=10 is received in the business logic function
 
       // consumer-2::define post-abort partition distribution
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId2 -> Set(t1p0, t1p1)))
       // consumer-1::SubSource-topic-1-1-0-A:unblock messageId=2 from batch (1,2,3)
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/RebalanceSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceSpec.scala
similarity index 91%
rename from tests/src/test/scala/akka/kafka/scaladsl/RebalanceSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceSpec.scala
index cf40fdb1..b1876576 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/RebalanceSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RebalanceSpec.scala
@@ -3,19 +3,19 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util
 import java.util.concurrent.atomic.AtomicReference
 
-import akka.kafka._
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.scaladsl.{ Keep, Source }
-import akka.stream.testkit.TestSubscriber
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
-import akka.testkit.TestProbe
-import akka.{ Done, NotUsed }
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Source }
+import org.apache.pekko.stream.testkit.TestSubscriber
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.testkit.TestProbe
+import org.apache.pekko.{ Done, NotUsed }
 import org.apache.kafka.clients.consumer.{ ConsumerConfig, ConsumerPartitionAssignor, ConsumerRecord }
 import org.apache.kafka.clients.consumer.internals.AbstractPartitionAssignor
 import org.apache.kafka.common.TopicPartition
@@ -50,12 +50,12 @@ class RebalanceSpec extends SpecBase with TestcontainersKafkaLike with Inside {
       val tp1 = new TopicPartition(topic1, partition1)
       val consumerSettings = consumerDefaults
         .withProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "500") // 500 is the default value
-        .withProperty(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, classOf[AlpakkaAssignor].getName)
+        .withProperty(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, classOf[PekkoConnectorsAssignor].getName)
         .withGroupId(group1)
 
       awaitProduce(produce(topic1, 0 to count.toInt, partition1))
 
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId1 -> Set(tp0, tp1)))
 
@@ -75,7 +75,7 @@ class RebalanceSpec extends SpecBase with TestcontainersKafkaLike with Inside {
       log.debug("read one message from probe1 with partition 1")
       probe1.requestNext()
 
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId1 -> Set(tp0),
           consumerClientId2 -> Set(tp1)))
@@ -146,12 +146,12 @@ class RebalanceSpec extends SpecBase with TestcontainersKafkaLike with Inside {
       val tp1 = new TopicPartition(topic1, partition1)
       val consumerSettings = consumerDefaults
         .withProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "500") // 500 is the default value
-        .withProperty(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, classOf[AlpakkaAssignor].getName)
+        .withProperty(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, classOf[PekkoConnectorsAssignor].getName)
         .withGroupId(group1)
 
       awaitProduce(produce(topic1, 0 to count.toInt, partition1))
 
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId1 -> Set(tp0, tp1)))
 
@@ -177,7 +177,7 @@ class RebalanceSpec extends SpecBase with TestcontainersKafkaLike with Inside {
         .find { case (tp, _) => tp.partition() == partition1 }
         .foreach { case (_, probe) => probe.requestNext() }
 
-      AlpakkaAssignor.clientIdToPartitionMap.set(
+      PekkoConnectorsAssignor.clientIdToPartitionMap.set(
         Map(
           consumerClientId1 -> Set(tp0),
           consumerClientId2 -> Set(tp1)))
@@ -230,7 +230,7 @@ class RebalanceSpec extends SpecBase with TestcontainersKafkaLike with Inside {
   }
 }
 
-object AlpakkaAssignor {
+object PekkoConnectorsAssignor {
   final val clientIdToPartitionMap = new AtomicReference[Map[String, Set[TopicPartition]]]()
 }
 
@@ -242,7 +242,7 @@ object AlpakkaAssignor {
  * Pass a client.id -> Set[TopicPartition] map to `AlpakkaAssignor.clientIdToPartitionMap` **before** you anticipate a
  * rebalance to occur in your test.
  */
-class AlpakkaAssignor extends AbstractPartitionAssignor {
+class PekkoConnectorsAssignor extends AbstractPartitionAssignor {
   val log: Logger = LoggerFactory.getLogger(getClass)
 
   override def name(): String = "alpakka-test"
@@ -251,7 +251,7 @@ class AlpakkaAssignor extends AbstractPartitionAssignor {
       partitionsPerTopic: util.Map[String, Integer],
       subscriptions: util.Map[String, ConsumerPartitionAssignor.Subscription])
       : util.Map[String, util.List[TopicPartition]] = {
-    val clientIdToPartitionMap = AlpakkaAssignor.clientIdToPartitionMap.get()
+    val clientIdToPartitionMap = PekkoConnectorsAssignor.clientIdToPartitionMap.get()
 
     val mapTps = clientIdToPartitionMap.values.flatten.toSet
     val subscriptionTps = partitionsPerTopic.asScala.flatMap {
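
Apart from the rename, using the assignor is unchanged: a test seeds the static map and points the Kafka client at the class. Condensed from the hunks above, reusing the spec's own names (consumerDefaults, consumerClientId1, tp0, group1):

    import org.apache.kafka.clients.consumer.ConsumerConfig

    // Decide the next assignment before a rebalance is triggered.
    PekkoConnectorsAssignor.clientIdToPartitionMap.set(
      Map(
        consumerClientId1 -> Set(tp0),
        consumerClientId2 -> Set(tp1)))

    val settings = consumerDefaults
      .withProperty(
        ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG,
        classOf[PekkoConnectorsAssignor].getName)
      .withGroupId(group1)
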
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/ReconnectSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ReconnectSpec.scala
similarity index 92%
rename from tests/src/test/scala/akka/kafka/scaladsl/ReconnectSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ReconnectSpec.scala
index 7462d8f8..cc538c7a 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/ReconnectSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/ReconnectSpec.scala
@@ -3,13 +3,13 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.Done
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.stream.scaladsl.{ Keep, Sink, Source, SourceQueueWithComplete, Tcp }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.{ KillSwitches, OverflowStrategy, UniqueKillSwitch }
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.stream.scaladsl.{ Keep, Sink, Source, SourceQueueWithComplete, Tcp }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.{ KillSwitches, OverflowStrategy, UniqueKillSwitch }
 import org.apache.kafka.clients.producer.ProducerRecord
 
 import scala.concurrent.duration._
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/RetentionPeriodSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RetentionPeriodSpec.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/scaladsl/RetentionPeriodSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RetentionPeriodSpec.scala
index 87fcc9b5..f5584bb7 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/RetentionPeriodSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/RetentionPeriodSpec.scala
@@ -3,17 +3,17 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.ConcurrentLinkedQueue
 
-import akka.Done
-import akka.kafka._
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
-import akka.stream.scaladsl.Keep
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.Done
+import org.apache.pekko.kafka._
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.stream.scaladsl.Keep
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
 
 import scala.concurrent.Await
 import scala.concurrent.duration._
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/SpecBase.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/SpecBase.scala
similarity index 74%
rename from tests/src/test/scala/akka/kafka/scaladsl/SpecBase.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/SpecBase.scala
index b80a42b1..5368620b 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/SpecBase.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/SpecBase.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.kafka.Repeated
-import akka.kafka.tests.scaladsl.LogCapturing
+import org.apache.pekko.kafka.Repeated
+import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
 // #testkit
-import akka.kafka.testkit.scaladsl.ScalatestKafkaSpec
+import org.apache.pekko.kafka.testkit.scaladsl.ScalatestKafkaSpec
 import org.scalatest.concurrent.{ Eventually, IntegrationPatience, ScalaFutures }
 import org.scalatest.wordspec.AnyWordSpecLike
 import org.scalatest.matchers.should.Matchers
@@ -31,7 +31,7 @@ abstract class SpecBase(kafkaPort: Int)
 // #testkit
 
 // #testcontainers
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
 
 class TestcontainersSampleSpec extends SpecBase with TestcontainersKafkaLike {
   // ...
@@ -39,8 +39,8 @@ class TestcontainersSampleSpec extends SpecBase with TestcontainersKafkaLike {
 // #testcontainers
 
 // #testcontainers-settings
-import akka.kafka.testkit.KafkaTestkitTestcontainersSettings
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
+import org.apache.pekko.kafka.testkit.KafkaTestkitTestcontainersSettings
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaPerClassLike
 
 class TestcontainersNewSettingsSampleSpec extends SpecBase with TestcontainersKafkaPerClassLike {
 
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/TimestampSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TimestampSpec.scala
similarity index 90%
rename from tests/src/test/scala/akka/kafka/scaladsl/TimestampSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TimestampSpec.scala
index 71b432dd..ce152d1e 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/TimestampSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TimestampSpec.scala
@@ -3,12 +3,12 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.kafka.Subscriptions
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
-import akka.stream.testkit.scaladsl.TestSink
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.Subscriptions
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.stream.testkit.scaladsl.TestSink
 import org.apache.kafka.common.TopicPartition
 import org.scalatest.Inside
 import org.scalatest.concurrent.IntegrationPatience
diff --git a/tests/src/test/scala/akka/kafka/scaladsl/TransactionsSpec.scala b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TransactionsSpec.scala
similarity index 97%
rename from tests/src/test/scala/akka/kafka/scaladsl/TransactionsSpec.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TransactionsSpec.scala
index 45b6612e..e3351edc 100644
--- a/tests/src/test/scala/akka/kafka/scaladsl/TransactionsSpec.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/scaladsl/TransactionsSpec.scala
@@ -3,18 +3,18 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.scaladsl
+package org.apache.pekko.kafka.scaladsl
 
 import java.util.concurrent.atomic.AtomicBoolean
 
-import akka.Done
-import akka.kafka.ConsumerMessage.PartitionOffset
-import akka.kafka.scaladsl.Consumer.{ Control, DrainingControl }
-import akka.kafka.testkit.scaladsl.TestcontainersKafkaLike
-import akka.kafka.{ ProducerMessage, _ }
-import akka.stream.{ OverflowStrategy, RestartSettings }
-import akka.stream.scaladsl.{ Keep, RestartSource, Sink, Source }
-import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
+import org.apache.pekko.Done
+import org.apache.pekko.kafka.ConsumerMessage.PartitionOffset
+import org.apache.pekko.kafka.scaladsl.Consumer.{ Control, DrainingControl }
+import org.apache.pekko.kafka.testkit.scaladsl.TestcontainersKafkaLike
+import org.apache.pekko.kafka.{ ProducerMessage, _ }
+import org.apache.pekko.stream.{ OverflowStrategy, RestartSettings }
+import org.apache.pekko.stream.scaladsl.{ Keep, RestartSource, Sink, Source }
+import org.apache.pekko.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
 import org.apache.kafka.clients.consumer.ConsumerConfig
 import org.apache.kafka.clients.producer.ProducerRecord
 import org.scalatest.RecoverMethods._
diff --git a/tests/src/test/scala/akka/kafka/tests/CapturingAppender.scala b/tests/src/test/scala/org/apache/pekko/kafka/tests/CapturingAppender.scala
similarity index 83%
rename from tests/src/test/scala/akka/kafka/tests/CapturingAppender.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/tests/CapturingAppender.scala
index f94261ee..93156a79 100644
--- a/tests/src/test/scala/akka/kafka/tests/CapturingAppender.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/tests/CapturingAppender.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.tests
+package org.apache.pekko.kafka.tests
 
-import akka.annotation.InternalApi
+import org.apache.pekko.annotation.InternalApi
 import ch.qos.logback.classic.spi.ILoggingEvent
 import ch.qos.logback.core.AppenderBase
 
@@ -14,7 +14,7 @@ import ch.qos.logback.core.AppenderBase
  *
  * INTERNAL API
  */
-@InternalApi private[akka] object CapturingAppender {
+@InternalApi private[pekko] object CapturingAppender {
   import LogbackUtil._
 
   private val CapturingAppenderName = "CapturingAppender"
@@ -40,10 +40,10 @@ import ch.qos.logback.core.AppenderBase
  *
  * Logging from tests can be silenced by this appender. When there is a test failure
  * the captured logging events are flushed to the appenders defined for the
- * akka.actor.testkit.typed.internal.CapturingAppenderDelegate logger.
+ * org.apache.pekko.actor.testkit.typed.internal.CapturingAppenderDelegate logger.
  *
- * The flushing on test failure is handled by [[akka.actor.testkit.typed.scaladsl.LogCapturing]]
- * for ScalaTest and [[akka.actor.testkit.typed.javadsl.LogCapturing]] for JUnit.
+ * The flushing on test failure is handled by [[org.apache.pekko.actor.testkit.typed.scaladsl.LogCapturing]]
+ * for ScalaTest and [[org.apache.pekko.actor.testkit.typed.javadsl.LogCapturing]] for JUnit.
  *
- * Use configuration like the following the logback-test.xml:
+ * Use configuration like the following in the logback-test.xml:
  *
@@ -59,7 +59,7 @@ import ch.qos.logback.core.AppenderBase
  *     </root>
  * }}}
  */
-@InternalApi private[akka] class CapturingAppender extends AppenderBase[ILoggingEvent] {
+@InternalApi private[pekko] class CapturingAppender extends AppenderBase[ILoggingEvent] {
   import LogbackUtil._
 
   private var buffer: Vector[ILoggingEvent] = Vector.empty
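
The appender only buffers events; silencing and flush-on-failure come from
mixing the companion trait into a test suite, as SpecBase earlier in this diff
does. A minimal ScalaTest sketch, assuming LogCapturing remains a drop-in
suite mixin as in the Alpakka original (the suite name is hypothetical):

    import org.apache.pekko.kafka.tests.scaladsl.LogCapturing
    import org.scalatest.matchers.should.Matchers
    import org.scalatest.wordspec.AnyWordSpec

    // Logging from a green test stays buffered and is discarded; if a test
    // fails, the buffered events are flushed to the appenders configured for
    // the CapturingAppenderDelegate logger.
    class MySilencedSpec extends AnyWordSpec with Matchers with LogCapturing {
      "a component under test" should {
        "log only when the test fails" in {
          succeed
        }
      }
    }
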
diff --git a/tests/src/test/scala/akka/kafka/tests/LogbackUtil.scala b/tests/src/test/scala/org/apache/pekko/kafka/tests/LogbackUtil.scala
similarity index 91%
rename from tests/src/test/scala/akka/kafka/tests/LogbackUtil.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/tests/LogbackUtil.scala
index ad018e1e..00d24454 100644
--- a/tests/src/test/scala/akka/kafka/tests/LogbackUtil.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/tests/LogbackUtil.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.tests
+package org.apache.pekko.kafka.tests
 
-import akka.annotation.InternalApi
+import org.apache.pekko.annotation.InternalApi
 import ch.qos.logback.classic.Level
 import org.slf4j.LoggerFactory
 
@@ -14,7 +14,7 @@ import org.slf4j.LoggerFactory
  *
  * INTERNAL API
  */
-@InternalApi private[akka] object LogbackUtil {
+@InternalApi private[pekko] object LogbackUtil {
   def loggerNameOrRoot(loggerName: String): String =
     if (loggerName == "") org.slf4j.Logger.ROOT_LOGGER_NAME else loggerName
 
diff --git a/tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingExtension.scala b/tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingExtension.scala
similarity index 92%
rename from tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingExtension.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingExtension.scala
index 6a29d7e3..11d051fa 100644
--- a/tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingExtension.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingExtension.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.tests.javadsl
+package org.apache.pekko.kafka.tests.javadsl
 
-import akka.kafka.tests.CapturingAppender
+import org.apache.pekko.kafka.tests.CapturingAppender
 import org.junit.jupiter.api.extension.{ AfterTestExecutionCallback, BeforeTestExecutionCallback, ExtensionContext }
 
 class LogCapturingExtension extends BeforeTestExecutionCallback with AfterTestExecutionCallback {
diff --git a/tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingJunit4.scala b/tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingJunit4.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingJunit4.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingJunit4.scala
index 022af7c0..50efff7a 100644
--- a/tests/src/test/scala/akka/kafka/tests/javadsl/LogCapturingJunit4.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/tests/javadsl/LogCapturingJunit4.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.tests.javadsl
+package org.apache.pekko.kafka.tests.javadsl
 
-import akka.kafka.tests.CapturingAppender
+import org.apache.pekko.kafka.tests.CapturingAppender
 
 import scala.util.control.NonFatal
 import org.junit.rules.TestRule
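
For JUnit 4 users the same machinery is exposed as a TestRule. A minimal
wiring sketch, assuming the no-argument constructor carried over from the
Alpakka original (the test class is hypothetical; JUnit 4 accepts @Rule on a
method, which is the usual approach from Scala):

    import org.apache.pekko.kafka.tests.javadsl.LogCapturingJunit4
    import org.junit.{ Rule, Test }

    class MyJunitStyleTest {
      // JUnit discovers rules reflectively; annotating a def that returns the
      // rule instance satisfies it from Scala code.
      val logCapturing = new LogCapturingJunit4()
      @Rule def logCapturingRule: LogCapturingJunit4 = logCapturing

      @Test def passes(): Unit = ()
    }
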
diff --git a/tests/src/test/scala/akka/kafka/tests/scaladsl/LogCapturing.scala b/tests/src/test/scala/org/apache/pekko/kafka/tests/scaladsl/LogCapturing.scala
similarity index 95%
rename from tests/src/test/scala/akka/kafka/tests/scaladsl/LogCapturing.scala
rename to tests/src/test/scala/org/apache/pekko/kafka/tests/scaladsl/LogCapturing.scala
index 3838ea94..701308cc 100644
--- a/tests/src/test/scala/akka/kafka/tests/scaladsl/LogCapturing.scala
+++ b/tests/src/test/scala/org/apache/pekko/kafka/tests/scaladsl/LogCapturing.scala
@@ -3,9 +3,9 @@
  * Copyright (C) 2016 - 2020 Lightbend Inc. <https://www.lightbend.com>
  */
 
-package akka.kafka.tests.scaladsl
+package org.apache.pekko.kafka.tests.scaladsl
 
-import akka.kafka.tests.CapturingAppender
+import org.apache.pekko.kafka.tests.CapturingAppender
 
 import scala.util.control.NonFatal
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@pekko.apache.org
For additional commands, e-mail: commits-help@pekko.apache.org