Posted to commits@flink.apache.org by bl...@apache.org on 2019/08/30 22:26:47 UTC

[flink] branch master updated: [FLINK-12847][kinesis] update flink-connector-kinesis to use Apache 2.0 license code

This is an automated email from the ASF dual-hosted git repository.

bli pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 08d446c  [FLINK-12847][kinesis] update flink-connector-kinesis to use Apache 2.0 license code
08d446c is described below

commit 08d446c2176ff586e3c74c61af3f6797614f051e
Author: Dyana Rose <dy...@salecycle.com>
AuthorDate: Wed Jul 31 10:29:38 2019 +0100

    [FLINK-12847][kinesis] update flink-connector-kinesis to use Apache 2.0 license code
    
    The Kinesis connector can now be built and included in the build artefacts, as it no longer pulls in any Amazon-licensed code.
    
    This closes #9494.
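    
    For reference, once the connector ships with a Flink release, an application
    would pull it in with an ordinary Maven dependency along these lines
    (illustrative sketch only; the version is a placeholder for whichever Flink
    release contains this change):
    
        <!-- hypothetical usage sketch, not part of this commit -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kinesis_2.11</artifactId>
            <version>FLINK_VERSION</version> <!-- placeholder for the target Flink release -->
        </dependency>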
---
 .travis.yml                                        | 118 ++++++++++-----------
 docs/dev/connectors/kinesis.md                     |  28 +----
 docs/dev/connectors/kinesis.zh.md                  |  27 +----
 flink-connectors/flink-connector-kinesis/pom.xml   |  24 +++--
 .../src/main/resources/META-INF/NOTICE             |  22 ++--
 .../resources/META-INF/licenses/LICENSE.amazon     |  39 -------
 flink-connectors/pom.xml                           |  18 +---
 flink-end-to-end-tests/pom.xml                     |  22 +---
 tools/travis/stage.sh                              |   5 +-
 9 files changed, 94 insertions(+), 209 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 8db3525..f58873d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -81,179 +81,179 @@ jobs:
     - if: type in (pull_request, push)
       stage: compile
       script: ./tools/travis_controller.sh compile
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: compile
     - if: type in (pull_request, push)
       stage: test
       script: ./tools/travis_controller.sh core
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: core
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh python
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: python
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh libraries
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: libraries
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh blink_planner
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: blink_planner
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh connectors
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: connectors
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh kafka/gelly
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: kafka/gelly
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh tests
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: tests
     - if: type in (pull_request, push)
       script: ./tools/travis_controller.sh misc
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: misc
     - if: type in (pull_request, push)
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
       name: cleanup
     # hadoop 2.4.1 profile
     - if: type = cron
       stage: compile
       script: ./tools/travis_controller.sh compile
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: compile - hadoop 2.4.1
     - if: type = cron
       stage: test
       script: ./tools/travis_controller.sh core
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: core - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh libraries
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: libraries - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh blink_planner
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: blink_planner - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh connectors
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: connectors - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh kafka/gelly
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: kafka/gelly - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh tests
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: tests - hadoop 2.4.1
     - if: type = cron
       script: ./tools/travis_controller.sh misc
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: misc - hadoop 2.4.1
     - if: type = cron
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
-      env: PROFILE="-Dhadoop.version=2.4.1 -Pinclude-kinesis -Pskip-hive-tests"
+      env: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
       name: cleanup - hadoop 2.4.1
     # scala 2.12 profile
     - if: type = cron
       stage: compile
       script: ./tools/travis_controller.sh compile
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: compile - scala 2.12
     - if: type = cron
       stage: test
       script: ./tools/travis_controller.sh core
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: core - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh libraries
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: libraries - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh blink_planner
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: blink_planner - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh connectors
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: connectors - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh kafka/gelly
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: kafka/gelly - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh tests
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: tests - scala 2.12
     - if: type = cron
       script: ./tools/travis_controller.sh misc
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: misc - scala 2.12
     - if: type = cron
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
       name: cleanup - scala 2.12
     # JDK9 profile
     - if: type = cron
       jdk: "openjdk9"
       stage: compile
       script: ./tools/travis_controller.sh compile
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: compile - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       stage: test
       script: ./tools/travis_controller.sh core
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: core - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh python
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: python - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh libraries
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: libraries - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh blink_planner
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: blink_planner - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh connectors
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: connectors - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh kafka/gelly
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: kafka/gelly - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh tests
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: tests - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       script: ./tools/travis_controller.sh misc
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: misc - jdk 9
     - if: type = cron
       jdk: "openjdk9"
       stage: cleanup
       script: ./tools/travis_controller.sh cleanup
-      env: PROFILE="-Dhadoop.version=2.8.3 -Pinclude-kinesis -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
+      env: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk9"
       name: cleanup - jdk 9
     # Documentation 404 check
     - if: type = cron
@@ -265,91 +265,91 @@ jobs:
     # E2E profiles - Hadoop 2.8
     - if: type = cron
       stage: test
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -De2e-metrics -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -De2e-metrics"
       script: ./tools/travis/nightly.sh split_misc.sh
       name: e2e - misc - hadoop 2.8
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: e2e - ha - hadoop 2.8
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: e2e - sticky - hadoop 2.8
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: e2e - checkpoints - hadoop 2.8
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_container.sh
       name: e2e - container - hadoop 2.8
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3"
       script: ./tools/travis/nightly.sh split_heavy.sh
       name: e2e - heavy - hadoop 2.8
       # E2E profiles - Scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -De2e-metrics -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -De2e-metrics"
       script: ./tools/travis/nightly.sh split_misc.sh
       name: e2e - misc - scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: e2e - ha - scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12"
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: e2e - sticky - scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: e2e - checkpoints - scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12"
       script: ./tools/travis/nightly.sh split_container.sh
       name: e2e - container - scala 2.12
     - if: type = cron
-      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12 -Dinclude-kinesis"
+      env: PROFILE="-Dinclude-hadoop -Dhadoop.version=2.8.3 -Dscala-2.12"
       script: ./tools/travis/nightly.sh split_heavy.sh
       name: e2e - heavy - scala 2.12
       # E2E profiles - Hadoop-free
     - if: type = cron
-      env: PROFILE="-De2e-metrics -Dinclude-kinesis"
+      env: PROFILE="-De2e-metrics"
       script: ./tools/travis/nightly.sh split_misc_hadoopfree.sh
       name: e2e - misc
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis"
+      env: PROFILE=""
       script: ./tools/travis/nightly.sh split_ha.sh
       name: e2e - ha
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis"
+      env: PROFILE=""
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: e2e - sticky
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis"
+      env: PROFILE=""
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: e2e - checkpoints
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis"
+      env: PROFILE=""
       script: ./tools/travis/nightly.sh split_container.sh
       name: e2e - container
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis"
+      env: PROFILE=""
       script: ./tools/travis/nightly.sh split_heavy.sh
       name: e2e - heavy
     - # E2E profiles - Java 9
     - if: type = cron
       stage: test
       jdk: "openjdk9"
-      env: PROFILE="-De2e-metrics -Dinclude-kinesis -Djdk9"
+      env: PROFILE="-De2e-metrics -Djdk9"
       script: ./tools/travis/nightly.sh split_ha.sh
       name: e2e - ha - jdk9
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis -Djdk9"
+      env: PROFILE="-Djdk9"
       script: ./tools/travis/nightly.sh split_sticky.sh
       name: e2e - sticky - jdk 9
     - if: type = cron
-      env: PROFILE="-Dinclude-kinesis -Djdk9"
+      env: PROFILE="-Djdk9"
       script: ./tools/travis/nightly.sh split_checkpoints.sh
       name: e2e - checkpoints - jdk 9
diff --git a/docs/dev/connectors/kinesis.md b/docs/dev/connectors/kinesis.md
index 3383706..98a6290 100644
--- a/docs/dev/connectors/kinesis.md
+++ b/docs/dev/connectors/kinesis.md
@@ -38,32 +38,10 @@ To use the connector, add the following Maven dependency to your project:
 </dependency>
 {% endhighlight %}
 
-**The `flink-connector-kinesis{{ site.scala_version_suffix }}` has a dependency on code licensed under the [Amazon Software License](https://aws.amazon.com/asl/) (ASL).
-Linking to the flink-connector-kinesis will include ASL licensed code into your application.**
-
-The `flink-connector-kinesis{{ site.scala_version_suffix }}` artifact is not deployed to Maven central as part of
-Flink releases because of the licensing issue. Therefore, you need to build the connector yourself from the source.
-
-Download the Flink source or check it out from the git repository. Then, use the following Maven command to build the module:
-{% highlight bash %}
-mvn clean install -Pinclude-kinesis -DskipTests
-# In Maven 3.3 the shading of flink-dist doesn't work properly in one run, so we need to run mvn for flink-dist again.
-cd flink-dist
-mvn clean install -Pinclude-kinesis -DskipTests
-{% endhighlight %}
-
-<span class="label label-danger">Attention</span> For Flink versions 1.4.2 and below, the KPL client version
-used by default in the Kinesis connectors, KPL 0.12.5, is no longer supported by AWS Kinesis Streams
-(see [here](https://docs.aws.amazon.com/streams/latest/dev/kinesis-kpl-upgrades.html)).
-This means that when building the Kinesis connector, you will need to specify a higher version KPL client (above 0.12.6)
-in order for the Flink Kinesis Producer to work. You can do this by specifying the preferred version via the
-`aws.kinesis-kpl.version` property, like so:
-{% highlight bash %}
-mvn clean install -Pinclude-kinesis -Daws.kinesis-kpl.version=0.12.6 -DskipTests
-{% endhighlight %}
+<span class="label label-danger">Attention</span> Prior to Flink version 1.10.0, the `flink-connector-kinesis{{ site.scala_version_suffix }}` connector had a dependency on code licensed under the [Amazon Software License](https://aws.amazon.com/asl/).
+Linking to those prior versions of flink-connector-kinesis will include this code in your application.
 
-The streaming connectors are not part of the binary distribution. See how to link with them for cluster
-execution [here]({{site.baseurl}}/dev/projectsetup/dependencies.html).
+Due to this licensing issue, the `flink-connector-kinesis{{ site.scala_version_suffix }}` artifact is not deployed to Maven Central for those prior versions. Please see the version-specific documentation for further information.
 
 ## Using the Amazon Kinesis Streams Service
 Follow the instructions from the [Amazon Kinesis Streams Developer Guide](https://docs.aws.amazon.com/streams/latest/dev/learning-kinesis-module-one-create-stream.html)
diff --git a/docs/dev/connectors/kinesis.zh.md b/docs/dev/connectors/kinesis.zh.md
index 59e313f..4d7ceea 100644
--- a/docs/dev/connectors/kinesis.zh.md
+++ b/docs/dev/connectors/kinesis.zh.md
@@ -38,32 +38,11 @@ To use the connector, add the following Maven dependency to your project:
 </dependency>
 {% endhighlight %}
 
-**The `flink-connector-kinesis{{ site.scala_version_suffix }}` has a dependency on code licensed under the [Amazon Software License](https://aws.amazon.com/asl/) (ASL).
-Linking to the flink-connector-kinesis will include ASL licensed code into your application.**
-
-The `flink-connector-kinesis{{ site.scala_version_suffix }}` artifact is not deployed to Maven central as part of
-Flink releases because of the licensing issue. Therefore, you need to build the connector yourself from the source.
-
-Download the Flink source or check it out from the git repository. Then, use the following Maven command to build the module:
-{% highlight bash %}
-mvn clean install -Pinclude-kinesis -DskipTests
-# In Maven 3.3 the shading of flink-dist doesn't work properly in one run, so we need to run mvn for flink-dist again.
-cd flink-dist
-mvn clean install -Pinclude-kinesis -DskipTests
-{% endhighlight %}
 
-<span class="label label-danger">Attention</span> For Flink versions 1.4.2 and below, the KPL client version
-used by default in the Kinesis connectors, KPL 0.12.5, is no longer supported by AWS Kinesis Streams
-(see [here](https://docs.aws.amazon.com/streams/latest/dev/kinesis-kpl-upgrades.html)).
-This means that when building the Kinesis connector, you will need to specify a higher version KPL client (above 0.12.6)
-in order for the Flink Kinesis Producer to work. You can do this by specifying the preferred version via the
-`aws.kinesis-kpl.version` property, like so:
-{% highlight bash %}
-mvn clean install -Pinclude-kinesis -Daws.kinesis-kpl.version=0.12.6 -DskipTests
-{% endhighlight %}
+<span class="label label-danger">Attention</span> Prior to Flink version 1.10.0, the `flink-connector-kinesis{{ site.scala_version_suffix }}` connector had a dependency on code licensed under the [Amazon Software License](https://aws.amazon.com/asl/).
+Linking to those prior versions of flink-connector-kinesis will include this code in your application.
 
-The streaming connectors are not part of the binary distribution. See how to link with them for cluster
-execution [here]({{site.baseurl}}/dev/projectsetup/dependencies.html).
+Due to this licensing issue, the `flink-connector-kinesis{{ site.scala_version_suffix }}` artifact is not deployed to Maven Central for those prior versions. Please see the version-specific documentation for further information.
 
 ## Using the Amazon Kinesis Streams Service
 Follow the instructions from the [Amazon Kinesis Streams Developer Guide](https://docs.aws.amazon.com/streams/latest/dev/learning-kinesis-module-one-create-stream.html)
diff --git a/flink-connectors/flink-connector-kinesis/pom.xml b/flink-connectors/flink-connector-kinesis/pom.xml
index b26e67b..415fbb8 100644
--- a/flink-connectors/flink-connector-kinesis/pom.xml
+++ b/flink-connectors/flink-connector-kinesis/pom.xml
@@ -33,10 +33,10 @@ under the License.
 	<artifactId>flink-connector-kinesis_${scala.binary.version}</artifactId>
 	<name>flink-connector-kinesis</name>
 	<properties>
-		<aws.sdk.version>1.11.319</aws.sdk.version>
-		<aws.kinesis-kcl.version>1.9.0</aws.kinesis-kcl.version>
-		<aws.kinesis-kpl.version>0.12.9</aws.kinesis-kpl.version>
-		<aws.dynamodbstreams-kinesis-adapter.version>1.4.0</aws.dynamodbstreams-kinesis-adapter.version>
+		<aws.sdk.version>1.11.603</aws.sdk.version>
+		<aws.kinesis-kcl.version>1.11.2</aws.kinesis-kcl.version>
+		<aws.kinesis-kpl.version>0.13.1</aws.kinesis-kpl.version>
+		<aws.dynamodbstreams-kinesis-adapter.version>1.5.0</aws.dynamodbstreams-kinesis-adapter.version>
 	</properties>
 
 	<packaging>jar</packaging>
@@ -95,10 +95,6 @@ under the License.
 			<scope>test</scope>
 		</dependency>
 
-		<!-- Note:
-			The below dependencies are licenced under the Amazon Software License.
-			Flink includes the "flink-connector-kinesis" only as an optional dependency for that reason.
-		-->
 		<dependency>
 			<groupId>com.amazonaws</groupId>
 			<artifactId>aws-java-sdk-kinesis</artifactId>
@@ -151,6 +147,18 @@ under the License.
 
 	</dependencies>
 
+	<dependencyManagement>
+		<dependencies>
+
+			<dependency>
+				<groupId>org.apache.httpcomponents</groupId>
+				<artifactId>httpclient</artifactId>
+				<version>4.5.9</version>
+			</dependency>
+
+		</dependencies>
+	</dependencyManagement>
+
 	<build>
 		<plugins>
 			<plugin>
diff --git a/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/NOTICE b/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/NOTICE
index f4540e3..e3af608 100644
--- a/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/NOTICE
+++ b/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/NOTICE
@@ -6,21 +6,19 @@ The Apache Software Foundation (http://www.apache.org/).
 
 This project bundles the following dependencies under the Apache Software License 2.0. (http://www.apache.org/licenses/LICENSE-2.0.txt) 
 
-- com.amazonaws:aws-java-sdk-core:1.11.319
-- com.amazonaws:aws-java-sdk-kinesis:1.11.319
-- com.amazonaws:aws-java-sdk-sts:1.11.319
-- com.amazonaws:jmespath-java:1.11.319
-- org.apache.httpcomponents:httpclient:4.5.3
+- com.amazonaws:amazon-kinesis-client:1.11.2
+- com.amazonaws:amazon-kinesis-producer:0.13.1
+- com.amazonaws:aws-java-sdk-core:1.11.603
+- com.amazonaws:aws-java-sdk-kinesis:1.11.603
+- com.amazonaws:aws-java-sdk-kms:1.11.603
+- com.amazonaws:aws-java-sdk-s3:1.11.603
+- com.amazonaws:aws-java-sdk-sts:1.11.603
+- com.amazonaws:dynamodb-streams-kinesis-adapter:1.5.0
+- com.amazonaws:jmespath-java:1.11.603
+- org.apache.httpcomponents:httpclient:4.5.9
 - org.apache.httpcomponents:httpcore:4.4.6
 
 This project bundles the following dependencies under the BSD license.
 See bundled license files for details.
 
 - com.google.protobuf:protobuf-java:2.6.1
-
-This project bundles the following dependencies under the Amazon Softward License.
-See bundled license files for details.
-
-- com.amazonaws:amazon-kinesis-client:1.9.0
-- com.amazonaws:amazon-kinesis-producer:0.12.9
-
diff --git a/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/licenses/LICENSE.amazon b/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/licenses/LICENSE.amazon
deleted file mode 100644
index e933f69..0000000
--- a/flink-connectors/flink-connector-kinesis/src/main/resources/META-INF/licenses/LICENSE.amazon
+++ /dev/null
@@ -1,39 +0,0 @@
-Amazon Software License
-
-This Amazon Software License (“License”) governs your use, reproduction, and distribution of the accompanying software as specified below.
-1. Definitions
-
-“Licensor” means any person or entity that distributes its Work.
-
-“Software” means the original work of authorship made available under this License.
-
-“Work” means the Software and any additions to or derivative works of the Software that are made available under this License.
-
-The terms “reproduce,” “reproduction,” “derivative works,” and “distribution” have the meaning as provided under U.S. copyright law; provided, however, that for the purposes of this License, derivative works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work.
-
-Works, including the Software, are “made available” under this License by including in or with the Work either (a) a copyright notice referencing the applicability of this License to the Work, or (b) a copy of this License.
-2. License Grants
-
-2.1 Copyright Grant. Subject to the terms and conditions of this License, each Licensor grants to you a perpetual, worldwide, non-exclusive, royalty-free, copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense and distribute its Work and any resulting derivative works in any form.
-
-2.2 Patent Grant. Subject to the terms and conditions of this License, each Licensor grants to you a perpetual, worldwide, non-exclusive, royalty-free patent license to make, have made, use, sell, offer for sale, import, and otherwise transfer its Work, in whole or in part. The foregoing license applies only to the patent claims licensable by Licensor that would be infringed by Licensor’s Work (or portion thereof) individually and excluding any combinations with any other materials or te [...]
-3. Limitations
-
-3.1 Redistribution. You may reproduce or distribute the Work only if (a) you do so under this License, (b) you include a complete copy of this License with your distribution, and (c) you retain without modification any copyright, patent, trademark, or attribution notices that are present in the Work.
-
-3.2 Derivative Works. You may specify that additional or different terms apply to the use, reproduction, and distribution of your derivative works of the Work (“Your Terms”) only if (a) Your Terms provide that the use limitation in Section 3.3 applies to your derivative works, and (b) you identify the specific derivative works that are subject to Your Terms. Notwithstanding Your Terms, this License (including the redistribution requirements in Section 3.1) will continue to apply to the W [...]
-
-3.3 Use Limitation. The Work and any derivative works thereof only may be used or intended for use with the web services, computing platforms or applications provided by Amazon.com, Inc. or its affiliates, including Amazon Web Services, Inc.
-
-3.4 Patent Claims. If you bring or threaten to bring a patent claim against any Licensor (including any claim, cross-claim or counterclaim in a lawsuit) to enforce any patents that you allege are infringed by any Work, then your rights under this License from such Licensor (including the grants in Sections 2.1 and 2.2) will terminate immediately.
-
-3.5 Trademarks. This License does not grant any rights to use any Licensor’s or its affiliates’ names, logos, or trademarks, except as necessary to reproduce the notices described in this License.
-
-3.6 Termination. If you violate any term of this License, then your rights under this License (including the grants in Sections 2.1 and 2.2) will terminate immediately.
-4. Disclaimer of Warranty.
-
-THE WORK IS PROVIDED “AS IS” WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF M ERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER THIS LICENSE. SOME STATES’ CONSUMER LAWS DO NOT ALLOW EXCLUSION OF AN IMPLIED WARRANTY, SO THIS DISCLAIMER MAY NOT APPLY TO YOU.
-5. Limitation of Liability.
-
-EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK (INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER COMM [...]
-
diff --git a/flink-connectors/pom.xml b/flink-connectors/pom.xml
index 0718e3d..41a5eaa 100644
--- a/flink-connectors/pom.xml
+++ b/flink-connectors/pom.xml
@@ -57,6 +57,7 @@ under the License.
 		<module>flink-connector-filesystem</module>
 		<module>flink-connector-kafka</module>
 		<module>flink-connector-gcp-pubsub</module>
+		<module>flink-connector-kinesis</module>
 	</modules>
 
 	<!-- override these root dependencies as 'provided', so they don't end up
@@ -114,23 +115,6 @@ under the License.
 				<module>flink-connector-kafka-0.8</module>
 			</modules>
 		</profile>
-		<!--
-			We include the kinesis module only optionally because it contains a dependency
-			licenced under the "Amazon Software License".
-			In accordance with the discussion in https://issues.apache.org/jira/browse/LEGAL-198
-			this is an optional module for Flink.
-		-->
-		<profile>
-			<id>include-kinesis</id>
-			<activation>
-				<property>
-					<name>include-kinesis</name>
-				</property>
-			</activation>
-			<modules>
-				<module>flink-connector-kinesis</module>
-			</modules>
-		</profile>
 	</profiles>
 
 	<build>
diff --git a/flink-end-to-end-tests/pom.xml b/flink-end-to-end-tests/pom.xml
index 4ae2c2d..b7ac551 100644
--- a/flink-end-to-end-tests/pom.xml
+++ b/flink-end-to-end-tests/pom.xml
@@ -70,29 +70,9 @@ under the License.
 		<module>flink-streaming-kafka010-test</module>
 		<module>flink-plugins-test</module>
 		<module>flink-tpch-test</module>
+		<module>flink-streaming-kinesis-test</module>
 	</modules>
 
-	<!-- See main pom.xml for explanation of profiles -->
-	<profiles>
-		<!--
-			We include the kinesis module only optionally because it contains a dependency
-			licenced under the "Amazon Software License".
-			In accordance with the discussion in https://issues.apache.org/jira/browse/LEGAL-198
-			this is an optional module for Flink.
-		-->
-		<profile>
-			<id>include-kinesis</id>
-			<activation>
-				<property>
-					<name>include-kinesis</name>
-				</property>
-			</activation>
-			<modules>
-				<module>flink-streaming-kinesis-test</module>
-			</modules>
-		</profile>
-	</profiles>
-
 	<build>
 		<plugins>
 			<plugin>
diff --git a/tools/travis/stage.sh b/tools/travis/stage.sh
index 7bd7a97..cd90337 100644
--- a/tools/travis/stage.sh
+++ b/tools/travis/stage.sh
@@ -102,6 +102,7 @@ flink-connectors/flink-connector-kafka-base,\
 flink-connectors/flink-connector-nifi,\
 flink-connectors/flink-connector-rabbitmq,\
 flink-connectors/flink-connector-twitter,\
+flink-connectors/flink-connector-kinesis,\
 flink-metrics/flink-metrics-dropwizard,\
 flink-metrics/flink-metrics-graphite,\
 flink-metrics/flink-metrics-jmx,\
@@ -129,10 +130,6 @@ MODULES_CONNECTORS_JDK9_EXCLUSIONS="\
 MODULES_TESTS="\
 flink-tests"
 
-if [[ ${PROFILE} == *"include-kinesis"* ]]; then
-    MODULES_CONNECTORS="$MODULES_CONNECTORS,flink-connectors/flink-connector-kinesis"
-fi
-
 # we can only build the Kafka 0.8 connector when building for Scala 2.11
 if [[ $PROFILE == *"scala-2.11"* ]]; then
     MODULES_CONNECTORS="$MODULES_CONNECTORS,flink-connectors/flink-connector-kafka-0.8"