You are viewing a plain text version of this content; the hyperlink to the canonical copy was lost in extraction. The canonical version is available in the Apache mailing-list archives for commits@flink.apache.org.
Posted to commits@flink.apache.org by ji...@apache.org on 2019/07/31 07:06:18 UTC

[flink] branch master updated: [hotfix] [travis] Fix the python travis failure (#9286)

This is an automated email from the ASF dual-hosted git repository.

jincheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 6dac7a7  [hotfix] [travis] Fix the python travis failure (#9286)
6dac7a7 is described below

commit 6dac7a7f7fd72aba903ac339f9f6bfcd44897d39
Author: dianfu <fu...@alibaba-inc.com>
AuthorDate: Wed Jul 31 15:05:51 2019 +0800

    [hotfix] [travis] Fix the python travis failure (#9286)
---
 docs/ops/cli.md                                        | 18 ++++++++----------
 docs/ops/cli.zh.md                                     | 18 ++++++++----------
 .../src/main/flink-bin/bin/pyflink-gateway-server.sh   |  5 ++---
 tools/travis_controller.sh                             |  3 ++-
 4 files changed, 20 insertions(+), 24 deletions(-)

diff --git a/docs/ops/cli.md b/docs/ops/cli.md
index 6d24f29..4e84267 100644
--- a/docs/ops/cli.md
+++ b/docs/ops/cli.md
@@ -100,40 +100,38 @@ These examples about how to submit a job in CLI.
 
 -   Run Python Table program:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program with pyFiles:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar> \
+        ./bin/flink run -py examples/python/table/batch/word_count.py \
                                 -pyfs file:///user.txt,hdfs:///$namenode_address/username.txt
 
 -   Run Python Table program with pyFiles and pyModule:
 
-        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch -j <path/to/flink-table.jar>
+        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch
 
 -   Run Python Table program with parallelism 16:
 
-        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program with flink log output disabled:
 
-        ./bin/flink run -q -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -q -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program in detached mode:
 
-        ./bin/flink run -d -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -d -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program on a specific JobManager:
 
         ./bin/flink run -m myJMHost:8081 \
-                               -py examples/python/table/batch/word_count.py \
-                               -j <path/to/flink-table.jar>
+                               -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program using a [per-job YARN cluster]({{site.baseurl}}/ops/deployment/yarn_setup.html#run-a-single-flink-job-on-hadoop-yarn) with 2 TaskManagers:
 
         ./bin/flink run -m yarn-cluster -yn 2 \
-                               -py examples/python/table/batch/word_count.py \
-                               -j <path/to/flink-table.jar>
+                               -py examples/python/table/batch/word_count.py
 </div>
 
 ### Job Management Examples
diff --git a/docs/ops/cli.zh.md b/docs/ops/cli.zh.md
index 8370fd8..b8cc94a 100644
--- a/docs/ops/cli.zh.md
+++ b/docs/ops/cli.zh.md
@@ -100,40 +100,38 @@ available.
 
 -   提交一个Python Table的作业:
 
-        ./bin/flink run -py WordCount.py -j <path/to/flink-table.jar>
+        ./bin/flink run -py WordCount.py
 
 -   提交一个有多个依赖的Python Table的作业:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar> \
+        ./bin/flink run -py examples/python/table/batch/word_count.py \
                                 -pyfs file:///user.txt,hdfs:///$namenode_address/username.txt
 
 -   提交一个有多个依赖的Python Table的作业,Python作业的主入口通过pym选项指定:
 
-        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch -j <path/to/flink-table.jar>
+        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch
 
 -   提交一个指定并发度为16的Python Table的作业:
 
-        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py
 
 -   提交一个关闭flink日志输出的Python Table的作业:
 
-        ./bin/flink run -q -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -q -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在detached模式下的Python Table的作业:
 
-        ./bin/flink run -d -py examples/python/table/batch/word_count.py -j <path/to/flink-table.jar>
+        ./bin/flink run -d -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在指定JobManager上的Python Table的作业:
 
         ./bin/flink run -m myJMHost:8081 \
-                            -py examples/python/table/batch/word_count.py \
-                            -j <path/to/flink-table.jar>
+                            -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在有两个TaskManager的[per-job YARN cluster]({{site.baseurl}}/ops/deployment/yarn_setup.html#run-a-single-flink-job-on-hadoop-yarn)的Python Table的作业:
 
         ./bin/flink run -m yarn-cluster -yn 2 \
-                                 -py examples/python/table/batch/word_count.py \
-                                 -j <path/to/flink-table.jar>
+                                 -py examples/python/table/batch/word_count.py
                                  
 </div>
 
diff --git a/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh b/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
index 16fe6b3..4cd642c 100644
--- a/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
+++ b/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
@@ -50,7 +50,6 @@ done
 log=$FLINK_LOG_DIR/flink-$FLINK_IDENT_STRING-python-$HOSTNAME.log
 log_setting=(-Dlog.file="$log" -Dlog4j.configuration=file:"$FLINK_CONF_DIR"/log4j-cli.properties -Dlogback.configurationFile=file:"$FLINK_CONF_DIR"/logback.xml)
 
-TABLE_JAR_PATH=`echo "$FLINK_HOME"/lib/flink-table*.jar`
 PYTHON_JAR_PATH=`echo "$FLINK_HOME"/opt/flink-python*.jar`
 
 FLINK_TEST_CLASSPATH=""
@@ -94,8 +93,8 @@ fi
 ARGS_COUNT=${#ARGS[@]}
 if [[ ${ARGS[0]} == "local" ]]; then
   ARGS=("${ARGS[@]:1:$ARGS_COUNT}")
-  exec $JAVA_RUN $JVM_ARGS "${log_setting[@]}" -cp ${FLINK_CLASSPATH}:${TABLE_JAR_PATH}:${PYTHON_JAR_PATH}:${FLINK_TEST_CLASSPATH} ${DRIVER} ${ARGS[@]}
+  exec $JAVA_RUN $JVM_ARGS "${log_setting[@]}" -cp ${FLINK_CLASSPATH}:${PYTHON_JAR_PATH}:${FLINK_TEST_CLASSPATH} ${DRIVER} ${ARGS[@]}
 else
   ARGS=("${ARGS[@]:1:$ARGS_COUNT}")
-  exec "$FLINK_BIN_DIR"/flink run ${ARGS[@]} -c ${DRIVER} -j ${TABLE_JAR_PATH}
+  exec "$FLINK_BIN_DIR"/flink run ${ARGS[@]} -c ${DRIVER}
 fi
diff --git a/tools/travis_controller.sh b/tools/travis_controller.sh
index 3f3f5d8..256235b 100755
--- a/tools/travis_controller.sh
+++ b/tools/travis_controller.sh
@@ -146,7 +146,8 @@ if [ $STAGE == "$STAGE_COMPILE" ]; then
             ! -path "$CACHE_FLINK_DIR/flink-runtime/target/flink-runtime*tests.jar" \
             ! -path "$CACHE_FLINK_DIR/flink-streaming-java/target/flink-streaming-java*tests.jar" \
             ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-dist*.jar" \
-            ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table*.jar" \
+            ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table_*.jar" \
+            ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table-blink*.jar" \
             ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/opt/flink-python*.jar" \
             ! -path "$CACHE_FLINK_DIR/flink-connectors/flink-connector-elasticsearch-base/target/flink-*.jar" \
             ! -path "$CACHE_FLINK_DIR/flink-connectors/flink-connector-kafka-base/target/flink-*.jar" \