Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2018/11/07 14:30:14 UTC

[GitHub] twalthr closed pull request #7035: [BP-1.7][FLINK-10805] Fix failing end-to-end tests

twalthr closed pull request #7035: [BP-1.7][FLINK-10805] Fix failing end-to-end tests
URL: https://github.com/apache/flink/pull/7035

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

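For orientation, the patch makes three kinds of changes across four test scripts:
it appends '|| true' to grep and curl calls inside polling loops (presumably to
keep them from aborting scripts that run with errexit enabled), it passes explicit
Kafka and Confluent versions when sourcing kafka-common.sh, and it replaces the
read -r -d '' heredoc assignments with $(cat << EOF) command substitutions. Short
notes and sketches follow the affected diffs below.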

diff --git a/flink-end-to-end-tests/test-scripts/common.sh b/flink-end-to-end-tests/test-scripts/common.sh
index 4f628fc6d12..f17d81b8938 100644
--- a/flink-end-to-end-tests/test-scripts/common.sh
+++ b/flink-end-to-end-tests/test-scripts/common.sh
@@ -418,7 +418,7 @@ function wait_job_terminal_state {
   echo "Waiting for job ($job) to reach terminal state $terminal_state ..."
 
   while : ; do
-    N=$(grep -o "Job $job reached globally terminal state $terminal_state" $FLINK_DIR/log/*standalonesession*.log | tail -1)
+    N=$(grep -o "Job $job reached globally terminal state $terminal_state" $FLINK_DIR/log/*standalonesession*.log | tail -1 || true)
 
     if [[ -z $N ]]; then
       sleep 1
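
A note on the change above: assuming the end-to-end scripts run with errexit and
pipefail enabled, a grep that finds no match makes the whole pipeline fail, and
the assignment takes that non-zero status, killing the script on the very first
polling iteration. Appending '|| true' masks the status so the loop can keep
waiting. A minimal, self-contained sketch of the failure mode (hypothetical
pattern and temp file, not from the patch):

set -euo pipefail

log=$(mktemp)
for ((i = 1; i <= 10; i++)); do
  # Without '|| true' the first iteration would already abort the script:
  # the assignment takes the exit status of the command substitution, and
  # pipefail propagates grep's non-zero "no match" status through tail.
  N=$(grep -o "expected marker" "$log" | tail -1 || true)
  if [[ -n $N ]]; then
    echo "found: $N"
    break
  fi
  echo "expected marker" >> "$log"   # the line appears on the second pass
  sleep 1
done
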
diff --git a/flink-end-to-end-tests/test-scripts/elasticsearch-common.sh b/flink-end-to-end-tests/test-scripts/elasticsearch-common.sh
index 834e84528b3..9e752a7c921 100644
--- a/flink-end-to-end-tests/test-scripts/elasticsearch-common.sh
+++ b/flink-end-to-end-tests/test-scripts/elasticsearch-common.sh
@@ -45,7 +45,7 @@ function wait_elasticsearch_working {
     echo "Waiting for Elasticsearch node to work..."
 
     for ((i=1;i<=60;i++)); do
-        curl -XGET 'http://localhost:9200'
+        curl -XGET 'http://localhost:9200' || true
 
         # make sure the elasticsearch node is actually working
         if [ $? -ne 0 ]; then
@@ -69,7 +69,7 @@ function verify_result_line_number {
     fi
 
     while : ; do
-      curl "localhost:9200/${index}/_search?q=*&pretty&size=21" > $TEST_DATA_DIR/output
+      curl "localhost:9200/${index}/_search?q=*&pretty&size=21" > $TEST_DATA_DIR/output || true
 
       if [ -n "$(grep "\"total\" : $numRecords" $TEST_DATA_DIR/output)" ]; then
           echo "Elasticsearch end to end test pass."
@@ -88,7 +88,7 @@ function verify_result_hash {
   local hash=$4
 
   while : ; do
-    curl "localhost:9200/${index}/_search?q=*&pretty" > $TEST_DATA_DIR/es_output
+    curl "localhost:9200/${index}/_search?q=*&pretty" > $TEST_DATA_DIR/es_output || true
 
     if [ -n "$(grep "\"total\" : $numRecords" $TEST_DATA_DIR/es_output)" ]; then
       break
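
The same reasoning applies to the curl calls above: while Elasticsearch is still
starting, curl exits non-zero (connection refused), which under errexit would end
the test before the retry loop gets a chance to run. An equivalent way to write
such a readiness probe, shown here only as a sketch with an assumed host and port,
is to test curl directly in the if condition, where a non-zero status does not
trigger errexit:

set -euo pipefail

for ((i = 1; i <= 60; i++)); do
  # A command used as an 'if' condition may fail without tripping errexit.
  if curl -sf -XGET 'http://localhost:9200' > /dev/null; then
    echo "Elasticsearch is up."
    break
  fi
  echo "Still waiting for Elasticsearch (attempt $i)..."
  sleep 1
done
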
diff --git a/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh b/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
index 323234fa683..c6def99c2af 100755
--- a/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
+++ b/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
@@ -18,7 +18,7 @@
 ################################################################################
 
 source "$(dirname "$0")"/common.sh
-source "$(dirname "$0")"/kafka-common.sh
+source "$(dirname "$0")"/kafka-common.sh 0.10.2.0 3.2.0 3.2
 
 function verify_output {
   local expected=$(printf $1)
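
About the sourcing change above: in bash, arguments given to 'source' become the
positional parameters of the sourced file, so kafka-common.sh can pick up the
Kafka and Confluent versions from $1, $2 and $3 instead of relying on defaults.
The sketch below uses hypothetical variable names; the actual ones in
kafka-common.sh may differ.

# kafka-common.sh (sketch): read versions passed at source time, failing
# fast with a clear message if the caller forgot them.
KAFKA_VERSION=${1:?Kafka version required}
CONFLUENT_VERSION=${2:?Confluent version required}
CONFLUENT_MAJOR_VERSION=${3:?Confluent major.minor version required}

# caller (as in the patch):
# source "$(dirname "$0")"/kafka-common.sh 0.10.2.0 3.2.0 3.2
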
diff --git a/flink-end-to-end-tests/test-scripts/test_sql_client.sh b/flink-end-to-end-tests/test-scripts/test_sql_client.sh
index ce3d1944877..ee89ed394e7 100755
--- a/flink-end-to-end-tests/test-scripts/test_sql_client.sh
+++ b/flink-end-to-end-tests/test-scripts/test_sql_client.sh
@@ -18,7 +18,7 @@
 ################################################################################
 
 source "$(dirname "$0")"/common.sh
-source "$(dirname "$0")"/kafka-common.sh
+source "$(dirname "$0")"/kafka-common.sh 0.10.2.0 3.2.0 3.2
 source "$(dirname "$0")"/elasticsearch-common.sh
 
 SQL_TOOLBOX_JAR=$END_TO_END_DIR/flink-sql-client-test/target/SqlToolbox.jar
@@ -306,7 +306,7 @@ EOF
 
 echo "Executing SQL: Kafka JSON -> Kafka Avro"
 
-read -r -d '' SQL_STATEMENT_1 << EOF
+SQL_STATEMENT_1=$(cat << EOF
 INSERT INTO AvroBothTable
   SELECT
     CAST(TUMBLE_START(rowtime, INTERVAL '1' HOUR) AS VARCHAR) AS event_timestamp,
@@ -320,6 +320,7 @@ INSERT INTO AvroBothTable
     event.message,
     TUMBLE(rowtime, INTERVAL '1' HOUR)
 EOF
+)
 
 echo "$SQL_STATEMENT_1"
 
@@ -331,11 +332,12 @@ $FLINK_DIR/bin/sql-client.sh embedded \
 
 echo "Executing SQL: Kafka Avro -> Filesystem CSV"
 
-read -r -d '' SQL_STATEMENT_2 << EOF
+SQL_STATEMENT_2=$(cat << EOF
 INSERT INTO CsvSinkTable
   SELECT AvroBothTable.*, RegReplace('Test constant folding.', 'Test', 'Success') AS constant
   FROM AvroBothTable
 EOF
+)
 
 echo "$SQL_STATEMENT_2"
 
@@ -360,13 +362,14 @@ check_result_hash "SQL Client Kafka" $RESULT "0a1bf8bf716069b7269f575f87a802c0"
 
 echo "Executing SQL: Values -> Elasticsearch (upsert)"
 
-read -r -d '' SQL_STATEMENT_3 << EOF
+SQL_STATEMENT_3=$(cat << EOF
 INSERT INTO ElasticsearchUpsertSinkTable
   SELECT user_id, user_name, COUNT(*) AS user_count
   FROM (VALUES (1, 'Bob'), (22, 'Alice'), (42, 'Greg'), (42, 'Greg'), (42, 'Greg'), (1, 'Bob'))
     AS UserCountTable(user_id, user_name)
   GROUP BY user_id, user_name
 EOF
+)
 
 JOB_ID=$($FLINK_DIR/bin/sql-client.sh embedded \
   --library $SQL_JARS_DIR \
@@ -380,7 +383,7 @@ verify_result_hash "SQL Client Elasticsearch Upsert" "$ELASTICSEARCH_INDEX" 3 "9
 
 echo "Executing SQL: Values -> Elasticsearch (append, no key)"
 
-read -r -d '' SQL_STATEMENT_4 << EOF
+SQL_STATEMENT_4=$(cat << EOF
 INSERT INTO ElasticsearchAppendSinkTable
   SELECT *
   FROM (
@@ -393,6 +396,7 @@ INSERT INTO ElasticsearchAppendSinkTable
       (1, 'Bob', CAST(0 AS BIGINT)))
     AS UserCountTable(user_id, user_name, user_count)
 EOF
+)
 
 JOB_ID=$($FLINK_DIR/bin/sql-client.sh embedded \
   --library $SQL_JARS_DIR \
@@ -407,7 +411,7 @@ verify_result_line_number 9 "$ELASTICSEARCH_INDEX"
 
 echo "Executing SQL: Match recognize -> Elasticsearch"
 
-read -r -d '' SQL_STATEMENT_5 << EOF
+SQL_STATEMENT_5=$(cat << EOF
 INSERT INTO ElasticsearchAppendSinkTable
   SELECT 1 as user_id, T.userName as user_name, cast(1 as BIGINT) as user_count
   FROM (
@@ -423,6 +427,7 @@ INSERT INTO ElasticsearchAppendSinkTable
         A as user = 'Alice'
   ) T
 EOF
+)
 
 JOB_ID=$($FLINK_DIR/bin/sql-client.sh embedded \
   --library $SQL_JARS_DIR \
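
Finally, the SQL statements above are switched from 'read -r -d ''' to a command
substitution over cat. read with an empty (NUL) delimiter never sees that
delimiter before the heredoc ends, so it exits with a non-zero status even though
the variable is filled correctly; under errexit that aborts the script. Capturing
the heredoc with cat yields the same text and returns 0. A minimal sketch:

set -euo pipefail

# Fails under errexit, although SQL would contain the right text:
#   read -r -d '' SQL << EOF
#   SELECT 1
#   EOF

# Succeeds: the command substitution exits with cat's status (0).
SQL=$(cat << EOF
SELECT 1
EOF
)
echo "$SQL"
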


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services