You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by ma...@apache.org on 2022/02/18 08:52:14 UTC

[flink] branch release-1.13 updated (f71cbb9 -> b3e520d)

This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a change to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git.


    from f71cbb9  [FLINK-25883][python] Set the default value of DEFAULT_BUNDLE_PROCESSOR_CACHE_SHUTDOWN_THRESHOLD_S to 30 days
     new f445136  [FLINK-26105][e2e] Removes printing of logs to stdout for pyflink_test
     new b3e520d  [FLINK-26105][e2e] Fixes log file extension

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 flink-end-to-end-tests/test-scripts/common.sh              | 14 +++++++-------
 flink-end-to-end-tests/test-scripts/common_ha.sh           |  2 +-
 .../test-scripts/queryable_state_base.sh                   |  4 ++--
 flink-end-to-end-tests/test-scripts/test_cli.sh            |  2 +-
 flink-end-to-end-tests/test-scripts/test_pyflink.sh        | 14 --------------
 .../test-scripts/test_rocksdb_state_memory_control.sh      |  2 +-
 tools/ci/controller_utils.sh                               |  2 +-
 tools/ci/maven-utils.sh                                    |  2 +-
 8 files changed, 14 insertions(+), 28 deletions(-)

[flink] 01/02: [FLINK-26105][e2e] Removes printing of logs to stdout for pyflink_test

Posted by ma...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f44513693d6a93af60584fa10f9fd6fb5b088869
Author: Matthias Pohl <ma...@ververica.com>
AuthorDate: Thu Feb 17 13:18:17 2022 +0100

    [FLINK-26105][e2e] Removes printing of logs to stdout for pyflink_test
    
    I removed the log printing from test_pyflink.sh to align it with
    how errors are handled in other e2e tests. The logs are accessible
    through the build artifacts and shouldn't spill into stdout. The
    rolling filenames in Flink might lead to a strange ordering anyway.
---
 flink-end-to-end-tests/test-scripts/test_pyflink.sh | 14 --------------
 1 file changed, 14 deletions(-)

diff --git a/flink-end-to-end-tests/test-scripts/test_pyflink.sh b/flink-end-to-end-tests/test-scripts/test_pyflink.sh
index 24ffc59..489b6e0 100755
--- a/flink-end-to-end-tests/test-scripts/test_pyflink.sh
+++ b/flink-end-to-end-tests/test-scripts/test_pyflink.sh
@@ -253,16 +253,6 @@ function read_msg_from_kafka {
     --consumer-property group.id=$3 --timeout-ms 90000 2> /dev/null
 }
 
-function cat_jm_logs {
-     local log_file_name=${3:-standalonesession}
-     cat $FLINK_LOG_DIR/*$log_file_name*.log
-}
-
-function cat_tm_logs {
-	local logfile="$FLINK_LOG_DIR/flink*taskexecutor*log"
-	cat ${logfile}
-}
-
 send_msg_to_kafka "${PAYMENT_MSGS[*]}"
 
 JOB_ID=$(${FLINK_DIR}/bin/flink run \
@@ -308,9 +298,5 @@ if [[ "${EXPECTED_MSG[*]}" != "${SORTED_READ_MSG[*]}" ]]; then
     echo "Output from Flink program does not match expected output."
     echo -e "EXPECTED Output: --${EXPECTED_MSG[*]}--"
     echo -e "ACTUAL: --${SORTED_READ_MSG[*]}--"
-    jm_log=$(cat_jm_logs)
-    echo "JobManager logs: " ${jm_log}
-    tm_log=$(cat_tm_logs)
-    echo "TaskManager logs: " ${tm_log}
     exit 1
 fi

[flink] 02/02: [FLINK-26105][e2e] Fixes log file extension

Posted by ma...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mapohl pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b3e520dee26c2bc82a6e80f5b22dc22361f4d65e
Author: Matthias Pohl <ma...@ververica.com>
AuthorDate: Wed Feb 16 16:16:12 2022 +0100

    [FLINK-26105][e2e] Fixes log file extension
    
    Rolling log file naming strategy is enabled by default which
    might cause test instabilities due to the relevant substring
    not being present in the *.log file. This change expands the
    grep and find calls to also consider *.log.[0-9]+ files.
---
 flink-end-to-end-tests/test-scripts/common.sh              | 14 +++++++-------
 flink-end-to-end-tests/test-scripts/common_ha.sh           |  2 +-
 .../test-scripts/queryable_state_base.sh                   |  4 ++--
 flink-end-to-end-tests/test-scripts/test_cli.sh            |  2 +-
 .../test-scripts/test_rocksdb_state_memory_control.sh      |  2 +-
 tools/ci/controller_utils.sh                               |  2 +-
 tools/ci/maven-utils.sh                                    |  2 +-
 7 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/flink-end-to-end-tests/test-scripts/common.sh b/flink-end-to-end-tests/test-scripts/common.sh
index 790abfa..e20209f 100644
--- a/flink-end-to-end-tests/test-scripts/common.sh
+++ b/flink-end-to-end-tests/test-scripts/common.sh
@@ -475,7 +475,7 @@ function wait_for_job_state_transition {
   echo "Waiting for job ($job) to switch from state ${initial_state} to state ${next_state} ..."
 
   while : ; do
-    N=$(grep -o "($job) switched from state ${initial_state} to ${next_state}" $FLINK_LOG_DIR/*standalonesession*.log | tail -1)
+    N=$(grep -o "($job) switched from state ${initial_state} to ${next_state}" $FLINK_LOG_DIR/*standalonesession*.log* | tail -1)
 
     if [[ -z $N ]]; then
       sleep 1
@@ -510,7 +510,7 @@ function wait_job_terminal_state {
   echo "Waiting for job ($job) to reach terminal state $expected_terminal_state ..."
 
   while : ; do
-    local N=$(grep -o "Job $job reached terminal state .*" $FLINK_LOG_DIR/*$log_file_name*.log | tail -1 || true)
+    local N=$(grep -o "Job $job reached terminal state .*" $FLINK_LOG_DIR/*$log_file_name*.log* | tail -1 || true)
     if [[ -z $N ]]; then
       sleep 1
     else
@@ -632,7 +632,7 @@ function get_job_metric {
 function get_metric_processed_records {
   OPERATOR=$1
   JOB_NAME="${2:-General purpose test job}"
-  N=$(grep ".${JOB_NAME}.$OPERATOR.numRecordsIn:" $FLINK_LOG_DIR/*taskexecutor*.log | sed 's/.* //g' | tail -1)
+  N=$(grep ".${JOB_NAME}.$OPERATOR.numRecordsIn:" $FLINK_LOG_DIR/*taskexecutor*.log* | sed 's/.* //g' | tail -1)
   if [ -z $N ]; then
     N=0
   fi
@@ -642,7 +642,7 @@ function get_metric_processed_records {
 function get_num_metric_samples {
   OPERATOR=$1
   JOB_NAME="${2:-General purpose test job}"
-  N=$(grep ".${JOB_NAME}.$OPERATOR.numRecordsIn:" $FLINK_LOG_DIR/*taskexecutor*.log | wc -l)
+  N=$(grep ".${JOB_NAME}.$OPERATOR.numRecordsIn:" $FLINK_LOG_DIR/*taskexecutor*.log* | wc -l)
   if [ -z $N ]; then
     N=0
   fi
@@ -692,7 +692,7 @@ function wait_num_of_occurence_in_logs {
     echo "Waiting for text ${text} to appear ${number} of times in logs..."
 
     while : ; do
-      N=$(grep -o "${text}" $FLINK_LOG_DIR/*${logs}*.log | wc -l)
+      N=$(grep -o "${text}" $FLINK_LOG_DIR/*${logs}*.log* | wc -l)
 
       if [ -z $N ]; then
         N=0
@@ -721,7 +721,7 @@ function wait_num_checkpoints {
     echo "Waiting for job ($JOB) to have at least $NUM_CHECKPOINTS completed checkpoints ..."
 
     while : ; do
-      N=$(grep -o "Completed checkpoint [1-9]* for job $JOB" $FLINK_LOG_DIR/*standalonesession*.log | awk '{print $3}' | tail -1)
+      N=$(grep -o "Completed checkpoint [1-9]* for job $JOB" $FLINK_LOG_DIR/*standalonesession*.log* | awk '{print $3}' | tail -1)
 
       if [ -z $N ]; then
         N=0
@@ -763,7 +763,7 @@ function expect_in_taskmanager_logs {
     local expected="$1"
     local timeout=$2
     local i=0
-    local logfile="$FLINK_LOG_DIR/flink*taskexecutor*log"
+    local logfile="$FLINK_LOG_DIR/flink*taskexecutor*log*"
 
 
     while ! grep "${expected}" ${logfile} > /dev/null; do
diff --git a/flink-end-to-end-tests/test-scripts/common_ha.sh b/flink-end-to-end-tests/test-scripts/common_ha.sh
index 4612fec..23f89ee 100644
--- a/flink-end-to-end-tests/test-scripts/common_ha.sh
+++ b/flink-end-to-end-tests/test-scripts/common_ha.sh
@@ -49,7 +49,7 @@ function verify_num_occurences_in_logs() {
     local text="$2"
     local expected_no="$3"
 
-    local actual_no=$(grep -r --include "*${log_pattern}*.log" -e "${text}" "$FLINK_LOG_DIR/" | cut -d ":" -f 1 | uniq | wc -l)
+    local actual_no=$(grep -r --include "*${log_pattern}*.log*" -e "${text}" "$FLINK_LOG_DIR/" | cut -d ":" -f 1 | sed "s/\.[0-9]\{1,\}$//g" | uniq | wc -l)
     [[ "${expected_no}" -eq "${actual_no}" ]]
 }
 
diff --git a/flink-end-to-end-tests/test-scripts/queryable_state_base.sh b/flink-end-to-end-tests/test-scripts/queryable_state_base.sh
index 87957ec..9825931 100644
--- a/flink-end-to-end-tests/test-scripts/queryable_state_base.sh
+++ b/flink-end-to-end-tests/test-scripts/queryable_state_base.sh
@@ -25,7 +25,7 @@ function link_queryable_state_lib {
 
 # Returns the ip address of the queryable state server
 function get_queryable_state_server_ip {
-    local ip=$(cat $FLINK_LOG_DIR/flink*taskexecutor*log \
+    local ip=$(cat $FLINK_LOG_DIR/flink*taskexecutor*log* \
         | grep "Started Queryable State Server" \
         | head -1 \
         | grep -Eo  "\.*[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.*")
@@ -35,7 +35,7 @@ function get_queryable_state_server_ip {
 
 # Returns the ip address of the queryable state server
 function get_queryable_state_proxy_port {
-    local port=$(cat $FLINK_LOG_DIR/flink*taskexecutor*log \
+    local port=$(cat $FLINK_LOG_DIR/flink*taskexecutor*log* \
         | grep "Started Queryable State Proxy Server" \
         | head -1 \
         | grep -Eo  "\.*\:([0-9]{3,5})\.*" | tr -d ":.")
diff --git a/flink-end-to-end-tests/test-scripts/test_cli.sh b/flink-end-to-end-tests/test-scripts/test_cli.sh
index 985ce67..0746469 100755
--- a/flink-end-to-end-tests/test-scripts/test_cli.sh
+++ b/flink-end-to-end-tests/test-scripts/test_cli.sh
@@ -65,7 +65,7 @@ function extract_valid_job_list_by_type_from_job_list_return() {
 }
 
 function extract_task_manager_slot_request_count() {
-    COUNT=`grep "Receive slot request" $FLINK_LOG_DIR/*taskexecutor*.log | wc -l`
+    COUNT=`grep "Receive slot request" $FLINK_LOG_DIR/*taskexecutor*.log* | wc -l`
     echo $COUNT
 }
 
diff --git a/flink-end-to-end-tests/test-scripts/test_rocksdb_state_memory_control.sh b/flink-end-to-end-tests/test-scripts/test_rocksdb_state_memory_control.sh
index 66bfa43..d42e806 100755
--- a/flink-end-to-end-tests/test-scripts/test_rocksdb_state_memory_control.sh
+++ b/flink-end-to-end-tests/test-scripts/test_rocksdb_state_memory_control.sh
@@ -80,7 +80,7 @@ function buildBaseJobCmd {
 function find_max_block_cache_usage() {
   OPERATOR=$1
   JOB_NAME="${2:-General purpose test job}"
-  N=$(grep ".${JOB_NAME}.$OPERATOR.rocksdb.block-cache-usage:" $FLINK_LOG_DIR/*taskexecutor*.log | sed 's/.* //g' | sort -rn | head -n 1)
+  N=$(grep ".${JOB_NAME}.$OPERATOR.rocksdb.block-cache-usage:" $FLINK_LOG_DIR/*taskexecutor*.log* | sed 's/.* //g' | sort -rn | head -n 1)
   if [ -z $N ]; then
     N=0
   fi
diff --git a/tools/ci/controller_utils.sh b/tools/ci/controller_utils.sh
index da0a414..8700379 100644
--- a/tools/ci/controller_utils.sh
+++ b/tools/ci/controller_utils.sh
@@ -33,7 +33,7 @@ print_system_info() {
 
 # locate YARN logs and put them into artifacts directory
 put_yarn_logs_to_artifacts() {
-	for file in `find ./flink-yarn-tests/target -type f -name '*.log' -or -name '*.out'`; do
+	for file in `find ./flink-yarn-tests/target -type f -name '*.log*' -or -name '*.out'`; do
 		TARGET_FILE=`echo "$file" | grep -Eo "container_[0-9_]+/(.*).[a-z]{3}"`
 		TARGET_DIR=`dirname	 "$TARGET_FILE"`
 		mkdir -p "$DEBUG_FILES_OUTPUT_DIR/yarn-tests/$TARGET_DIR"
diff --git a/tools/ci/maven-utils.sh b/tools/ci/maven-utils.sh
index 2404064..2de597e 100755
--- a/tools/ci/maven-utils.sh
+++ b/tools/ci/maven-utils.sh
@@ -73,7 +73,7 @@ function collect_coredumps {
 	local SEARCHDIR=$1
 	local TARGET_DIR=$2
 	echo "Searching for .dump, .dumpstream and related files in '$SEARCHDIR'"
-	for file in `find $SEARCHDIR -type f -regextype posix-extended -iregex '.*\.hprof|.*\.dump|.*\.dumpstream|.*hs.*\.log|.*/core(.[0-9]+)?$'`; do
+	for file in `find $SEARCHDIR -type f -regextype posix-extended -iregex '.*\.hprof|.*\.dump|.*\.dumpstream|.*hs.*\.log(\.[0-9]+)?|.*/core(\.[0-9]+)?$'`; do
 		echo "Moving '$file' to target directory ('$TARGET_DIR')"
 		mv $file $TARGET_DIR/$(echo $file | tr "/" "-")
 	done