Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2018/11/19 08:39:23 UTC

[GitHub] StephanEwen closed pull request #7077: [FLINK-10736][E2E tests] Switch to new IT_CASE_S3* and static/temp s3 content

URL: https://github.com/apache/flink/pull/7077

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for the
sake of provenance; two short usage sketches for the renamed IT_CASE_S3_*
variables follow the diff:

diff --git a/flink-end-to-end-tests/test-scripts/common_s3.sh b/flink-end-to-end-tests/test-scripts/common_s3.sh
index 5c16bb75bea..4a778f3ad6b 100644
--- a/flink-end-to-end-tests/test-scripts/common_s3.sh
+++ b/flink-end-to-end-tests/test-scripts/common_s3.sh
@@ -17,40 +17,43 @@
 # limitations under the License.
 ################################################################################
 
-if [[ -z "$ARTIFACTS_AWS_BUCKET" ]]; then
+if [[ -z "$IT_CASE_S3_BUCKET" ]]; then
     echo "Did not find AWS environment variables, NOT running the e2e test."
     exit 0
 else
-    echo "Found AWS bucket $ARTIFACTS_AWS_BUCKET, running the e2e test."
+    echo "Found AWS bucket $IT_CASE_S3_BUCKET, running the e2e test."
 fi
 
-if [[ -z "$ARTIFACTS_AWS_ACCESS_KEY" ]]; then
+if [[ -z "$IT_CASE_S3_ACCESS_KEY" ]]; then
     echo "Did not find AWS environment variables, NOT running the e2e test."
     exit 0
 else
-    echo "Found AWS access key $ARTIFACTS_AWS_ACCESS_KEY, running the e2e test."
+    echo "Found AWS access key $IT_CASE_S3_ACCESS_KEY, running the e2e test."
 fi
 
-if [[ -z "$ARTIFACTS_AWS_SECRET_KEY" ]]; then
+if [[ -z "$IT_CASE_S3_SECRET_KEY" ]]; then
     echo "Did not find AWS environment variables, NOT running the e2e test."
     exit 0
 else
-    echo "Found AWS secret key $ARTIFACTS_AWS_SECRET_KEY, running the e2e test."
+    echo "Found AWS secret key $IT_CASE_S3_SECRET_KEY, running the e2e test."
 fi
 
-AWS_REGION="${AWS_REGION:-eu-west-1}"
-AWS_ACCESS_KEY=$ARTIFACTS_AWS_ACCESS_KEY
-AWS_SECRET_KEY=$ARTIFACTS_AWS_SECRET_KEY
+# config AWS client
+AWS_REGION="${IT_CASE_S3_REGION:-eu-east-1}"
+AWS_ACCESS_KEY=$IT_CASE_S3_ACCESS_KEY
+AWS_SECRET_KEY=$IT_CASE_S3_SECRET_KEY
 
 s3util="java -jar ${END_TO_END_DIR}/flink-e2e-test-utils/target/S3UtilProgram.jar"
 
+SHADED_S3_INPUT=s3://$IT_CASE_S3_BUCKET/static/words
+
 ###################################
 # Setup Flink s3 access.
 #
 # Globals:
 #   FLINK_DIR
-#   ARTIFACTS_AWS_ACCESS_KEY
-#   ARTIFACTS_AWS_SECRET_KEY
+#   IT_CASE_S3_ACCESS_KEY
+#   IT_CASE_S3_SECRET_KEY
 # Arguments:
 #   None
 # Returns:
@@ -68,8 +71,8 @@ function s3_setup {
   trap s3_cleanup EXIT
 
   cp $FLINK_DIR/opt/flink-s3-fs-hadoop-*.jar $FLINK_DIR/lib/
-  echo "s3.access-key: $ARTIFACTS_AWS_ACCESS_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
-  echo "s3.secret-key: $ARTIFACTS_AWS_SECRET_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
+  echo "s3.access-key: $IT_CASE_S3_ACCESS_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
+  echo "s3.secret-key: $IT_CASE_S3_SECRET_KEY" >> "$FLINK_DIR/conf/flink-conf.yaml"
 }
 
 s3_setup
@@ -78,7 +81,7 @@ s3_setup
 # List s3 objects by full path prefix.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - s3 full path key prefix
 # Returns:
@@ -86,14 +89,14 @@ s3_setup
 ###################################
 function s3_list {
   AWS_REGION=$AWS_REGION \
-  ${s3util} --action listByFullPathPrefix --s3prefix "$1" --bucket $ARTIFACTS_AWS_BUCKET
+  ${s3util} --action listByFullPathPrefix --s3prefix "$1" --bucket $IT_CASE_S3_BUCKET
 }
 
 ###################################
 # Download s3 object.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - local path to save file
 #   $2 - s3 object key
@@ -102,14 +105,14 @@ function s3_list {
 ###################################
 function s3_get {
   AWS_REGION=$AWS_REGION \
-  ${s3util} --action downloadFile --localFile "$1" --s3file "$2" --bucket $ARTIFACTS_AWS_BUCKET
+  ${s3util} --action downloadFile --localFile "$1" --s3file "$2" --bucket $IT_CASE_S3_BUCKET
 }
 
 ###################################
 # Download s3 objects to folder by full path prefix.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - local path to save folder with files
 #   $2 - s3 key full path prefix
@@ -121,14 +124,14 @@ function s3_get_by_full_path_and_filename_prefix {
   local file_prefix="${3-}"
   AWS_REGION=$AWS_REGION \
   ${s3util} --action downloadByFullPathAndFileNamePrefix \
-    --localFolder "$1" --s3prefix "$2" --s3filePrefix "${file_prefix}" --bucket $ARTIFACTS_AWS_BUCKET
+    --localFolder "$1" --s3prefix "$2" --s3filePrefix "${file_prefix}" --bucket $IT_CASE_S3_BUCKET
 }
 
 ###################################
 # Upload file to s3 object.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - local file to upload
 #   $2 - s3 bucket
@@ -144,8 +147,8 @@ function s3_put {
   contentType="application/octet-stream"
   dateValue=`date -R`
   stringToSign="PUT\n\n${contentType}\n${dateValue}\n${resource}"
-  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
-  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
+  s3Key=$IT_CASE_S3_ACCESS_KEY
+  s3Secret=$IT_CASE_S3_SECRET_KEY
   signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
   curl -X PUT -T "${local_file}" \
     -H "Host: ${bucket}.s3.amazonaws.com" \
@@ -174,8 +177,8 @@ function s3_delete {
   contentType="application/octet-stream"
   dateValue=`date -R`
   stringToSign="DELETE\n\n${contentType}\n${dateValue}\n${resource}"
-  s3Key=$ARTIFACTS_AWS_ACCESS_KEY
-  s3Secret=$ARTIFACTS_AWS_SECRET_KEY
+  s3Key=$IT_CASE_S3_ACCESS_KEY
+  s3Secret=$IT_CASE_S3_SECRET_KEY
   signature=`echo -en ${stringToSign} | openssl sha1 -hmac ${s3Secret} -binary | base64`
   curl -X DELETE \
     -H "Host: ${bucket}.s3.amazonaws.com" \
@@ -189,7 +192,7 @@ function s3_delete {
 # Delete s3 objects by full path prefix.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - s3 key full path prefix
 # Returns:
@@ -197,7 +200,7 @@ function s3_delete {
 ###################################
 function s3_delete_by_full_path_prefix {
   AWS_REGION=$AWS_REGION \
-  ${s3util} --action deleteByFullPathPrefix --s3prefix "$1" --bucket $ARTIFACTS_AWS_BUCKET
+  ${s3util} --action deleteByFullPathPrefix --s3prefix "$1" --bucket $IT_CASE_S3_BUCKET
 }
 
 ###################################
@@ -206,7 +209,7 @@ function s3_delete_by_full_path_prefix {
 # because SQL is used to query the s3 object.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - s3 file object key
 #   $2 - s3 bucket
@@ -215,7 +218,7 @@ function s3_delete_by_full_path_prefix {
 ###################################
 function s3_get_number_of_lines_in_file {
   AWS_REGION=$AWS_REGION \
-  ${s3util} --action numberOfLinesInFile --s3file "$1" --bucket $ARTIFACTS_AWS_BUCKET
+  ${s3util} --action numberOfLinesInFile --s3file "$1" --bucket $IT_CASE_S3_BUCKET
 }
 
 ###################################
@@ -224,7 +227,7 @@ function s3_get_number_of_lines_in_file {
 # because SQL is used to query the s3 objects.
 #
 # Globals:
-#   ARTIFACTS_AWS_BUCKET
+#   IT_CASE_S3_BUCKET
 # Arguments:
 #   $1 - s3 key prefix
 #   $2 - s3 bucket
@@ -236,5 +239,5 @@ function s3_get_number_of_lines_by_prefix {
   local file_prefix="${3-}"
   AWS_REGION=$AWS_REGION \
   ${s3util} --action numberOfLinesInFilesWithFullAndNamePrefix \
-    --s3prefix "$1" --s3filePrefix "${file_prefix}" --bucket $ARTIFACTS_AWS_BUCKET
+    --s3prefix "$1" --s3filePrefix "${file_prefix}" --bucket $IT_CASE_S3_BUCKET
 }
diff --git a/flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh b/flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh
index 3d838675852..43edb555b99 100755
--- a/flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh
+++ b/flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh
@@ -22,15 +22,8 @@
 source "$(dirname "$0")"/common.sh
 source "$(dirname "$0")"/common_s3.sh
 
-s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-s3a
-# make sure we delete the file at the end
-function shaded_s3a_cleanup {
-  s3_delete $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-s3a
-}
-trap shaded_s3a_cleanup EXIT
-
 start_cluster
 
-$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input s3:/$resource --output $TEST_DATA_DIR/out/wc_out
+$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input $SHADED_S3_INPUT --output $TEST_DATA_DIR/out/wc_out
 
 check_result_hash "WordCountWithShadedS3A" $TEST_DATA_DIR/out/wc_out "72a690412be8928ba239c2da967328a5"
diff --git a/flink-end-to-end-tests/test-scripts/test_shaded_presto_s3.sh b/flink-end-to-end-tests/test-scripts/test_shaded_presto_s3.sh
index bd33b410dfd..2a52bfd15b9 100755
--- a/flink-end-to-end-tests/test-scripts/test_shaded_presto_s3.sh
+++ b/flink-end-to-end-tests/test-scripts/test_shaded_presto_s3.sh
@@ -22,15 +22,8 @@
 source "$(dirname "$0")"/common.sh
 source "$(dirname "$0")"/common_s3.sh
 
-s3_put $TEST_INFRA_DIR/test-data/words $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-presto-s3
-# make sure we delete the file at the end
-function shaded_presto_s3_cleanup {
-  s3_delete $ARTIFACTS_AWS_BUCKET flink-end-to-end-test-shaded-presto-s3
-}
-trap shaded_presto_s3_cleanup EXIT
-
 start_cluster
 
-$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input s3:/$resource --output $TEST_DATA_DIR/out/wc_out
+$FLINK_DIR/bin/flink run -p 1 $FLINK_DIR/examples/batch/WordCount.jar --input $SHADED_S3_INPUT --output $TEST_DATA_DIR/out/wc_out
 
 check_result_hash "WordCountWithShadedPrestoS3" $TEST_DATA_DIR/out/wc_out "72a690412be8928ba239c2da967328a5"
diff --git a/flink-end-to-end-tests/test-scripts/test_streaming_file_sink.sh b/flink-end-to-end-tests/test-scripts/test_streaming_file_sink.sh
index 6c8d0b85435..e810e68bde7 100755
--- a/flink-end-to-end-tests/test-scripts/test_streaming_file_sink.sh
+++ b/flink-end-to-end-tests/test-scripts/test_streaming_file_sink.sh
@@ -24,9 +24,9 @@ source "$(dirname "$0")"/common_s3.sh
 
 set_conf_ssl "mutual"
 
-OUT=out
+OUT=temp/test_streaming_file_sink-$(uuidgen)
 OUTPUT_PATH="$TEST_DATA_DIR/$OUT"
-S3_OUTPUT_PATH="s3://$ARTIFACTS_AWS_BUCKET/$OUT"
+S3_OUTPUT_PATH="s3://$IT_CASE_S3_BUCKET/$OUT"
 
 mkdir -p $OUTPUT_PATH
 
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemBehaviorITCase.java b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemBehaviorITCase.java
index c8aaaeef74d..9dc5c6de1ef 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemBehaviorITCase.java
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemBehaviorITCase.java
@@ -36,12 +36,12 @@
  */
 public class HadoopS3FileSystemBehaviorITCase extends FileSystemBehaviorTestSuite {
 
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
 
 	@BeforeClass
 	public static void checkCredentialsAndSetup() throws IOException {
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemITCase.java b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemITCase.java
index 6dbdac511f4..c75dd79b274 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemITCase.java
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemITCase.java
@@ -67,11 +67,11 @@
 		return Arrays.asList("s3", "s3a");
 	}
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
 
 	/**
 	 * Will be updated by {@link #checkCredentialsAndSetup()} if the test is not skipped.
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterExceptionTest.java b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterExceptionTest.java
index 634fa00344d..8883378c33f 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterExceptionTest.java
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterExceptionTest.java
@@ -55,9 +55,9 @@
 
 	// ----------------------- S3 general configuration -----------------------
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
 	private static final long PART_UPLOAD_MIN_SIZE_VALUE = 7L << 20;
 	private static final int MAX_CONCURRENT_UPLOADS_VALUE = 2;
@@ -66,7 +66,7 @@
 
 	private static final Random RND = new Random();
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
 	private static final Path basePath = new Path("s3://" + BUCKET + '/' + TEST_DATA_DIR);
 
diff --git a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterTest.java b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterTest.java
index 4a1368a815e..bbc02f34c21 100644
--- a/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterTest.java
+++ b/flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3RecoverableWriterTest.java
@@ -63,9 +63,9 @@
 
 	// ----------------------- S3 general configuration -----------------------
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
 	private static final long PART_UPLOAD_MIN_SIZE_VALUE = 7L << 20;
 	private static final int MAX_CONCURRENT_UPLOADS_VALUE = 2;
@@ -74,7 +74,7 @@
 
 	private static final Random RND = new Random();
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
 	private static final Path basePath = new Path("s3://" + BUCKET + '/' + TEST_DATA_DIR);
 
diff --git a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemBehaviorITCase.java b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemBehaviorITCase.java
index 812404ce639..e7c69b4523b 100644
--- a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemBehaviorITCase.java
+++ b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemBehaviorITCase.java
@@ -36,12 +36,12 @@
  */
 public class PrestoS3FileSystemBehaviorITCase extends FileSystemBehaviorTestSuite {
 
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
 
 	@BeforeClass
 	public static void checkCredentialsAndSetup() throws IOException {
diff --git a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemITCase.java b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemITCase.java
index cc5c9935202..0ec693ebc8a 100644
--- a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemITCase.java
+++ b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3FileSystemITCase.java
@@ -68,12 +68,12 @@
 		return Arrays.asList("s3", "s3p");
 	}
 
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
 
 	@BeforeClass
 	public static void checkIfCredentialsArePresent() {
diff --git a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3RecoverableWriterTest.java b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3RecoverableWriterTest.java
index 580d957db23..1f37fc24c5b 100644
--- a/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3RecoverableWriterTest.java
+++ b/flink-filesystems/flink-s3-fs-presto/src/test/java/org/apache/flink/fs/s3presto/PrestoS3RecoverableWriterTest.java
@@ -42,16 +42,16 @@
 
 	// ----------------------- S3 general configuration -----------------------
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
 	private static final long PART_UPLOAD_MIN_SIZE_VALUE = 7L << 20;
 	private static final int MAX_CONCURRENT_UPLOADS_VALUE = 2;
 
 	// ----------------------- Test Specific configuration -----------------------
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
 	private static final Path basePath = new Path("s3://" + BUCKET + '/' + TEST_DATA_DIR);
 
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
index e1e95b1c379..f4e5227affb 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
@@ -53,12 +53,12 @@
  */
 public class YarnFileStageTestS3ITCase extends TestLogger {
 
-	private static final String BUCKET = System.getenv("ARTIFACTS_AWS_BUCKET");
+	private static final String BUCKET = System.getenv("IT_CASE_S3_BUCKET");
 
-	private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
+	private static final String TEST_DATA_DIR = "temp/tests-" + UUID.randomUUID();
 
-	private static final String ACCESS_KEY = System.getenv("ARTIFACTS_AWS_ACCESS_KEY");
-	private static final String SECRET_KEY = System.getenv("ARTIFACTS_AWS_SECRET_KEY");
+	private static final String ACCESS_KEY = System.getenv("IT_CASE_S3_ACCESS_KEY");
+	private static final String SECRET_KEY = System.getenv("IT_CASE_S3_SECRET_KEY");
 
 	@ClassRule
 	public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();
diff --git a/tools/travis_mvn_watchdog.sh b/tools/travis_mvn_watchdog.sh
index 63c177258c4..55121158529 100755
--- a/tools/travis_mvn_watchdog.sh
+++ b/tools/travis_mvn_watchdog.sh
@@ -72,9 +72,9 @@ TRACE_OUT="${ARTIFACTS_DIR}/jps-traces.out"
 UPLOAD_TARGET_PATH="travis-artifacts/${TRAVIS_REPO_SLUG}/${TRAVIS_BUILD_NUMBER}/"
 # These variables are stored as secure variables in '.travis.yml', which are generated per repo via
 # the travis command line tool.
-UPLOAD_BUCKET=$ARTIFACTS_AWS_BUCKET
-UPLOAD_ACCESS_KEY=$ARTIFACTS_AWS_ACCESS_KEY
-UPLOAD_SECRET_KEY=$ARTIFACTS_AWS_SECRET_KEY
+UPLOAD_BUCKET=$IT_CASE_S3_BUCKET
+UPLOAD_ACCESS_KEY=$IT_CASE_S3_ACCESS_KEY
+UPLOAD_SECRET_KEY=$IT_CASE_S3_SECRET_KEY
 
 ARTIFACTS_FILE=${TRAVIS_JOB_NUMBER}.tar.gz
 
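For anyone running these suites locally, a minimal sketch of how the renamed
variables would be exported before invoking one of the updated scripts. The
bucket name, credentials, and region below are placeholders, not values from
this PR. Note that common_s3.sh exits early with a notice if any of the
variables is unset, and the shaded WordCount tests now expect the input to
already exist at s3://$IT_CASE_S3_BUCKET/static/words:

    # Placeholder values -- substitute your own test bucket and credentials.
    export IT_CASE_S3_BUCKET=my-flink-test-bucket
    export IT_CASE_S3_ACCESS_KEY=AKIA................
    export IT_CASE_S3_SECRET_KEY=****************************************
    export IT_CASE_S3_REGION=eu-west-1  # set explicitly rather than relying on the default

    # Run one of the e2e scripts touched by this PR
    # (assumes FLINK_DIR and END_TO_END_DIR are set as the e2e harness sets them).
    ./flink-end-to-end-tests/test-scripts/test_shaded_hadoop_s3a.sh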


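The streaming file sink test and the ITCases now write under unique temp/
prefixes (temp/test_streaming_file_sink-$(uuidgen), "temp/tests-" + UUID), so
leftovers from aborted runs can be swept in one call. A hedged sketch that
mirrors what s3_delete_by_full_path_prefix in common_s3.sh does; the jar path
assumes END_TO_END_DIR points at flink-end-to-end-tests and that the
flink-e2e-test-utils module has been built, and credential resolution for
S3UtilProgram is assumed to follow the AWS SDK default chain:

    # Delete every object under the temp/ prefix of the test bucket.
    AWS_REGION=$IT_CASE_S3_REGION \
    java -jar flink-end-to-end-tests/flink-e2e-test-utils/target/S3UtilProgram.jar \
      --action deleteByFullPathPrefix --s3prefix "temp/" --bucket "$IT_CASE_S3_BUCKET"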
 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services