You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by rm...@apache.org on 2020/09/10 14:44:15 UTC
[flink] branch master updated: [FLINK-18980][e2e] Add timeout to
get logs from stalling test
This is an automated email from the ASF dual-hosted git repository.
rmetzger pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/master by this push:
new 92e2f3b [FLINK-18980][e2e] Add timeout to get logs from stalling test
92e2f3b is described below
commit 92e2f3b12165cfc0d0c5bc96656e87029ee5e694
Author: Robert Metzger <rm...@apache.org>
AuthorDate: Wed Sep 9 10:02:28 2020 +0200
[FLINK-18980][e2e] Add timeout to get logs from stalling test
---
.../test-scripts/test_confluent_schema_registry.sh | 84 +++++++++++-----------
1 file changed, 44 insertions(+), 40 deletions(-)
diff --git a/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh b/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
index a023f39..2090b1e 100755
--- a/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
+++ b/flink-end-to-end-tests/test-scripts/test_confluent_schema_registry.sh
@@ -46,58 +46,62 @@ function test_cleanup {
on_exit test_cleanup
-setup_kafka_dist
-setup_confluent_dist
+function schema_registry_test {
+ setup_kafka_dist
+ setup_confluent_dist
-retry_times_with_backoff_and_cleanup 3 5 test_setup test_cleanup
+ retry_times_with_backoff_and_cleanup 3 5 test_setup test_cleanup
-TEST_PROGRAM_JAR=${END_TO_END_DIR}/flink-confluent-schema-registry/target/TestAvroConsumerConfluent.jar
+ TEST_PROGRAM_JAR=${END_TO_END_DIR}/flink-confluent-schema-registry/target/TestAvroConsumerConfluent.jar
-INPUT_MESSAGE_1='{"name":"Alyssa","favoriteNumber":"250","favoriteColor":"green","eventType":"meeting"}'
-INPUT_MESSAGE_2='{"name":"Charlie","favoriteNumber":"10","favoriteColor":"blue","eventType":"meeting"}'
-INPUT_MESSAGE_3='{"name":"Ben","favoriteNumber":"7","favoriteColor":"red","eventType":"meeting"}'
-USER_SCHEMA='{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string","default":""},{"name":"favoriteNumber","type":"string","default":""},{"name":"favoriteColor","type":"string","default":""},{"name":"eventType","type":{"name":"EventType","type":"enum","symbols":["meeting"]}}]}'
+ INPUT_MESSAGE_1='{"name":"Alyssa","favoriteNumber":"250","favoriteColor":"green","eventType":"meeting"}'
+ INPUT_MESSAGE_2='{"name":"Charlie","favoriteNumber":"10","favoriteColor":"blue","eventType":"meeting"}'
+ INPUT_MESSAGE_3='{"name":"Ben","favoriteNumber":"7","favoriteColor":"red","eventType":"meeting"}'
+ USER_SCHEMA='{"namespace":"example.avro","type":"record","name":"User","fields":[{"name":"name","type":"string","default":""},{"name":"favoriteNumber","type":"string","default":""},{"name":"favoriteColor","type":"string","default":""},{"name":"eventType","type":{"name":"EventType","type":"enum","symbols":["meeting"]}}]}'
-curl -X POST \
- ${SCHEMA_REGISTRY_URL}/subjects/users-value/versions \
- -H 'cache-control: no-cache' \
- -H 'content-type: application/vnd.schemaregistry.v1+json' \
- -d '{"schema": "{\"namespace\": \"example.avro\",\"type\": \"record\",\"name\": \"User\",\"fields\": [{\"name\": \"name\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"favoriteNumber\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"favoriteColor\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"eventType\",\"type\": {\"name\": \"EventType\",\"type\": \"enum\", \"symbols\": [\"meeting\"] }}]}"}'
+ curl -X POST \
+ ${SCHEMA_REGISTRY_URL}/subjects/users-value/versions \
+ -H 'cache-control: no-cache' \
+ -H 'content-type: application/vnd.schemaregistry.v1+json' \
+ -d '{"schema": "{\"namespace\": \"example.avro\",\"type\": \"record\",\"name\": \"User\",\"fields\": [{\"name\": \"name\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"favoriteNumber\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"favoriteColor\", \"type\": \"string\", \"default\": \"\"},{\"name\": \"eventType\",\"type\": {\"name\": \"EventType\",\"type\": \"enum\", \"symbols\": [\"meeting\"] }}]}"}'
-echo "Sending messages to Kafka topic [test-avro-input] ..."
+ echo "Sending messages to Kafka topic [test-avro-input] ..."
-send_messages_to_kafka_avro $INPUT_MESSAGE_1 test-avro-input $USER_SCHEMA
-send_messages_to_kafka_avro $INPUT_MESSAGE_2 test-avro-input $USER_SCHEMA
-send_messages_to_kafka_avro $INPUT_MESSAGE_3 test-avro-input $USER_SCHEMA
+ send_messages_to_kafka_avro $INPUT_MESSAGE_1 test-avro-input $USER_SCHEMA
+ send_messages_to_kafka_avro $INPUT_MESSAGE_2 test-avro-input $USER_SCHEMA
+ send_messages_to_kafka_avro $INPUT_MESSAGE_3 test-avro-input $USER_SCHEMA
-start_cluster
+ start_cluster
-create_kafka_topic 1 1 test-string-out
-create_kafka_topic 1 1 test-avro-out
+ create_kafka_topic 1 1 test-string-out
+ create_kafka_topic 1 1 test-avro-out
-# Read Avro message from [test-avro-input], check the schema and send message to [test-string-ou]
-$FLINK_DIR/bin/flink run -d $TEST_PROGRAM_JAR \
- --input-topic test-avro-input --output-string-topic test-string-out --output-avro-topic test-avro-out --output-subject test-output-subject \
- --bootstrap.servers localhost:9092 --group.id myconsumer --auto.offset.reset earliest \
- --schema-registry-url ${SCHEMA_REGISTRY_URL}
+ # Read Avro message from [test-avro-input], check the schema and send message to [test-string-out]
+ $FLINK_DIR/bin/flink run -d $TEST_PROGRAM_JAR \
+ --input-topic test-avro-input --output-string-topic test-string-out --output-avro-topic test-avro-out --output-subject test-output-subject \
+ --bootstrap.servers localhost:9092 --group.id myconsumer --auto.offset.reset earliest \
+ --schema-registry-url ${SCHEMA_REGISTRY_URL}
-#echo "Reading messages from Kafka topic [test-string-ou] ..."
+ echo "Reading messages from Kafka topic [test-string-out] ..."
-KEY_1_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Alyssa_consumer | grep Alyssa)
-KEY_2_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Charlie_consumer | grep Charlie)
-KEY_3_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Ben_consumer | grep Ben)
+ KEY_1_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Alyssa_consumer | grep Alyssa)
+ KEY_2_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Charlie_consumer | grep Charlie)
+ KEY_3_STRING_MSGS=$(read_messages_from_kafka 3 test-string-out Ben_consumer | grep Ben)
-## Verifying STRING output with actual message
-verify_output $INPUT_MESSAGE_1 "$KEY_1_STRING_MSGS"
-verify_output $INPUT_MESSAGE_2 "$KEY_2_STRING_MSGS"
-verify_output $INPUT_MESSAGE_3 "$KEY_3_STRING_MSGS"
+ ## Verifying STRING output with actual message
+ verify_output $INPUT_MESSAGE_1 "$KEY_1_STRING_MSGS"
+ verify_output $INPUT_MESSAGE_2 "$KEY_2_STRING_MSGS"
+ verify_output $INPUT_MESSAGE_3 "$KEY_3_STRING_MSGS"
-KEY_1_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Alyssa_consumer_1 | grep Alyssa)
-KEY_2_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Charlie_consumer_1 | grep Charlie)
-KEY_3_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Ben_consumer_1 | grep Ben)
+ KEY_1_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Alyssa_consumer_1 | grep Alyssa)
+ KEY_2_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Charlie_consumer_1 | grep Charlie)
+ KEY_3_AVRO_MSGS=$(read_messages_from_kafka_avro 3 test-avro-out $USER_SCHEMA Ben_consumer_1 | grep Ben)
-## Verifying AVRO output with actual message
-verify_output $INPUT_MESSAGE_1 "$KEY_1_AVRO_MSGS"
-verify_output $INPUT_MESSAGE_2 "$KEY_2_AVRO_MSGS"
-verify_output $INPUT_MESSAGE_3 "$KEY_3_AVRO_MSGS"
+ ## Verifying AVRO output with actual message
+ verify_output $INPUT_MESSAGE_1 "$KEY_1_AVRO_MSGS"
+ verify_output $INPUT_MESSAGE_2 "$KEY_2_AVRO_MSGS"
+ verify_output $INPUT_MESSAGE_3 "$KEY_3_AVRO_MSGS"
+}
+
+run_test_with_timeout 900 schema_registry_test