Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/06/16 13:19:11 UTC

Build failed in Jenkins: beam_LoadTests_Python_ParDo_Flink_Streaming #41

See <https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Flink_Streaming/41/display/redirect?page=changes>

Changes:

[davidyan] [BEAM-10247] Pin google-api-core to 1.17.0, because otherwise the pulled

[davidyan] Bumping grpcio version to 1.29.0 to be compatible with

[ningk] Update screen_diff deps and goldens as stable Chrome version advances.

[robinyqiu] Add zetaSqlValueToJavaObject() with unknown target type

[daniel.o.programmer] [BEAM-9951] Fixing some lint bugs.

[stuart.m.perks] BEAM-10221: Add in four tests cases of base on the java equivalent for

[davidyan] added rsa<4.1 for python2

[bhulette] Lump together PMC-only steps

[github] Clarify release guide for publishing release notes to GitHub (#12015)


------------------------------------------
[...truncated 46.96 KB...]
2be8fac4a55f: Layer already exists
dabd0b48256e: Layer already exists
98d95bdfa037: Layer already exists
37713b95c3f9: Pushed
da9418a2e1b1: Layer already exists
2e5b4ca91984: Layer already exists
527ade4639e0: Layer already exists
c2c789d2d3c5: Layer already exists
8803ef42039d: Layer already exists
abac0c96cba0: Pushed
e106afd42dea: Pushed
01374051a9cd: Pushed
latest: digest: sha256:839b9439868b4a561d8d2d60a92fc9ed233a0c69897f1cbc47b378fddb49cced size: 4938
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Flink_Streaming/ws/src/gradlew> --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g -Pdocker-pull-licenses -Pdocker-repository-root=gcr.io/apache-beam-testing/beam_portability -Pdocker-tag=streaming-load-tests :runners:flink:1.10:job-server-container:docker
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy UP-TO-DATE
> Task :buildSrc:pluginDescriptors UP-TO-DATE
> Task :buildSrc:processResources UP-TO-DATE
> Task :buildSrc:classes UP-TO-DATE
> Task :buildSrc:jar UP-TO-DATE
> Task :buildSrc:assemble UP-TO-DATE
> Task :buildSrc:spotlessGroovy UP-TO-DATE
> Task :buildSrc:spotlessGroovyCheck UP-TO-DATE
> Task :buildSrc:spotlessGroovyGradle UP-TO-DATE
> Task :buildSrc:spotlessGroovyGradleCheck UP-TO-DATE
> Task :buildSrc:spotlessCheck UP-TO-DATE
> Task :buildSrc:pluginUnderTestMetadata UP-TO-DATE
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties UP-TO-DATE
> Task :buildSrc:check UP-TO-DATE
> Task :buildSrc:build UP-TO-DATE
Configuration on demand is an incubating feature.
> Task :sdks:java:core:generateAvroProtocol NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources NO-SOURCE
> Task :runners:core-construction-java:processResources NO-SOURCE
> Task :runners:core-java:processResources NO-SOURCE
> Task :vendor:sdks-java-extensions-protobuf:processResources NO-SOURCE
> Task :sdks:java:fn-execution:processResources NO-SOURCE
> Task :sdks:java:harness:processResources NO-SOURCE
> Task :sdks:java:core:generateAvroJava NO-SOURCE
> Task :sdks:java:expansion-service:processResources NO-SOURCE
> Task :runners:java-fn-execution:processResources NO-SOURCE
> Task :runners:java-job-service:processResources NO-SOURCE
> Task :runners:flink:1.10:copyResourcesOverrides NO-SOURCE
> Task :runners:flink:1.10:job-server:processResources NO-SOURCE
> Task :sdks:java:io:google-cloud-platform:processResources NO-SOURCE
> Task :sdks:java:io:kafka:processResources NO-SOURCE
> Task :model:fn-execution:extractProto
> Task :model:job-management:extractProto
> Task :sdks:java:extensions:protobuf:extractProto
> Task :sdks:java:extensions:protobuf:processResources NO-SOURCE
> Task :runners:flink:1.10:job-server-container:copyLicenses
> Task :runners:flink:1.10:job-server-container:dockerClean UP-TO-DATE
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :runners:flink:1.10:copySourceOverrides
> Task :runners:flink:1.10:copyTestResourcesOverrides NO-SOURCE
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :runners:flink:1.10:processResources
> Task :sdks:java:core:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :model:pipeline:jar
> Task :model:job-management:extractIncludeProto
> Task :model:fn-execution:extractIncludeProto
> Task :model:job-management:generateProto
> Task :model:fn-execution:generateProto
> Task :model:job-management:compileJava FROM-CACHE
> Task :model:job-management:classes
> Task :model:fn-execution:compileJava FROM-CACHE
> Task :model:fn-execution:classes
> Task :model:pipeline:shadowJar
> Task :model:job-management:shadowJar
> Task :model:fn-execution:shadowJar
> Task :sdks:java:core:compileJava FROM-CACHE
> Task :sdks:java:core:classes
> Task :sdks:java:core:shadowJar
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :runners:core-construction-java:compileJava FROM-CACHE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :vendor:sdks-java-extensions-protobuf:compileJava FROM-CACHE
> Task :vendor:sdks-java-extensions-protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :sdks:java:extensions:protobuf:extractIncludeProto
> Task :sdks:java:extensions:protobuf:generateProto NO-SOURCE
> Task :sdks:java:extensions:protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar
> Task :sdks:java:fn-execution:compileJava FROM-CACHE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :runners:core-construction-java:jar
> Task :sdks:java:fn-execution:jar
> Task :vendor:sdks-java-extensions-protobuf:shadowJar
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar
> Task :sdks:java:harness:shadowJar
> Task :runners:java-fn-execution:compileJava FROM-CACHE
> Task :runners:java-fn-execution:classes UP-TO-DATE
> Task :runners:java-fn-execution:jar
> Task :sdks:java:expansion-service:compileJava FROM-CACHE
> Task :sdks:java:expansion-service:classes UP-TO-DATE
> Task :sdks:java:expansion-service:jar
> Task :runners:java-job-service:compileJava FROM-CACHE
> Task :runners:java-job-service:classes UP-TO-DATE
> Task :runners:java-job-service:jar
> Task :sdks:java:io:kafka:compileJava FROM-CACHE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :sdks:java:io:kafka:jar
> Task :runners:flink:1.10:compileJava FROM-CACHE
> Task :runners:flink:1.10:classes
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :runners:flink:1.10:jar
> Task :runners:flink:1.10:job-server:compileJava NO-SOURCE
> Task :runners:flink:1.10:job-server:classes UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:jar
> Task :runners:flink:1.10:job-server:shadowJar
> Task :runners:flink:1.10:job-server-container:copyDockerfileDependencies
> Task :runners:flink:1.10:job-server-container:dockerPrepare
> Task :runners:flink:1.10:job-server-container:docker

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD SUCCESSFUL in 1m 14s
63 actionable tasks: 43 executed, 19 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/pbvx7fx3sbema

[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins5609681643244292967.sh
+ echo 'Tagging image...'
Tagging image...
[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins5404840496398980296.sh
+ docker tag gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins136972220957133529.sh
+ echo 'Pushing image...'
Pushing image...
[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins2163123318382198885.sh
+ docker push gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
The push refers to repository [gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server]
06492bee20b7: Preparing
b67b4f8d2d66: Preparing
51ff4013e678: Preparing
2a9b5df5d4cb: Preparing
6d4f54742621: Preparing
cb8e2372b23c: Preparing
68bb2d422178: Preparing
f5181c7ef902: Preparing
2e5b4ca91984: Preparing
527ade4639e0: Preparing
c2c789d2d3c5: Preparing
8803ef42039d: Preparing
cb8e2372b23c: Waiting
68bb2d422178: Waiting
f5181c7ef902: Waiting
2e5b4ca91984: Waiting
527ade4639e0: Waiting
8803ef42039d: Waiting
c2c789d2d3c5: Waiting
51ff4013e678: Pushed
06492bee20b7: Pushed
b67b4f8d2d66: Pushed
cb8e2372b23c: Layer already exists
68bb2d422178: Layer already exists
f5181c7ef902: Layer already exists
2e5b4ca91984: Layer already exists
527ade4639e0: Layer already exists
c2c789d2d3c5: Layer already exists
8803ef42039d: Layer already exists
6d4f54742621: Pushed
2a9b5df5d4cb: Pushed
streaming-load-tests: digest: sha256:80187197b45f457dbe1994e69a1344295e49dcfad1a0aa2767b15bd598ea1bce size: 2841
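
A hedged aside for anyone who wants to inspect the image that was just pushed: the image name and tag come from the log above, the docker commands are standard, and port 8099 is only an assumption (it is the usual default job endpoint for Beam job servers, not something this log confirms).

  # Pull the job-server image pushed above and run it locally for a quick smoke check.
  docker pull gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
  # Expose the assumed default job endpoint (8099) so a portable pipeline could submit to it.
  docker run --rm -p 8099:8099 gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
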
[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
CLUSTER_NAME=beam-loadtests-python-pardo-flink-streaming-41
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:latest
FLINK_NUM_WORKERS=5
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-python-pardo-flink-streaming-41
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
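
The shell step below changes into .test-infra/dataproc and runs ./flink_cluster.sh create with the variables injected above in its environment. A minimal local reproduction sketch, assuming the script reads exactly these variable names from the environment (the set -x trace that follows suggests this but does not prove it):

  # Hypothetical reproduction of the cluster-setup step; values copied from the EnvInject block above.
  export CLUSTER_NAME=beam-loadtests-python-pardo-flink-streaming-41
  export GCS_BUCKET=gs://beam-flink-cluster
  export FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
  export HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
  export FLINK_NUM_WORKERS=5
  export FLINK_TASKMANAGER_SLOTS=1
  export DETACHED_MODE=true
  export HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:latest
  export JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
  export ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-python-pardo-flink-streaming-41
  export GCLOUD_ZONE=us-central1-a
  cd src/.test-infra/dataproc && ./flink_cluster.sh create
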
[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins6625377507778456042.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Python_ParDo_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins1486107383125773442.sh
+ cd <https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Flink_Streaming/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-python-pardo-flink-streaming-41-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
Operation completed over 3 objects/13.7 KiB.
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:latest ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:latest
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_workers=6
+ gcloud dataproc clusters create beam-loadtests-python-pardo-flink-streaming-41 --region=global --num-workers=6 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:latest,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:streaming-load-tests, --image-version=1.2 --zone=us-central1-a --quiet
ERROR: (gcloud.dataproc.clusters.create) ALREADY_EXISTS: Already exists: Failed to create cluster: Cluster projects/apache-beam-testing/regions/global/clusters/beam-loadtests-python-pardo-flink-streaming-41
Build step 'Execute shell' marked build as failure
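
The actual failure is the ALREADY_EXISTS error above: gcloud dataproc clusters create refused to create beam-loadtests-python-pardo-flink-streaming-41 because a cluster with that name already exists in the global region, most likely left behind by an earlier run whose teardown did not finish. A hedged cleanup sketch before re-running the job (describe/delete are standard gcloud dataproc commands; the cluster name and --region=global are taken from the log):

  # Remove the stale Dataproc cluster named in the error so the next run can create it fresh.
  CLUSTER=beam-loadtests-python-pardo-flink-streaming-41
  if gcloud dataproc clusters describe "${CLUSTER}" --region=global >/dev/null 2>&1; then
    gcloud dataproc clusters delete "${CLUSTER}" --region=global --quiet
  fi

Running the repository's own teardown entry point (for example ./flink_cluster.sh delete, if the script supports it) would be the more conventional route, but that interface is an assumption rather than something shown in this log.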

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org