Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2019/03/01 06:53:54 UTC
Build failed in Jenkins: beam_PostCommit_Python_Verify #7539
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7539/display/redirect>
------------------------------------------
[...truncated 302.08 KB...]
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "add_attribute"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [],
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [],
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "add_attribute.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s2"
},
"serialized_fn": "ref_AppliedPTransform_add_attribute_5",
"user_name": "add_attribute"
}
},
{
"kind": "ParallelDo",
"name": "s4",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "to_proto_str"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "WriteToPubSub/ToProtobuf.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s3"
},
"serialized_fn": "ref_AppliedPTransform_WriteToPubSub/ToProtobuf_7",
"user_name": "WriteToPubSub/ToProtobuf"
}
},
{
"kind": "ParallelWrite",
"name": "s5",
"properties": {
"display_data": [],
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"format": "pubsub",
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s4"
},
"pubsub_id_label": "id",
"pubsub_serialized_attributes_fn": "",
"pubsub_timestamp_label": "timestamp",
"pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_outpute12d6562-699e-4e02-8bce-a5f07e906089",
"user_name": "WriteToPubSub/Write/NativeWrite"
}
}
],
"type": "JOB_TYPE_STREAMING"
}
root: INFO: Create job: <Job
createTime: u'2019-03-01T06:26:29.298377Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2019-02-28_22_26_28-11822000511413559614'
location: u'us-central1'
name: u'beamapp-jenkins-0301062620-887264'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2019-03-01T06:26:29.298377Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
root: INFO: Created job with id: [2019-02-28_22_26_28-11822000511413559614]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_26_28-11822000511413559614?project=apache-beam-testing
root: INFO: Job 2019-02-28_22_26_28-11822000511413559614 is in state JOB_STATE_RUNNING
root: INFO: 2019-03-01T06:26:34.342Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-03-01T06:26:35.150Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-b.
root: INFO: 2019-03-01T06:26:35.592Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2019-03-01T06:26:35.599Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2019-03-01T06:26:35.602Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
root: INFO: 2019-03-01T06:26:35.604Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
root: INFO: 2019-03-01T06:26:35.606Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2019-03-01T06:26:35.615Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2019-03-01T06:26:35.617Z: JOB_MESSAGE_DETAILED: Fusing consumer ReadFromPubSub/Map(_from_proto_str) into ReadFromPubSub/Read
root: INFO: 2019-03-01T06:26:35.618Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/ToProtobuf into add_attribute
root: INFO: 2019-03-01T06:26:35.620Z: JOB_MESSAGE_DETAILED: Fusing consumer add_attribute into ReadFromPubSub/Map(_from_proto_str)
root: INFO: 2019-03-01T06:26:35.622Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/Write/NativeWrite into WriteToPubSub/ToProtobuf
root: INFO: 2019-03-01T06:26:35.629Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2019-03-01T06:26:35.672Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2019-03-01T06:26:35.681Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-03-01T06:26:35.778Z: JOB_MESSAGE_DEBUG: Executing wait step start2
root: INFO: 2019-03-01T06:26:35.790Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2019-03-01T06:26:35.794Z: JOB_MESSAGE_BASIC: Starting 1 workers...
root: INFO: 2019-03-01T06:26:36.812Z: JOB_MESSAGE_DETAILED: Pub/Sub resources set up for topic 'projects/apache-beam-testing/topics/psit_topic_inpute12d6562-699e-4e02-8bce-a5f07e906089'.
root: INFO: 2019-03-01T06:26:39.695Z: JOB_MESSAGE_BASIC: Executing operation ReadFromPubSub/Read+ReadFromPubSub/Map(_from_proto_str)+add_attribute+WriteToPubSub/ToProtobuf+WriteToPubSub/Write/NativeWrite
root: WARNING: Timing out on waiting for job 2019-02-28_22_26_28-11822000511413559614 after 183 seconds
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token HTTP/1.1" 200 176
root: ERROR: Timeout after 300 sec. Received 0 messages from projects/apache-beam-testing/subscriptions/psit_subscription_outpute12d6562-699e-4e02-8bce-a5f07e906089.
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 28 tests in 2945.246s
FAILED (SKIP=1, failures=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_59-1420767074734273997?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_12_35-6140688294830240909?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_19_35-1069137619377892149?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_25_06-10884828397227699684?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_32_36-8444610093625080700?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_41_01-10381273188657461668?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_47_16-14776898147569499467?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_05_00-14956155372437645513?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_19_15-18119060710730382185?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_59-15281091755917622107?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_05_00-12931017805355957527?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_17_50-6669481574970800208?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_26_47-17627648042472794078?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_40_14-14241246615696831483?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_58-10727472849562489076?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_58-3943905210128217895?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_11_45-3127197326631822984?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_18_36-14059385422239771435?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_25_56-15424031085562117455?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_58-12948148088224289853?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_12_15-6352208700709835335?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_20_18-14893144814031338408?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_26_28-11822000511413559614?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_04_58-13903030492684003957?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_13_09-2448149024421063641?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-02-28_22_21_54-7764036366281236231?project=apache-beam-testing.
> Task :beam-sdks-python:postCommitIT FAILED
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle'> line: 278
* What went wrong:
Execution failed for task ':beam-sdks-python:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 53m 33s
6 actionable tasks: 6 executed
Publishing build scan...
https://gradle.com/s/als6gusjwjq4w
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
Jenkins build is back to normal : beam_PostCommit_Python_Verify #7544
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7544/display/redirect>
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
Build failed in Jenkins: beam_PostCommit_Python_Verify #7543
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7543/display/redirect?page=changes>
Changes:
[ehudm] Allow setting dataflow_endpoint for Python ITs
------------------------------------------
Started by GitHub push by aaltay
[EnvInject] - Loading node environment variables.
Building remotely on beam9 (beam) in workspace <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/>
No credentials specified
> git rev-parse --is-inside-work-tree # timeout=10
Fetching changes from the remote Git repository
> git config remote.origin.url https://github.com/apache/beam.git # timeout=10
Fetching upstream changes from https://github.com/apache/beam.git
> git --version # timeout=10
> git fetch --tags --progress https://github.com/apache/beam.git +refs/heads/*:refs/remotes/origin/* +refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*
> git rev-parse origin/master^{commit} # timeout=10
Checking out Revision cf2f35c36355cc4e6805ae7aa56e3933457d83b2 (origin/master)
> git config core.sparsecheckout # timeout=10
> git checkout -f cf2f35c36355cc4e6805ae7aa56e3933457d83b2
Commit message: "Merge pull request #7894 from udim/kms-proto"
> git rev-list --no-walk c41b3c082d924059c393345f4b1e740804d2b877 # timeout=10
Cleaning workspace
> git rev-parse --verify HEAD # timeout=10
Resetting working tree
> git reset --hard # timeout=10
> git clean -fdx # timeout=10
No emails were triggered.
[EnvInject] - Executing scripts and injecting environment variables after the SCM step.
[EnvInject] - Injecting as environment variables the properties content
SPARK_LOCAL_IP=127.0.0.1
[EnvInject] - Variables injected successfully.
[Gradle] - Launching build.
[src] $ <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/gradlew> --continue --max-workers=12 -Dorg.gradle.jvmargs=-Xms2g -Dorg.gradle.jvmargs=-Xmx4g :pythonPostCommit
To honour the JVM settings for this build a new JVM will be forked. Please consider using the daemon: https://docs.gradle.org/5.2.1/userguide/gradle_daemon.html.
Daemon will be stopped at the end of the build stopping after processing
> Task :buildSrc:compileJava NO-SOURCE
> Task :buildSrc:compileGroovy FROM-CACHE
> Task :buildSrc:pluginDescriptors
> Task :buildSrc:processResources
> Task :buildSrc:classes
> Task :buildSrc:jar
> Task :buildSrc:assemble
> Task :buildSrc:spotlessGroovy
> Task :buildSrc:spotlessGroovyCheck
> Task :buildSrc:spotlessGroovyGradle
> Task :buildSrc:spotlessGroovyGradleCheck
> Task :buildSrc:spotlessCheck
> Task :buildSrc:pluginUnderTestMetadata
> Task :buildSrc:compileTestJava NO-SOURCE
> Task :buildSrc:compileTestGroovy NO-SOURCE
> Task :buildSrc:processTestResources NO-SOURCE
> Task :buildSrc:testClasses UP-TO-DATE
> Task :buildSrc:test NO-SOURCE
> Task :buildSrc:validateTaskProperties FROM-CACHE
> Task :buildSrc:check
> Task :buildSrc:build
Configuration on demand is an incubating feature.
> Task :beam-sdks-python:setupVirtualenv
New python executable in <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/bin/python2>
Also creating executable in <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/bin/python>
Installing setuptools, pkg_resources, pip, wheel...done.
Running virtualenv with interpreter /usr/bin/python2
DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7.
Exception:
Traceback (most recent call last):
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/cli/base_command.py",> line 179, in main
status = self.run(options, args)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/commands/install.py",> line 255, in run
with self._build_session(options) as session:
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/cli/base_command.py",> line 93, in _build_session
insecure_hosts=options.trusted_hosts,
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/download.py",> line 344, in __init__
self.headers["User-Agent"] = user_agent()
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/download.py",> line 108, in user_agent
zip(["name", "version", "id"], distro.linux_distribution()),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 120, in linux_distribution
return _distro.linux_distribution(full_distribution_name)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 675, in linux_distribution
self.version(),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 735, in version
self.lsb_release_attr('release'),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 892, in lsb_release_attr
return self._lsb_release_info.get(attribute, '')
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 550, in __get__
ret = obj.__dict__[self._fname] = self._f(obj)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 998, in _lsb_release_info
stdout = subprocess.check_output(cmd, stderr=devnull)
File "/usr/lib/python2.7/subprocess.py", line 574, in check_output
raise CalledProcessError(retcode, cmd, output=output)
CalledProcessError: Command '('lsb_release', '-a')' returned non-zero exit status 1
Traceback (most recent call last):
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/bin/pip",> line 11, in <module>
sys.exit(main())
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/__init__.py",> line 78, in main
return command.main(cmd_args)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/cli/base_command.py",> line 228, in main
timeout=min(5, options.timeout)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/cli/base_command.py",> line 93, in _build_session
insecure_hosts=options.trusted_hosts,
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/download.py",> line 344, in __init__
self.headers["User-Agent"] = user_agent()
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_internal/download.py",> line 108, in user_agent
zip(["name", "version", "id"], distro.linux_distribution()),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 120, in linux_distribution
return _distro.linux_distribution(full_distribution_name)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 675, in linux_distribution
self.version(),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 735, in version
self.lsb_release_attr('release'),
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 892, in lsb_release_attr
return self._lsb_release_info.get(attribute, '')
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 550, in __get__
ret = obj.__dict__[self._fname] = self._f(obj)
File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/pip/_vendor/distro.py",> line 998, in _lsb_release_info
stdout = subprocess.check_output(cmd, stderr=devnull)
File "/usr/lib/python2.7/subprocess.py", line 574, in check_output
raise CalledProcessError(retcode, cmd, output=output)
subprocess.CalledProcessError: Command '('lsb_release', '-a')' returned non-zero exit status 1
> Task :beam-sdks-python:setupVirtualenv FAILED
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':beam-sdks-python:setupVirtualenv'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 23s
1 actionable task: 1 executed
Publishing build scan...
https://gradle.com/s/wzqz6csju4fe6
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
Build failed in Jenkins: beam_PostCommit_Python_Verify #7542
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7542/display/redirect?page=changes>
Changes:
[github] Merge pull request #7353: [BEAM-4461] Support inner and outer style
------------------------------------------
[...truncated 137.35 KB...]
> Task :beam-sdks-python:hdfsIntegrationTest
++ dirname ./apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh
+ TEST_DIR=./apache_beam/io/hdfs_integration_test
+ ROOT_DIR=./apache_beam/io/hdfs_integration_test/../../../../..
+ CONTEXT_DIR=./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ rm -r ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
rm: cannot remove './apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration': No such file or directory
+ true
+ mkdir -p ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks
+ cp ./apache_beam/io/hdfs_integration_test/docker-compose.yml ./apache_beam/io/hdfs_integration_test/Dockerfile ./apache_beam/io/hdfs_integration_test/hdfscli.cfg ./apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../sdks/python ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../model ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
++ echo hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7542
+ PROJECT_NAME=hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7542
+ '[' -z jenkins-beam_PostCommit_Python_Verify-7542 ']'
+ COLOR_OPT=--no-ansi
+ COMPOSE_OPT='-p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7542 --no-ansi'
+ cd ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ docker network prune --force
+ trap finally EXIT
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7542 --no-ansi build
namenode uses an image, skipping
datanode uses an image, skipping
Building test
Step 1/9 : FROM python:2
---> e49a53a4af5c
Step 2/9 : WORKDIR /app
---> Using cache
---> 39c2db1ef75c
Step 3/9 : ENV HDFSCLI_CONFIG /app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg
---> Using cache
---> b98faca55ee7
Step 4/9 : RUN pip install --no-cache-dir holdup gsutil
---> Using cache
---> 86a5f3458108
Step 5/9 : RUN gsutil cp gs://dataflow-samples/shakespeare/kinglear.txt .
---> Using cache
---> 74b16e40bb49
Step 6/9 : ADD sdks/python /app/sdks/python
---> 5e656427f8fc
Removing intermediate container b89ba6ea0ab2
Step 7/9 : ADD model /app/model
---> 18dd53503e0c
Removing intermediate container 9a89452e15fb
Step 8/9 : RUN cd sdks/python && python setup.py sdist && pip install --no-cache-dir $(ls dist/apache-beam-*.tar.gz | tail -n1)[gcp]
---> Running in dff9efeead04
Service 'test' failed to build: grpc: the connection is unavailable
+ finally
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7542 --no-ansi down
Removing network hdfs_it-jenkins-beam_postcommit_python_verify-7542_test_net
Network hdfs_it-jenkins-beam_postcommit_python_verify-7542_test_net not found.
real 0m0.225s
user 0m0.174s
sys 0m0.044s
> Task :beam-sdks-python:hdfsIntegrationTest FAILED
> Task :beam-sdks-python:postCommitIT
###########################################################################
# Build pipeline options if not provided in --pipeline_opts from commandline
if [[ -z $PIPELINE_OPTS ]]; then
# Check that the script is running in a known directory.
if [[ $PWD != *sdks/python* ]]; then
echo 'Unable to locate Apache Beam Python SDK root directory'
exit 1
fi
# Go to the Apache Beam Python SDK root
if [[ "*sdks/python" != $PWD ]]; then
cd $(pwd | sed 's/sdks\/python.*/sdks\/python/')
fi
# Create a tarball if not exists
if [[ $(find ${SDK_LOCATION}) ]]; then
SDK_LOCATION=$(find ${SDK_LOCATION})
else
python setup.py -q sdist
SDK_LOCATION=$(find dist/apache-beam-*.tar.gz)
fi
# Install test dependencies for ValidatesRunner tests.
echo "pyhamcrest" > postcommit_requirements.txt
echo "mock" >> postcommit_requirements.txt
# Options used to run testing pipeline on Cloud Dataflow Service. Also used for
# running on DirectRunner (some options ignored).
opts=(
"--runner=$RUNNER"
"--project=$PROJECT"
"--staging_location=$GCS_LOCATION/staging-it"
"--temp_location=$GCS_LOCATION/temp-it"
"--output=$GCS_LOCATION/py-it-cloud/output"
"--sdk_location=$SDK_LOCATION"
"--requirements_file=postcommit_requirements.txt"
"--num_workers=$NUM_WORKERS"
"--sleep_secs=$SLEEP_SECS"
)
# Add --streaming if provided
if [[ "$STREAMING" = true ]]; then
opts+=("--streaming")
fi
# Add --dataflow_worker_jar if provided
if [[ ! -z "$WORKER_JAR" ]]; then
opts+=("--dataflow_worker_jar=$WORKER_JAR")
fi
if [[ ! -z "$KMS_KEY_NAME" ]]; then
opts+=(
"--kms_key_name=$KMS_KEY_NAME"
"--dataflow_kms_key=$KMS_KEY_NAME"
)
fi
PIPELINE_OPTS=$(IFS=" " ; echo "${opts[*]}")
fi
pwd | sed 's/sdks\/python.*/sdks\/python/'
find ${SDK_LOCATION}
find ${SDK_LOCATION}
IFS=" " ; echo "${opts[*]}"
###########################################################################
# Run tests and validate that jobs finish successfully.
echo ">>> RUNNING integration tests with pipeline options: $PIPELINE_OPTS"
python setup.py nosetests \
--test-pipeline-options="$PIPELINE_OPTS" \
$TEST_OPTS
>>> RUNNING integration tests with pipeline options: --runner=TestDataflowRunner --project=apache-beam-testing --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --sdk_location=build/apache-beam.tar.gz --requirements_file=postcommit_requirements.txt --num_workers=1 --sleep_secs=20 --kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test --dataflow_kms_key=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/setuptools/dist.py>:475: UserWarning: Normalizing '2.12.0.dev' to '2.12.0.dev0'
normalized_version,
running nosetests
running egg_info
writing requirements to apache_beam.egg-info/requires.txt
writing apache_beam.egg-info/PKG-INFO
writing top-level names to apache_beam.egg-info/top_level.txt
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
reading manifest template 'MANIFEST.in'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes.py>:90: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
if 'temp_location' in p.options.get_all_options():
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... SKIP: This test requires BQ Dataflow native source support for KMS, which is not available yet.
test_datastore_write_limit (apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... ok
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 28 tests in 2898.474s
OK (SKIP=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_50-8829645874160604733?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_16_45-2169356869488623635?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_24_49-26641457457697919?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_30_36-17233627131473190229?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_37_19-15237151246275288811?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_44_11-7413511125055003045?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_50_39-17338214747680463594?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_53-18020680727195202988?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_23_31-14841512479027936994?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_30_34-17841645644890990735?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_52-12212945565076245510?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_52-16591004738618132630?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_21_05-16906269812969059455?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_30_36-9121572224507622935?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_39_20-10235622098933914335?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_51-4629953016301834000?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_26_10-8687873431991495318?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_51-13339815738792225529?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_15_33-13859528976635070969?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_24_32-15019413721561971049?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_50-8973072240009310697?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_17_09-6952453657291712081?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_25_46-4331414747151928112?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_31_36-297109920759869291?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_08_51-3582191326161040181?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_07_16_23-7187273471276906816?project=apache-beam-testing.
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle'> line: 317
* What went wrong:
Execution failed for task ':beam-sdks-python:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 50m 3s
6 actionable tasks: 6 executed
Publishing build scan...
https://gradle.com/s/vvbjwthmv6h3e
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
Build failed in Jenkins: beam_PostCommit_Python_Verify #7541
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7541/display/redirect>
------------------------------------------
[...truncated 137.56 KB...]
+ TEST_DIR=./apache_beam/io/hdfs_integration_test
+ ROOT_DIR=./apache_beam/io/hdfs_integration_test/../../../../..
+ CONTEXT_DIR=./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ rm -r ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
rm: cannot remove './apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration': No such file or directory
+ true
+ mkdir -p ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks
+ cp ./apache_beam/io/hdfs_integration_test/docker-compose.yml ./apache_beam/io/hdfs_integration_test/Dockerfile ./apache_beam/io/hdfs_integration_test/hdfscli.cfg ./apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../sdks/python ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../model ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
++ echo hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7541
+ PROJECT_NAME=hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7541
+ '[' -z jenkins-beam_PostCommit_Python_Verify-7541 ']'
+ COLOR_OPT=--no-ansi
+ COMPOSE_OPT='-p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7541 --no-ansi'
+ cd ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ docker network prune --force
+ trap finally EXIT
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7541 --no-ansi build
namenode uses an image, skipping
datanode uses an image, skipping
Building test
Step 1/9 : FROM python:2
---> e49a53a4af5c
Step 2/9 : WORKDIR /app
---> Using cache
---> 39c2db1ef75c
Step 3/9 : ENV HDFSCLI_CONFIG /app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg
---> Using cache
---> b98faca55ee7
Step 4/9 : RUN pip install --no-cache-dir holdup gsutil
---> Using cache
---> 86a5f3458108
Step 5/9 : RUN gsutil cp gs://dataflow-samples/shakespeare/kinglear.txt .
---> Using cache
---> 74b16e40bb49
Step 6/9 : ADD sdks/python /app/sdks/python
---> 601bc07d7fc3
Removing intermediate container 25eaef7410ed
Step 7/9 : ADD model /app/model
---> add7db222649
Removing intermediate container 2848dd5cf4cf
Step 8/9 : RUN cd sdks/python && python setup.py sdist && pip install --no-cache-dir $(ls dist/apache-beam-*.tar.gz | tail -n1)[gcp]
---> Running in 1374b0ff3c3d
Service 'test' failed to build: grpc: the connection is unavailable
+ finally
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7541 --no-ansi down
Removing network hdfs_it-jenkins-beam_postcommit_python_verify-7541_test_net
Network hdfs_it-jenkins-beam_postcommit_python_verify-7541_test_net not found.
real 0m0.250s
user 0m0.174s
sys 0m0.068s
> Task :beam-sdks-python:hdfsIntegrationTest FAILED
> Task :beam-sdks-python:postCommitIT
###########################################################################
# Build pipeline options if not provided in --pipeline_opts from commandline
if [[ -z $PIPELINE_OPTS ]]; then
# Check that the script is running in a known directory.
if [[ $PWD != *sdks/python* ]]; then
echo 'Unable to locate Apache Beam Python SDK root directory'
exit 1
fi
# Go to the Apache Beam Python SDK root
if [[ "*sdks/python" != $PWD ]]; then
cd $(pwd | sed 's/sdks\/python.*/sdks\/python/')
fi
# Create a tarball if not exists
if [[ $(find ${SDK_LOCATION}) ]]; then
SDK_LOCATION=$(find ${SDK_LOCATION})
else
python setup.py -q sdist
SDK_LOCATION=$(find dist/apache-beam-*.tar.gz)
fi
# Install test dependencies for ValidatesRunner tests.
echo "pyhamcrest" > postcommit_requirements.txt
echo "mock" >> postcommit_requirements.txt
# Options used to run testing pipeline on Cloud Dataflow Service. Also used for
# running on DirectRunner (some options ignored).
opts=(
"--runner=$RUNNER"
"--project=$PROJECT"
"--staging_location=$GCS_LOCATION/staging-it"
"--temp_location=$GCS_LOCATION/temp-it"
"--output=$GCS_LOCATION/py-it-cloud/output"
"--sdk_location=$SDK_LOCATION"
"--requirements_file=postcommit_requirements.txt"
"--num_workers=$NUM_WORKERS"
"--sleep_secs=$SLEEP_SECS"
)
# Add --streaming if provided
if [[ "$STREAMING" = true ]]; then
opts+=("--streaming")
fi
# Add --dataflow_worker_jar if provided
if [[ ! -z "$WORKER_JAR" ]]; then
opts+=("--dataflow_worker_jar=$WORKER_JAR")
fi
if [[ ! -z "$KMS_KEY_NAME" ]]; then
opts+=(
"--kms_key_name=$KMS_KEY_NAME"
"--dataflow_kms_key=$KMS_KEY_NAME"
)
fi
PIPELINE_OPTS=$(IFS=" " ; echo "${opts[*]}")
fi
pwd | sed 's/sdks\/python.*/sdks\/python/'
find ${SDK_LOCATION}
find ${SDK_LOCATION}
IFS=" " ; echo "${opts[*]}"
###########################################################################
# Run tests and validate that jobs finish successfully.
echo ">>> RUNNING integration tests with pipeline options: $PIPELINE_OPTS"
python setup.py nosetests \
--test-pipeline-options="$PIPELINE_OPTS" \
$TEST_OPTS
>>> RUNNING integration tests with pipeline options: --runner=TestDataflowRunner --project=apache-beam-testing --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --sdk_location=build/apache-beam.tar.gz --requirements_file=postcommit_requirements.txt --num_workers=1 --sleep_secs=20 --kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test --dataflow_kms_key=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/setuptools/dist.py>:475: UserWarning: Normalizing '2.12.0.dev' to '2.12.0.dev0'
normalized_version,
running nosetests
running egg_info
writing requirements to apache_beam.egg-info/requires.txt
writing apache_beam.egg-info/PKG-INFO
writing top-level names to apache_beam.egg-info/top_level.txt
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes.py>:90: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
if 'temp_location' in p.options.get_all_options():
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... SKIP: This test requires BQ Dataflow native source support for KMS, which is not available yet.
test_datastore_write_limit (apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... ok
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 28 tests in 2773.246s
OK (SKIP=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_43-9294731338487718848?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_26_59-13926201470305752367?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_34_23-1367708355577769654?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_39_51-5661188175148321526?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_46_29-587378205875567348?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_53_17-5861034986880386585?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_59_30-11017528168230943998?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_46-8909542533735015533?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_33_56-12050229473002624955?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_44-7333656569852136441?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_46-4630694741633929378?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_32_18-1679130732570844115?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_41_16-15128589979819241241?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_50_20-4608896220190719324?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_43-11094691106393531177?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_44-5375939947336008228?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_27_00-11358737156875316281?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_35_01-2230302911198648511?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_41_21-11763053716564287883?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_43-1032616103929648649?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_27_03-5156240082091736878?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_33_53-7054983118903654679?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_42_28-7077410617548757915?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_19_43-5309424727180688936?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_27_12-9210768101377558817?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_36_40-10091317635417608286?project=apache-beam-testing.
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle'> line: 317
* What went wrong:
Execution failed for task ':beam-sdks-python:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 48m 11s
6 actionable tasks: 6 executed
Publishing build scan...
https://gradle.com/s/rvf2mdrmlu6gs
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org
Build failed in Jenkins: beam_PostCommit_Python_Verify #7540
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/7540/display/redirect?page=changes>
Changes:
[ttanay100] Fix link to Design Principles in PTransform Style Guide
[ttanay100] Change text as well
------------------------------------------
[...truncated 137.65 KB...]
+ ROOT_DIR=./apache_beam/io/hdfs_integration_test/../../../../..
+ CONTEXT_DIR=./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ rm -r ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
rm: cannot remove './apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration': No such file or directory
+ true
+ mkdir -p ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks
+ cp ./apache_beam/io/hdfs_integration_test/docker-compose.yml ./apache_beam/io/hdfs_integration_test/Dockerfile ./apache_beam/io/hdfs_integration_test/hdfscli.cfg ./apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../sdks/python ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/sdks/
+ cp -r ./apache_beam/io/hdfs_integration_test/../../../../../model ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration/
++ echo hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7540
+ PROJECT_NAME=hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7540
+ '[' -z jenkins-beam_PostCommit_Python_Verify-7540 ']'
+ COLOR_OPT=--no-ansi
+ COMPOSE_OPT='-p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7540 --no-ansi'
+ cd ./apache_beam/io/hdfs_integration_test/../../../../../build/hdfs_integration
+ docker network prune --force
+ trap finally EXIT
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7540 --no-ansi build
namenode uses an image, skipping
datanode uses an image, skipping
Building test
Step 1/9 : FROM python:2
2: Pulling from library/python
Digest: sha256:35f98a81a8346006cefcb7f2304ed198fc1b87f9ca812cf685b081b9a9074b6b
Status: Downloaded newer image for python:2
---> e49a53a4af5c
Step 2/9 : WORKDIR /app
---> Using cache
---> 39c2db1ef75c
Step 3/9 : ENV HDFSCLI_CONFIG /app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg
---> Using cache
---> b98faca55ee7
Step 4/9 : RUN pip install --no-cache-dir holdup gsutil
---> Using cache
---> 86a5f3458108
Step 5/9 : RUN gsutil cp gs://dataflow-samples/shakespeare/kinglear.txt .
---> Using cache
---> 74b16e40bb49
Step 6/9 : ADD sdks/python /app/sdks/python
---> 3080a92060a8
Removing intermediate container cf6db6390c4b
Step 7/9 : ADD model /app/model
---> 498ad0f00b47
Removing intermediate container d915d6b3d667
Step 8/9 : RUN cd sdks/python && python setup.py sdist && pip install --no-cache-dir $(ls dist/apache-beam-*.tar.gz | tail -n1)[gcp]
---> Running in 6c83bae0dd9f
Service 'test' failed to build: grpc: the connection is unavailable
+ finally
+ docker-compose -p hdfs_IT-jenkins-beam_PostCommit_Python_Verify-7540 --no-ansi down
Removing network hdfs_it-jenkins-beam_postcommit_python_verify-7540_test_net
Network hdfs_it-jenkins-beam_postcommit_python_verify-7540_test_net not found.
real 0m0.229s
user 0m0.184s
sys 0m0.040s
> Task :beam-sdks-python:hdfsIntegrationTest FAILED
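The "finally" teardown above still runs after the failed image build because the integration test script installs an EXIT trap before invoking docker-compose. A minimal sketch of that pattern, using the PROJECT_NAME value seen in the trace; the function body is illustrative, not the script's exact code:
# Sketch of the trap/cleanup pattern visible in the trace above (illustrative only).
finally() {
  # Best-effort teardown; '|| true' keeps the trap handler itself from failing the script.
  docker-compose -p "$PROJECT_NAME" --no-ansi down || true
}
trap finally EXIT
docker-compose -p "$PROJECT_NAME" --no-ansi build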
> Task :beam-sdks-python:postCommitIT
###########################################################################
# Build pipeline options if not provided in --pipeline_opts from commandline
if [[ -z $PIPELINE_OPTS ]]; then
# Check that the script is running in a known directory.
if [[ $PWD != *sdks/python* ]]; then
echo 'Unable to locate Apache Beam Python SDK root directory'
exit 1
fi
# Go to the Apache Beam Python SDK root
if [[ "*sdks/python" != $PWD ]]; then
cd $(pwd | sed 's/sdks\/python.*/sdks\/python/')
fi
# Create a tarball if not exists
if [[ $(find ${SDK_LOCATION}) ]]; then
SDK_LOCATION=$(find ${SDK_LOCATION})
else
python setup.py -q sdist
SDK_LOCATION=$(find dist/apache-beam-*.tar.gz)
fi
# Install test dependencies for ValidatesRunner tests.
echo "pyhamcrest" > postcommit_requirements.txt
echo "mock" >> postcommit_requirements.txt
# Options used to run testing pipeline on Cloud Dataflow Service. Also used for
# running on DirectRunner (some options ignored).
opts=(
"--runner=$RUNNER"
"--project=$PROJECT"
"--staging_location=$GCS_LOCATION/staging-it"
"--temp_location=$GCS_LOCATION/temp-it"
"--output=$GCS_LOCATION/py-it-cloud/output"
"--sdk_location=$SDK_LOCATION"
"--requirements_file=postcommit_requirements.txt"
"--num_workers=$NUM_WORKERS"
"--sleep_secs=$SLEEP_SECS"
)
# Add --streaming if provided
if [[ "$STREAMING" = true ]]; then
opts+=("--streaming")
fi
# Add --dataflow_worker_jar if provided
if [[ ! -z "$WORKER_JAR" ]]; then
opts+=("--dataflow_worker_jar=$WORKER_JAR")
fi
if [[ ! -z "$KMS_KEY_NAME" ]]; then
opts+=(
"--kms_key_name=$KMS_KEY_NAME"
"--dataflow_kms_key=$KMS_KEY_NAME"
)
fi
PIPELINE_OPTS=$(IFS=" " ; echo "${opts[*]}")
fi
pwd | sed 's/sdks\/python.*/sdks\/python/'
find ${SDK_LOCATION}
find ${SDK_LOCATION}
IFS=" " ; echo "${opts[*]}"
###########################################################################
# Run tests and validate that jobs finish successfully.
echo ">>> RUNNING integration tests with pipeline options: $PIPELINE_OPTS"
python setup.py nosetests \
--test-pipeline-options="$PIPELINE_OPTS" \
$TEST_OPTS
>>> RUNNING integration tests with pipeline options: --runner=TestDataflowRunner --project=apache-beam-testing --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --sdk_location=build/apache-beam.tar.gz --requirements_file=postcommit_requirements.txt --num_workers=1 --sleep_secs=20 --kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test --dataflow_kms_key=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test
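The single option string echoed above is produced by the bash idiom in the script where IFS is set inside a command substitution so that "${opts[*]}" expands to a space-joined string. A standalone sketch of that idiom with placeholder values, not the CI job's actual options:
# Illustration of the IFS-join idiom used by the script above; option values are made up.
opts=("--runner=TestDataflowRunner" "--num_workers=1" "--sleep_secs=20")
PIPELINE_OPTS=$(IFS=" " ; echo "${opts[*]}")
echo "$PIPELINE_OPTS"   # prints: --runner=TestDataflowRunner --num_workers=1 --sleep_secs=20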
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build/gradleenv/1327086738/local/lib/python2.7/site-packages/setuptools/dist.py>:475: UserWarning: Normalizing '2.12.0.dev' to '2.12.0.dev0'
normalized_version,
running nosetests
running egg_info
writing requirements to apache_beam.egg-info/requires.txt
writing apache_beam.egg-info/PKG-INFO
writing top-level names to apache_beam.egg-info/top_level.txt
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/examples/cookbook/bigquery_tornadoes.py>:90: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
if 'temp_location' in p.options.get_all_options():
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:835: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
standard_options = p.options.view_as(StandardOptions)
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... SKIP: This test requires BQ Dataflow native source support for KMS, which is not available yet.
test_datastore_write_limit (apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... ok
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 28 tests in 2766.306s
OK (SKIP=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_32-15860636336778804111?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_45_33-8952025808258244717?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_51_21-16351041071301839746?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_58_24-18423921342612250332?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_04_37-1292343120311929294?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_10_51-9077237166566945575?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_30-10245705229366364777?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_31-5404257322820134531?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_43_38-11691832542582835496?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_52_18-3430222476884725657?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_04_01_23-10963740699061110950?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_29-11116154437774038192?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_30-3942859390553530555?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_38_05-2204120129203942428?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_45_14-3950948765222870054?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_29-13383813015987579578?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_38_24-11743401367649115779?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_45_05-18210663586673876239?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_52_08-14698806539753555298?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_29-458235875014372555?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_39_03-4020640435546123035?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_46_10-4623944468602468256?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_52_58-4909193956806809775?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_31_29-8520077405609717886?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_39_02-6004903826142362275?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-03-01_03_48_00-13956186316489683972?project=apache-beam-testing.
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build.gradle'> line: 317
* What went wrong:
Execution failed for task ':beam-sdks-python:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 47m 54s
6 actionable tasks: 6 executed
Publishing build scan...
https://gradle.com/s/zwolx6nip7nvk
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org