Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2020/01/31 18:18:34 UTC

Build failed in Jenkins: beam_PostCommit_Python36 #1594

See <https://builds.apache.org/job/beam_PostCommit_Python36/1594/display/redirect?page=changes>

Changes:

[github] [BEAM-8889] Cleanup Beam to GCS connector interfacing code so it uses


------------------------------------------
[...truncated 95.13 KB...]
	at org.codehaus.groovy.runtime.dgm$186.invoke(Unknown Source)
	at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite$PojoMetaMethodSiteNoUnwrapNoCoerce.invoke(PojoMetaMethodSite.java:246)
	at org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite.call(PojoMetaMethodSite.java:55)
	at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:127)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction$StreamAction.processArchive(ShadowCopyAction.groovy:263)
	at sun.reflect.GeneratedMethodAccessor540.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.codehaus.groovy.runtime.callsite.PlainObjectMetaMethodSite.doInvoke(PlainObjectMetaMethodSite.java:43)
	at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce.invoke(PogoMetaMethodSite.java:190)
	at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.callCurrent(PogoMetaMethodSite.java:58)
	at org.codehaus.groovy.runtime.callsite.AbstractCallSite.callCurrent(AbstractCallSite.java:168)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction$StreamAction.visitFile(ShadowCopyAction.groovy:248)
	at sun.reflect.GeneratedMethodAccessor420.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.codehaus.groovy.runtime.callsite.PlainObjectMetaMethodSite.doInvoke(PlainObjectMetaMethodSite.java:43)
	at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce.invoke(PogoMetaMethodSite.java:190)
	at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.callCurrent(PogoMetaMethodSite.java:58)
	at org.codehaus.groovy.runtime.callsite.AbstractCallSite.callCurrent(AbstractCallSite.java:168)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction$BaseStreamAction.processFile(ShadowCopyAction.groovy:183)
	at org.gradle.api.internal.file.copy.NormalizingCopyActionDecorator$1$1.processFile(NormalizingCopyActionDecorator.java:66)
	at org.gradle.api.internal.file.copy.DuplicateHandlingCopyActionDecorator$1$1.processFile(DuplicateHandlingCopyActionDecorator.java:60)
	at org.gradle.api.internal.file.copy.CopyFileVisitorImpl.processFile(CopyFileVisitorImpl.java:62)
	at org.gradle.api.internal.file.copy.CopyFileVisitorImpl.visitFile(CopyFileVisitorImpl.java:46)
	at org.gradle.api.internal.file.collections.AbstractSingletonFileTree.visit(AbstractSingletonFileTree.java:36)
	at org.gradle.api.internal.file.collections.FileTreeAdapter.visit(FileTreeAdapter.java:118)
	at org.gradle.api.internal.file.CompositeFileTree.visit(CompositeFileTree.java:93)
	at org.gradle.api.internal.file.copy.CopySpecActionImpl.execute(CopySpecActionImpl.java:39)
	at org.gradle.api.internal.file.copy.CopySpecActionImpl.execute(CopySpecActionImpl.java:24)
	at org.gradle.api.internal.file.copy.DefaultCopySpec$DefaultCopySpecResolver.walk(DefaultCopySpec.java:693)
	at org.gradle.api.internal.file.copy.DefaultCopySpec$DefaultCopySpecResolver.walk(DefaultCopySpec.java:695)
	at org.gradle.api.internal.file.copy.DefaultCopySpec.walk(DefaultCopySpec.java:499)
	at org.gradle.api.internal.file.copy.CopySpecBackedCopyActionProcessingStream.process(CopySpecBackedCopyActionProcessingStream.java:38)
	at org.gradle.api.internal.file.copy.DuplicateHandlingCopyActionDecorator$1.process(DuplicateHandlingCopyActionDecorator.java:44)
	at org.gradle.api.internal.file.copy.NormalizingCopyActionDecorator$1.process(NormalizingCopyActionDecorator.java:57)
	at org.gradle.api.internal.file.copy.CopyActionProcessingStream$process.call(Unknown Source)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction$2.execute(ShadowCopyAction.groovy:110)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction$2$execute.call(Unknown Source)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction.withResource(ShadowCopyAction.groovy:152)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.codehaus.groovy.reflection.CachedMethod.invoke(CachedMethod.java:104)
	at org.codehaus.groovy.runtime.callsite.StaticMetaMethodSite$StaticMetaMethodSiteNoUnwrapNoCoerce.invoke(StaticMetaMethodSite.java:151)
	at org.codehaus.groovy.runtime.callsite.StaticMetaMethodSite.callStatic(StaticMetaMethodSite.java:102)
	at org.codehaus.groovy.runtime.callsite.AbstractCallSite.callStatic(AbstractCallSite.java:216)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction.execute(ShadowCopyAction.groovy:107)
	at org.gradle.api.internal.file.copy.NormalizingCopyActionDecorator.execute(NormalizingCopyActionDecorator.java:53)
	at org.gradle.api.internal.file.copy.DuplicateHandlingCopyActionDecorator.execute(DuplicateHandlingCopyActionDecorator.java:42)
	at org.gradle.api.internal.file.copy.CopyActionExecuter.execute(CopyActionExecuter.java:40)
	at org.gradle.api.tasks.AbstractCopyTask.copy(AbstractCopyTask.java:179)
	at com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar.copy(ShadowJar.java:96)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:103)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:48)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:41)
	at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:28)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:705)
	at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:672)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$4.run(ExecuteActionsTaskExecuter.java:338)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:402)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:394)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$1.execute(DefaultBuildOperationExecutor.java:165)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:250)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:158)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:92)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:327)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:312)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.access$200(ExecuteActionsTaskExecuter.java:75)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$TaskExecution.execute(ExecuteActionsTaskExecuter.java:158)
	at org.gradle.internal.execution.impl.steps.ExecuteStep.execute(ExecuteStep.java:46)
	at org.gradle.internal.execution.impl.steps.CancelExecutionStep.execute(CancelExecutionStep.java:34)
	at org.gradle.internal.execution.impl.steps.TimeoutStep.executeWithoutTimeout(TimeoutStep.java:69)
	at org.gradle.internal.execution.impl.steps.TimeoutStep.execute(TimeoutStep.java:49)
	at org.gradle.internal.execution.impl.steps.CatchExceptionStep.execute(CatchExceptionStep.java:34)
	at org.gradle.internal.execution.impl.steps.CreateOutputsStep.execute(CreateOutputsStep.java:49)
	at org.gradle.internal.execution.impl.steps.SnapshotOutputStep.execute(SnapshotOutputStep.java:42)
	at org.gradle.internal.execution.impl.steps.SnapshotOutputStep.execute(SnapshotOutputStep.java:28)
	at org.gradle.internal.execution.impl.steps.CacheStep.executeWithoutCache(CacheStep.java:133)
	at org.gradle.internal.execution.impl.steps.CacheStep.lambda$execute$5(CacheStep.java:83)
	at java.util.Optional.orElseGet(Optional.java:267)
	at org.gradle.internal.execution.impl.steps.CacheStep.execute(CacheStep.java:82)
	at org.gradle.internal.execution.impl.steps.CacheStep.execute(CacheStep.java:37)
	at org.gradle.internal.execution.impl.steps.PrepareCachingStep.execute(PrepareCachingStep.java:33)
	at org.gradle.internal.execution.impl.steps.StoreSnapshotsStep.execute(StoreSnapshotsStep.java:38)
	at org.gradle.internal.execution.impl.steps.StoreSnapshotsStep.execute(StoreSnapshotsStep.java:23)
	at org.gradle.internal.execution.impl.steps.SkipUpToDateStep.executeBecause(SkipUpToDateStep.java:95)
	at org.gradle.internal.execution.impl.steps.SkipUpToDateStep.lambda$execute$1(SkipUpToDateStep.java:90)
	at java.util.Optional.orElseGet(Optional.java:267)
	at org.gradle.internal.execution.impl.steps.SkipUpToDateStep.execute(SkipUpToDateStep.java:90)
	at org.gradle.internal.execution.impl.steps.SkipUpToDateStep.execute(SkipUpToDateStep.java:36)
	at org.gradle.internal.execution.impl.DefaultWorkExecutor.execute(DefaultWorkExecutor.java:34)
	at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:109)
	at org.gradle.api.internal.tasks.execution.ResolveIncrementalChangesTaskExecuter.execute(ResolveIncrementalChangesTaskExecuter.java:84)
	at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:91)
	at org.gradle.api.internal.tasks.execution.FinishSnapshotTaskInputsBuildOperationTaskExecuter.execute(FinishSnapshotTaskInputsBuildOperationTaskExecuter.java:51)
	at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:102)
	at org.gradle.api.internal.tasks.execution.ResolveBeforeExecutionStateTaskExecuter.execute(ResolveBeforeExecutionStateTaskExecuter.java:74)
	at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:58)
	at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:109)
	at org.gradle.api.internal.tasks.execution.ResolveBeforeExecutionOutputsTaskExecuter.execute(ResolveBeforeExecutionOutputsTaskExecuter.java:67)
	at org.gradle.api.internal.tasks.execution.StartSnapshotTaskInputsBuildOperationTaskExecuter.execute(StartSnapshotTaskInputsBuildOperationTaskExecuter.java:52)
	at org.gradle.api.internal.tasks.execution.ResolveAfterPreviousExecutionStateTaskExecuter.execute(ResolveAfterPreviousExecutionStateTaskExecuter.java:46)
	at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:93)
	at org.gradle.api.internal.tasks.execution.FinalizePropertiesTaskExecuter.execute(FinalizePropertiesTaskExecuter.java:45)
	at org.gradle.api.internal.tasks.execution.ResolveTaskExecutionModeExecuter.execute(ResolveTaskExecutionModeExecuter.java:94)
	at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:57)
	at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:56)
	at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:36)
	at org.gradle.api.internal.tasks.execution.EventFiringTaskExecuter$1.executeTask(EventFiringTaskExecuter.java:63)
	at org.gradle.api.internal.tasks.execution.EventFiringTaskExecuter$1.call(EventFiringTaskExecuter.java:49)
	at org.gradle.api.internal.tasks.execution.EventFiringTaskExecuter$1.call(EventFiringTaskExecuter.java:46)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$CallableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:416)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$CallableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:406)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor$1.execute(DefaultBuildOperationExecutor.java:165)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:250)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:158)
	at org.gradle.internal.operations.DefaultBuildOperationExecutor.call(DefaultBuildOperationExecutor.java:102)
	at org.gradle.internal.operations.DelegatingBuildOperationExecutor.call(DelegatingBuildOperationExecutor.java:36)
	at org.gradle.api.internal.tasks.execution.EventFiringTaskExecuter.execute(EventFiringTaskExecuter.java:46)
	at org.gradle.execution.plan.LocalTaskNodeExecutor.execute(LocalTaskNodeExecutor.java:43)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionGraph$InvokeNodeExecutorsAction.execute(DefaultTaskExecutionGraph.java:355)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionGraph$InvokeNodeExecutorsAction.execute(DefaultTaskExecutionGraph.java:343)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionGraph$BuildOperationAwareExecutionAction.execute(DefaultTaskExecutionGraph.java:336)
	at org.gradle.execution.taskgraph.DefaultTaskExecutionGraph$BuildOperationAwareExecutionAction.execute(DefaultTaskExecutionGraph.java:322)
	at org.gradle.execution.plan.DefaultPlanExecutor$ExecutorWorker$1.execute(DefaultPlanExecutor.java:134)
	at org.gradle.execution.plan.DefaultPlanExecutor$ExecutorWorker$1.execute(DefaultPlanExecutor.java:129)
	at org.gradle.execution.plan.DefaultPlanExecutor$ExecutorWorker.execute(DefaultPlanExecutor.java:202)
	at org.gradle.execution.plan.DefaultPlanExecutor$ExecutorWorker.executeNextNode(DefaultPlanExecutor.java:193)
	at org.gradle.execution.plan.DefaultPlanExecutor$ExecutorWorker.run(DefaultPlanExecutor.java:129)
	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
	at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
	at java.lang.Thread.run(Thread.java:748)

> Task :runners:google-cloud-dataflow-java:worker:shadowJar

> Task :sdks:python:test-suites:direct:py36:installGcpTest
Successfully installed apache-beam-2.20.0.dev0 apipkg-1.5 atomicwrites-1.3.0 attrs-19.3.0 avro-python3-1.9.1 cachetools-3.1.1 certifi-2019.11.28 chardet-3.0.4 crcmod-1.7 dill-0.3.1.1 docopt-0.6.2 execnet-1.7.1 fastavro-0.21.24 fasteners-0.15 freezegun-0.3.14 google-api-core-1.16.0 google-apitools-0.5.28 google-auth-1.11.0 google-cloud-bigquery-1.17.1 google-cloud-bigtable-1.0.0 google-cloud-core-1.2.0 google-cloud-datastore-1.7.4 google-cloud-pubsub-1.0.2 google-cloud-spanner-1.13.0 google-resumable-media-0.4.1 googleapis-common-protos-1.51.0 grpc-google-iam-v1-0.12.3 grpcio-gcp-0.2.2 hdfs-2.5.8 httplib2-0.12.0 idna-2.8 mock-2.0.0 monotonic-1.5 more-itertools-8.2.0 nose-1.3.7 nose-xunitmp-0.4.1 numpy-1.18.1 oauth2client-3.0.0 packaging-20.1 pandas-0.24.2 parameterized-0.7.1 pbr-5.4.4 pyarrow-0.15.1 pyasn1-0.4.8 pyasn1-modules-0.2.8 pydot-1.4.1 pyhamcrest-1.10.1 pymongo-3.10.1 pyparsing-2.4.6 pytest-4.6.9 pytest-forked-1.1.3 pytest-xdist-1.31.0 python-dateutil-2.8.1 pytz-2019.3 pyyaml-5.3 requests-2.22.0 requests-mock-1.7.0 rsa-4.0 tenacity-5.1.5 typing-3.7.4.1 typing-extensions-3.7.4.1 urllib3-1.25.8 wcwidth-0.1.8

> Task :sdks:python:test-suites:direct:py36:postCommitIT
>>> RUNNING integration tests with pipeline options: --runner=TestDirectRunner --project=apache-beam-testing --staging_location=gs://temp-storage-for-end-to-end-tests/staging-it --temp_location=gs://temp-storage-for-end-to-end-tests/temp-it --output=gs://temp-storage-for-end-to-end-tests/py-it-cloud/output --sdk_location=build/apache-beam.tar.gz --requirements_file=postcommit_requirements.txt --num_workers=1 --sleep_secs=20 --kms_key_name=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test --dataflow_kms_key=projects/apache-beam-testing/locations/global/keyRings/beam-it/cryptoKeys/test
>>>   test options: --tests=apache_beam.examples.wordcount_it_test:WordCountIT.test_wordcount_it,apache_beam.io.gcp.pubsub_integration_test:PubSubIntegrationTest,apache_beam.io.gcp.big_query_query_to_table_it_test:BigQueryQueryToTableIT,apache_beam.io.gcp.bigquery_io_read_it_test,apache_beam.io.gcp.bigquery_read_it_test,apache_beam.io.gcp.bigquery_write_it_test,apache_beam.io.gcp.datastore.v1new.datastore_write_it_test --nocapture --processes=8 --process-timeout=4500
running nosetests
running egg_info
Skipping proto regeneration: all files up to date
writing apache_beam.egg-info/PKG-INFO
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
writing requirements to apache_beam.egg-info/requires.txt
writing top-level names to apache_beam.egg-info/top_level.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/1398941890/lib/python3.6/site-packages/setuptools/dist.py>:476: UserWarning: Normalizing '2.20.0.dev' to '2.20.0.dev0'
  normalized_version,
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1418: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  self.table_reference.projectId = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_io_read_pipeline.py>:75: FutureWarning: _ReadFromBigQuery is experimental.
  known_args.input_table))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:259: FutureWarning: _ReadFromBigQuery is experimental.
  query=self.query, use_standard_sql=True, project=self.project))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:155: FutureWarning: _ReadFromBigQuery is experimental.
  query=self.query, use_standard_sql=True, project=self.project))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1418: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  self.table_reference.projectId = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... SKIP: This test doesn't work on DirectRunner.
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-direct-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 18 tests in 24.139s

OK (SKIP=1)

FAILURE: Build completed with 2 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py36:installGcpTest'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:installGcpTest'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 2m 4s
79 actionable tasks: 62 executed, 17 from cache

Publishing build scan...
https://gradle.com/s/ababhcvfgglf4

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Python36 #1597

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/1597/display/redirect?page=changes>




Build failed in Jenkins: beam_PostCommit_Python36 #1596

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/1596/display/redirect?page=changes>

Changes:

[github] [BEAM-9188] CassandraIO split performance improvement - cache size of

[radoslaws] spotless fixes

[radoslaws] comments and tests

[radoslaws] spotless


------------------------------------------
[...truncated 2.59 MB...]
	debug_error_string = "{"created":"@1580511219.943630541","description":"Error received from peer ipv4:127.0.0.1:37859","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>
Exception in thread read_state:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/sdk_worker.py",> line 665, in pull_responses
    for response in responses:
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580511219.943672902","description":"Error received from peer ipv4:127.0.0.1:42513","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>

Exception in thread read_grpc_client_inputs:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/data_plane.py",> line 438, in <lambda>
    target=lambda: self._read_inputs(elements_iterator),
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/data_plane.py",> line 423, in _read_inputs
    for elements in elements_iterator:
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580511219.943630541","description":"Error received from peer ipv4:127.0.0.1:37859","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>

Exception in thread run_worker_1-1:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/sdk_worker.py",> line 151, in run
    for work_request in control_stub.Control(get_responses()):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580511219.943681868","description":"Error received from peer ipv4:127.0.0.1:44913","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>


> Task :sdks:python:test-suites:dataflow:py36:postCommitIT
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_34-4041376051567645567?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_55_14-16403914419638337942?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_04_11-13027859748281195714?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:259: FutureWarning: _ReadFromBigQuery is experimental.
  query=self.query, use_standard_sql=True, project=self.project))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_13_00-712903634269103997?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_23_22-10111712976977560025?project=apache-beam-testing
  temp_location = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_28-3965891874733284067?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_00_41-6785792606038710904?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_10_32-622148691089098718?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_19_34-6872776841888002704?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_28_38-5408550302184341618?project=apache-beam-testing
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_32-11374411836849621815?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_53_12-17618692538730589212?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_03_18-40987893774811289?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_12_18-1943347938996221091?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_io_read_pipeline.py>:75: FutureWarning: _ReadFromBigQuery is experimental.
  known_args.input_table))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_22_26-16803475964368911393?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_io_read_pipeline.py>:75: FutureWarning: _ReadFromBigQuery is experimental.
  known_args.input_table))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:298: FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:309: FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:309: FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_29-6921657993676211418?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_00_21-15459877422922131102?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_test.py>:757: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_09_42-1298142001390580595?project=apache-beam-testing
  streaming = self.test_pipeline.options.view_as(StandardOptions).streaming
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1418: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  self.table_reference.projectId = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_29-8770906620536013041?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_50_55-2970097051178683196?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_01_24-2338711281762644371?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_11_55-5553815859948121292?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_20_22-12490452522041321423?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_30-1708654460544128066?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_49_13-9650262937437455494?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_59_11-9614145165734702917?project=apache-beam-testing
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_08_57-15406142729114134370?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:155: FutureWarning: _ReadFromBigQuery is experimental.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_17_29-2878491320670026879?project=apache-beam-testing
  query=self.query, use_standard_sql=True, project=self.project))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_26_03-10536464842001758695?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_32-11027255897147559125?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_49_38-6420692661453850598?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_58_17-16892088135352593593?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_06_59-11442370727614423867?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_15_31-9913183748215538074?project=apache-beam-testing
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:75: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_40_32-2986009999026272234?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_49_38-11020853405383150052?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_14_57_53-7035641610049479527?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_06_54-4623914618065446449?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_16_23-7041175097289367570?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_15_25_22-6276867686689136990?project=apache-beam-testing
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... SKIP: This test still needs to be fixed on Python 3. TODO: BEAM-4543
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... SKIP: Due to a known issue in the avro-python3 package, this test is skipped until BEAM-6522 is addressed.
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... SKIP: This test still needs to be fixed on Python 3. TODO: BEAM-4543
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... SKIP: https://issuetracker.google.com/issues/118375066
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_job_python_from_python_it (apache_beam.transforms.external_test_it.ExternalTransformIT) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 51 tests in 3410.582s

OK (SKIP=9)

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/portable/py36/build.gradle>' line: 62

* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py36:postCommitPy36IT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 58m 9s
84 actionable tasks: 63 executed, 21 from cache

Publishing build scan...
https://gradle.com/s/5lbncyvppy6ws

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure



Build failed in Jenkins: beam_PostCommit_Python36 #1595

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python36/1595/display/redirect?page=changes>

Changes:

[github] [BEAM-9233] Support -buildmode=pie -ldflags=-w with unregistered Go


------------------------------------------
[...truncated 2.57 MB...]
	debug_error_string = "{"created":"@1580504722.076665933","description":"Error received from peer ipv4:127.0.0.1:39683","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>
Exception in thread run_worker_1-1:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/sdk_worker.py",> line 151, in run
    for work_request in control_stub.Control(get_responses()):
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580504722.076702723","description":"Error received from peer ipv4:127.0.0.1:43515","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>

Exception in thread read_grpc_client_inputs:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/data_plane.py",> line 438, in <lambda>
    target=lambda: self._read_inputs(elements_iterator),
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/data_plane.py",> line 423, in _read_inputs
    for elements in elements_iterator:
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580504722.076665933","description":"Error received from peer ipv4:127.0.0.1:39683","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>

Exception in thread read_state:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/apache_beam/runners/worker/sdk_worker.py",> line 665, in pull_responses
    for response in responses:
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 416, in __next__
    return self._next()
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/build/gradleenv/2022703440/lib/python3.6/site-packages/grpc/_channel.py",> line 703, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Socket closed"
	debug_error_string = "{"created":"@1580504722.076682544","description":"Error received from peer ipv4:127.0.0.1:42683","file":"src/core/lib/surface/call.cc","file_line":1056,"grpc_message":"Socket closed","grpc_status":14}"
>
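
The three tracebacks above come from the SDK worker's background reader threads (control, data-plane, and state channels) all terminating with StatusCode.UNAVAILABLE / "Socket closed" when the peer closes its sockets. Below is a minimal sketch of how a streaming read loop can treat that status as an expected shutdown signal rather than an unhandled thread exception; it is an illustrative pattern, not the Beam worker's actual code, and drain/on_element are hypothetical names:

import logging

import grpc


def drain(response_iterator, on_element):
    # Iterate a gRPC streaming response. When the peer closes the socket,
    # the streaming rendezvous raises an RpcError whose code() is UNAVAILABLE.
    try:
        for element in response_iterator:
            on_element(element)
    except grpc.RpcError as err:
        if err.code() == grpc.StatusCode.UNAVAILABLE:
            logging.info('Stream closed by peer: %s', err.details())
        else:
            raise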


> Task :sdks:python:test-suites:dataflow:py36:postCommitIT
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_34-16526579437585277589?project=apache-beam-testing
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_48_36-1601845007538502258?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:259: FutureWarning: _ReadFromBigQuery is experimental.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_57_14-16975754751143737871?project=apache-beam-testing
  query=self.query, use_standard_sql=True, project=self.project))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_06_00-12786540509591050549?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_15_56-8030270306789147805?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_31-3548887297922096491?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_55_45-6005166988553553422?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_05_19-4191309107983984839?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
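
The BigQuerySink deprecation warnings interleaved above all point to WriteToBigQuery as the replacement. A minimal sketch of that transform follows, assuming placeholder table, schema, and KMS key values (the kms_key argument is assumed to carry over from the deprecated sink):

import apache_beam as beam

with beam.Pipeline() as p:
    _ = (
        p
        | beam.Create([{'name': 'beam', 'value': 1}])
        | beam.io.WriteToBigQuery(
            'my-project:my_dataset.my_table',  # placeholder table spec
            schema='name:STRING,value:INTEGER',
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND,
            kms_key='projects/p/locations/l/keyRings/r/cryptoKeys/k'))  # optional, placeholder
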
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_14_45-15217876462286360686?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
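
The "options is deprecated" warnings are emitted because these transforms read <pipeline>.options at expansion time; the supported pattern is to build a PipelineOptions object up front and call view_as on it directly. A small sketch with an illustrative --temp_location flag:

import apache_beam as beam
from apache_beam.options.pipeline_options import (
    DebugOptions, GoogleCloudOptions, PipelineOptions)

options = PipelineOptions(['--temp_location', 'gs://my-bucket/tmp'])  # placeholder bucket
experiments = options.view_as(DebugOptions).experiments or []
temp_location = options.view_as(GoogleCloudOptions).temp_location
p = beam.Pipeline(options=options)
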
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:298: FutureWarning: MatchAll is experimental.
  | 'GetPath' >> beam.Map(lambda metadata: metadata.path))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:309: FutureWarning: MatchAll is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/fileio_test.py>:309: FutureWarning: ReadMatches is experimental.
  | 'Checksums' >> beam.Map(compute_hash))
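
The MatchAll/ReadMatches FutureWarnings only flag that the fileio transforms exercised by fileio_test are still experimental. The pattern being tested looks roughly like the sketch below; the file glob and the hash() stand-in for the test's compute_hash are illustrative:

import apache_beam as beam
from apache_beam.io import fileio

with beam.Pipeline() as p:
    readable_files = (
        p
        | beam.Create(['gs://my-bucket/input/*.txt'])  # placeholder glob
        | fileio.MatchAll()      # expand each glob into FileMetadata records
        | fileio.ReadMatches())  # open each matched file for reading
    checksums = readable_files | 'Checksums' >> beam.Map(
        lambda f: (f.metadata.path, hash(f.read())))
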
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_31-5163673175888955342?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_test.py>:757: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  streaming = self.test_pipeline.options.view_as(StandardOptions).streaming
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_52_52-9454831054011065768?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_02_01-3547011333695676756?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_11_06-4629323106858309733?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_20_23-11909624489201736388?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_33-6096285111949417855?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_44_08-15465807576725356307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_52_31-13434280050284201531?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_01_20-7487129735966880685?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_io_read_pipeline.py>:75: FutureWarning: _ReadFromBigQuery is experimental.
  known_args.input_table))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_io_read_pipeline.py>:75: FutureWarning: _ReadFromBigQuery is experimental.
  known_args.input_table))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = pcoll.pipeline.options.view_as(
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1418: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  self.table_reference.projectId = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_33-7869984972063454344?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_42_59-3998448807282048152?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_53_25-7649048158264042986?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_03_49-4104726770454717973?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_11_42-9796623575123043641?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1421: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  experiments = p.options.view_as(DebugOptions).experiments or []
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:775: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
  temp_location = p.options.view_as(GoogleCloudOptions).temp_location
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_31-3921708734495228099?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py>:155: FutureWarning: _ReadFromBigQuery is experimental.
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_41_10-2990593770957319347?project=apache-beam-testing
  query=self.query, use_standard_sql=True, project=self.project))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_50_41-4360373067842289267?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:1605: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_00_15-9784073639116698454?project=apache-beam-testing
  temp_location = pcoll.pipeline.options.view_as(
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_09_01-754880787081769849?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_18_00-4146659407465453815?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_38-11441020143086559329?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_41_10-4471096683192603423?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_49_47-16860210309948158119?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_58_11-8448283292470433215?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_07_26-7123666682279205600?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_16_40-7954547840240281578?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_31_31-15568095939338273446?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_41_17-739397033557444661?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:753: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=transform.kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_49_44-4317775510785065876?project=apache-beam-testing
<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/io/gcp/big_query_query_to_table_pipeline.py>:75: BeamDeprecationWarning: BigQuerySink is deprecated since 2.11.0. Use WriteToBigQuery instead.
  kms_key=kms_key))
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_12_58_39-10240185811726959301?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-31_13_08_14-3552458130880915676?project=apache-beam-testing
test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) ... SKIP: This test still needs to be fixed on Python 3. TODO: BEAM-4543
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... SKIP: Due to a known issue in the avro-python3 package, this test is skipped until BEAM-6522 is addressed.
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_autocomplete_it (apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_leader_board_it (apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it (apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_user_score_it (apache_beam.examples.complete.game.user_score_it_test.UserScoreIT) ... ok
test_hourly_team_score_it (apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT) ... ok
test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT) ... ok
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_bqfl_streaming (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... SKIP: TestStream is not supported on TestDataflowRunner
test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_one_job_fails_all_jobs_fail (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... SKIP: This test still needs to be fixed on Python 3. TODO: BEAM-4543
test_copy (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_batch_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_kms (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_copy_rewrite_token (apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest) ... ok
test_value_provider_transform (apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) ... ok
test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests) ... ok
test_transform_on_gcs (apache_beam.io.fileio_test.MatchIntegrationTest) ... ok
test_parquetio_it (apache_beam.io.parquetio_it_test.TestParquetIT) ... ok
test_big_query_legacy_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_new_types (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_big_query_standard_sql_kms_key_native (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT) ... ok
test_basic_execution (apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests) ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream supports emitting to multiple PCollections. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
Tests that the TestStream can independently control output watermarks. ... SKIP: The "TestDataflowRunner" does not support the TestStream transform. Supported runners: ['DirectRunner', 'SwitchingDirectRunner']
test_file_loads (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... SKIP: https://issuetracker.google.com/issues/118375066
test_streaming_inserts (apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT) ... ok
test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
test_streaming_with_attributes (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest) ... ok
Runs streaming Dataflow job and verifies that user metrics are reported ... ok
test_job_python_from_python_it (apache_beam.transforms.external_test_it.ExternalTransformIT) ... ok
test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT) ... ok
test_metrics_fnapi_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_metrics_it (apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest) ... ok
test_big_query_write (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_new_types (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok
test_big_query_write_schema_autodetect (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... SKIP: DataflowRunner does not support schema autodetection
test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests) ... ok

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 51 tests in 3477.959s

OK (SKIP=9)

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/portable/py36/build.gradle>' line: 62

* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py36:postCommitPy36IT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 59m 37s
84 actionable tasks: 63 executed, 21 from cache

Publishing build scan...
https://gradle.com/s/yt4i7mrvutcuw

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org