Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/01/05 20:25:52 UTC

Build failed in Jenkins: beam_PostCommit_Python_Verify #3885

See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/3885/display/redirect?page=changes>

Changes:

[altay] Remove Queue based 60 seconds timeout for GCS io.

------------------------------------------
[...truncated 1.05 MB...]
copying apache_beam/portability/api/endpoints_pb2.py -> apache-beam-2.3.0.dev0/apache_beam/portability/api
copying apache_beam/portability/api/endpoints_pb2_grpc.py -> apache-beam-2.3.0.dev0/apache_beam/portability/api
copying apache_beam/portability/api/standard_window_fns_pb2.py -> apache-beam-2.3.0.dev0/apache_beam/portability/api
copying apache_beam/portability/api/standard_window_fns_pb2_grpc.py -> apache-beam-2.3.0.dev0/apache_beam/portability/api
copying apache_beam/runners/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/common.pxd -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/common.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/common_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/pipeline_context.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/pipeline_context_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/sdf_common.py -> apache-beam-2.3.0.dev0/apache_beam/runners
copying apache_beam/runners/dataflow/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/dataflow_metrics.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/dataflow_metrics_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/dataflow_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/dataflow_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/ptransform_overrides.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/template_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/test_dataflow_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow
copying apache_beam/runners/dataflow/internal/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/apiclient.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/apiclient_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/dependency.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/dependency_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/names.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal
copying apache_beam/runners/dataflow/internal/clients/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients
copying apache_beam/runners/dataflow/internal/clients/dataflow/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients/dataflow
copying apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients/dataflow
copying apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients/dataflow
copying apache_beam/runners/dataflow/internal/clients/dataflow/message_matchers.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients/dataflow
copying apache_beam/runners/dataflow/internal/clients/dataflow/message_matchers_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/internal/clients/dataflow
copying apache_beam/runners/dataflow/native_io/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/native_io
copying apache_beam/runners/dataflow/native_io/iobase.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/native_io
copying apache_beam/runners/dataflow/native_io/iobase_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/native_io
copying apache_beam/runners/dataflow/native_io/streaming_create.py -> apache-beam-2.3.0.dev0/apache_beam/runners/dataflow/native_io
copying apache_beam/runners/direct/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/bundle_factory.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/clock.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/consumer_tracking_pipeline_visitor.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_metrics.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_metrics_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/evaluation_context.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/executor.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/helper_transforms.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/sdf_direct_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/sdf_direct_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/transform_evaluator.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/util.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/watermark_manager.py -> apache-beam-2.3.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/experimental/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/experimental
copying apache_beam/runners/experimental/python_rpc_direct/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/experimental/python_rpc_direct/python_rpc_direct_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/experimental/python_rpc_direct/server.py -> apache-beam-2.3.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/job/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/job
copying apache_beam/runners/job/manager.py -> apache-beam-2.3.0.dev0/apache_beam/runners/job
copying apache_beam/runners/job/utils.py -> apache-beam-2.3.0.dev0/apache_beam/runners/job
copying apache_beam/runners/portability/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/fn_api_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/fn_api_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/maptask_executor_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/maptask_executor_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner_main.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/test/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/test
copying apache_beam/runners/worker/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/bundle_processor.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/data_plane.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/data_plane_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/log_handler.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/log_handler_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger.pxd -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters.pxd -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operation_specs.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operations.pxd -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operations.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_main.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_main_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sideinputs.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sideinputs_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler.pyx -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler_fake.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler_test.py -> apache-beam-2.3.0.dev0/apache_beam/runners/worker
copying apache_beam/testing/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/pipeline_verifiers.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/pipeline_verifiers_test.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_pipeline.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_pipeline_test.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_stream.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_stream_test.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_utils.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/test_utils_test.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/util.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/util_test.py -> apache-beam-2.3.0.dev0/apache_beam/testing
copying apache_beam/testing/data/standard_coders.yaml -> apache-beam-2.3.0.dev0/apache_beam/testing/data
copying apache_beam/testing/data/trigger_transcripts.yaml -> apache-beam-2.3.0.dev0/apache_beam/testing/data
copying apache_beam/transforms/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/combiners.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/combiners_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/core.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/create_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_combiners.pxd -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_combiners.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/display.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/display_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/ptransform.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/ptransform_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/sideinputs.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/sideinputs_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/timeutil.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/trigger.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/trigger_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/util.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/util_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/window.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/window_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/transforms/write_ptransform_test.py -> apache-beam-2.3.0.dev0/apache_beam/transforms
copying apache_beam/typehints/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/decorators.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/native_type_compatibility.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/native_type_compatibility_test.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/opcodes.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/trivial_inference.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/trivial_inference_test.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typecheck.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typed_pipeline_test.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typehints.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typehints_test.py -> apache-beam-2.3.0.dev0/apache_beam/typehints
copying apache_beam/utils/__init__.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/annotations.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/annotations_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/counters.pxd -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/counters.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/counters_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/plugin.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/processes.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/processes_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/profiler.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/proto_utils.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/retry.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/retry_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/timestamp.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/timestamp_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/urns.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value.pxd -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value.py -> apache-beam-2.3.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value_test.py -> apache-beam-2.3.0.dev0/apache_beam/utils
Writing apache-beam-2.3.0.dev0/setup.cfg
creating dist
Creating tar archive
removing 'apache-beam-2.3.0.dev0' (and everything under it)

SDK_LOCATION=$(find dist/apache-beam-*.tar.gz)
find dist/apache-beam-*.tar.gz

# Run integration tests on the Google Cloud Dataflow service
# and validate that jobs finish successfully.
echo ">>> RUNNING TEST DATAFLOW RUNNER it tests"
>>> RUNNING TEST DATAFLOW RUNNER it tests
python setup.py nosetests \
  --attr IT \
  --nocapture \
  --processes=4 \
  --process-timeout=900 \
  --test-pipeline-options=" \
    --runner=TestDataflowRunner \
    --project=$PROJECT \
    --staging_location=$GCS_LOCATION/staging-it \
    --temp_location=$GCS_LOCATION/temp-it \
    --output=$GCS_LOCATION/py-it-cloud/output \
    --sdk_location=$SDK_LOCATION \
    --num_workers=1 \
    --sleep_secs=20"
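
(The command above selects tests tagged with the IT attribute and hands the Dataflow settings to each test through --test-pipeline-options. As a rough sketch, not taken from this build, of how such an integration test typically consumes those options via TestPipeline; the class name and the tiny pipeline body are illustrative only:)

# Hedged sketch: an integration test picking up --test-pipeline-options.
# "ExampleIT" and its pipeline are illustrative, not part of this build.
import unittest

from nose.plugins.attrib import attr

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to


class ExampleIT(unittest.TestCase):

  @attr('IT')  # matched by "nosetests --attr IT" in the command above
  def test_example_it(self):
    # TestPipeline parses the options supplied via --test-pipeline-options
    # (runner, project, staging/temp locations, sdk_location, ...).
    p = TestPipeline(is_integration_test=True)
    doubled = p | beam.Create([1, 2, 3]) | beam.Map(lambda x: x * 2)
    assert_that(doubled, equal_to([2, 4, 6]))
    p.run().wait_until_finish()
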
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/local/lib/python2.7/site-packages/setuptools/dist.py>:355: UserWarning: Normalizing '2.3.0.dev' to '2.3.0.dev0'
  normalized_version,
running nosetests
running egg_info
writing requirements to apache_beam.egg-info/requires.txt
writing apache_beam.egg-info/PKG-INFO
writing top-level names to apache_beam.egg-info/top_level.txt
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/gcsio.py>:160: DeprecationWarning: object() takes no parameters
  super(GcsIO, cls).__new__(cls, storage_client))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/gcsio.py>:160: DeprecationWarning: object() takes no parameters
  super(GcsIO, cls).__new__(cls, storage_client))
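
(This warning comes from forwarding an extra argument to object.__new__, which Python 2.7 deprecates and later versions reject outright. A minimal repro of the pattern and the usual fix is sketched below; "Wrapper" and "client" are stand-in names, not the actual GcsIO code:)

# Hedged repro of the DeprecationWarning above; only the
# __new__/__init__ pattern is the point.
class Wrapper(object):
    def __new__(cls, client=None):
        # Forwarding 'client' to object.__new__ is what triggers
        # "DeprecationWarning: object() takes no parameters":
        #     return super(Wrapper, cls).__new__(cls, client)
        return super(Wrapper, cls).__new__(cls)

    def __init__(self, client=None):
        # Per-instance state belongs in __init__, not in object.__new__.
        self.client = client

w = Wrapper(client='fake-client')
assert w.client == 'fake-client'
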
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/coders/typecoders.py>:133: UserWarning: Using fallback coder for typehint: Any.
  warnings.warn('Using fallback coder for typehint: %r.' % typehint)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/coders/typecoders.py>:133: UserWarning: Using fallback coder for typehint: Dict[Any, Any].
  warnings.warn('Using fallback coder for typehint: %r.' % typehint)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/coders/typecoders.py>:133: UserWarning: Using fallback coder for typehint: <type 'NoneType'>.
  warnings.warn('Using fallback coder for typehint: %r.' % typehint)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/coders/typecoders.py>:133: UserWarning: Using fallback coder for typehint: Any.
  warnings.warn('Using fallback coder for typehint: %r.' % typehint)
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/coders/typecoders.py>:133: UserWarning: Using fallback coder for typehint: Any.
  warnings.warn('Using fallback coder for typehint: %r.' % typehint)
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ERROR

======================================================================
ERROR: test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/plugins/multiprocess.py",> line 812, in run
    test(orig)
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/case.py",> line 45, in __call__
    return self.run(*arg, **kwarg)
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/case.py",> line 133, in run
    self.runTest(result)
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/case.py",> line 151, in runTest
    test(result)
  File "/usr/lib/python2.7/unittest/case.py", line 395, in __call__
    return self.run(*args, **kwds)
  File "/usr/lib/python2.7/unittest/case.py", line 331, in run
    testMethod()
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/examples/wordcount_it_test.py",> line 77, in test_wordcount_fnapi_it
    on_success_matcher=PipelineStateMatcher()))
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/examples/wordcount_fnapi.py",> line 130, in run
    result.wait_until_finish()
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py",> line 956, in wait_until_finish
    time.sleep(5.0)
  File "<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/.eggs/nose-1.3.7-py2.7.egg/nose/plugins/multiprocess.py",> line 276, in signalhandler
    raise TimedOutException()
TimedOutException: 'test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT)'

----------------------------------------------------------------------
Ran 3 tests in 901.276s

FAILED (errors=1)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user github@alasdairhodge.co.uk
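
(The TimedOutException above is raised by nose's multiprocess plugin when --process-timeout=900 expires: wait_until_finish() was still polling the Dataflow job after 15 minutes, so the test was killed from outside rather than failing on its own. A hedged sketch of bounding the wait inside a test instead, using only result.state and the public PipelineState constants; the 600-second deadline is an arbitrary illustrative value:)

# Hedged sketch: fail a test with a clear message before nose's
# --process-timeout kills the worker process.
import time

from apache_beam.runners.runner import PipelineState

_TERMINAL_STATES = (PipelineState.DONE,
                    PipelineState.FAILED,
                    PipelineState.CANCELLED)


def wait_with_deadline(result, deadline_secs=600, poll_secs=5.0):
  """Polls result.state until a terminal state or the deadline passes."""
  deadline = time.time() + deadline_secs
  while result.state not in _TERMINAL_STATES:
    if time.time() > deadline:
      raise AssertionError('Pipeline still in state %s after %ds'
                           % (result.state, deadline_secs))
    time.sleep(poll_secs)
  return result.state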

Jenkins build is back to normal : beam_PostCommit_Python_Verify #3887

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/3887/display/redirect?page=changes>


Build failed in Jenkins: beam_PostCommit_Python_Verify #3886

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/3886/display/redirect>

------------------------------------------
[...truncated 1.14 MB...]
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s2"
        }, 
        "serialized_fn": "<string of 952 bytes>", 
        "user_name": "pair_with_one"
      }
    }, 
    {
      "kind": "GroupByKey", 
      "name": "s4", 
      "properties": {
        "display_data": [], 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:pair", 
                  "component_encodings": [
                    {
                      "@type": "StrUtf8Coder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlzBJUWhJWkWziAeVyGDZmMhY20hU5IeAAajEkY=", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "kind:stream", 
                      "component_encodings": [
                        {
                          "@type": "VarIntCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxhiUWeeSXOIA5XIYNmYyFjbSFTkh4A89cR+g==", 
                          "component_encodings": []
                        }
                      ], 
                      "is_stream_like": true
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "group.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s3"
        }, 
        "serialized_fn": "%0AJ%22H%0A%1Dref_Coder_GlobalWindowCoder_1%12%27%0A%25%0A%23%0A%21urn%3Abeam%3Acoders%3Aglobal_window%3A0.1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01", 
        "user_name": "group"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s5", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "count_ones"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:pair", 
                  "component_encodings": [
                    {
                      "@type": "StrUtf8Coder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlzBJUWhJWkWziAeVyGDZmMhY20hU5IeAAajEkY=", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }, 
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                          "component_encodings": []
                        }
                      ], 
                      "is_pair_like": true
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "count.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s4"
        }, 
        "serialized_fn": "<string of 1032 bytes>", 
        "user_name": "count"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s6", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "format_result"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:global_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "format.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s5"
        }, 
        "serialized_fn": "<string of 1028 bytes>", 
        "user_name": "format"
      }
    }
  ], 
  "type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
 createTime: u'2018-01-05T21:25:52.548614Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-01-05_13_25_51-4093034360880509119'
 location: u'us-central1'
 name: u'beamapp-jenkins-0105212550-913809'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-05_13_25_51-4093034360880509119]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-05_13_25_51-4093034360880509119?project=apache-beam-testing
root: INFO: Job 2018-01-05_13_25_51-4093034360880509119 is in state JOB_STATE_PENDING
root: INFO: 2018-01-05T21:25:51.932Z: JOB_MESSAGE_WARNING: (38cd6110bdc03e03): Setting the number of workers (1) disables autoscaling for this job. If you are trying to cap autoscaling, consider only setting max_num_workers. If you want to disable autoscaling altogether, the documented way is to explicitly use autoscalingAlgorithm=NONE.
root: INFO: 2018-01-05T21:25:54.041Z: JOB_MESSAGE_DETAILED: (e920b33f627dea3c): Checking required Cloud APIs are enabled.
root: INFO: 2018-01-05T21:25:55.095Z: JOB_MESSAGE_DETAILED: (e920b33f627decd7): Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2018-01-05T21:25:55.118Z: JOB_MESSAGE_DETAILED: (e920b33f627deeac): Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-05T21:25:55.134Z: JOB_MESSAGE_DEBUG: (e920b33f627de081): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2018-01-05T21:25:55.148Z: JOB_MESSAGE_DETAILED: (e920b33f627de70f): Expanding GroupByKey operations into optimizable parts.
root: INFO: 2018-01-05T21:25:55.166Z: JOB_MESSAGE_DEBUG: (e920b33f627ded9d): Annotating graph with Autotuner information.
root: INFO: 2018-01-05T21:25:55.186Z: JOB_MESSAGE_DETAILED: (e920b33f627deab9): Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-05T21:25:55.204Z: JOB_MESSAGE_DETAILED: (e920b33f627de147): Fusing consumer split into read/Read
root: INFO: 2018-01-05T21:25:55.226Z: JOB_MESSAGE_DETAILED: (e920b33f627de7d5): Fusing consumer group/Reify into pair_with_one
root: INFO: 2018-01-05T21:25:55.247Z: JOB_MESSAGE_DETAILED: (e920b33f627dee63): Fusing consumer format into count
root: INFO: 2018-01-05T21:25:55.267Z: JOB_MESSAGE_DETAILED: (e920b33f627de4f1): Fusing consumer count into group/GroupByWindow
root: INFO: 2018-01-05T21:25:55.285Z: JOB_MESSAGE_DETAILED: (e920b33f627deb7f): Fusing consumer pair_with_one into split
root: INFO: 2018-01-05T21:25:55.298Z: JOB_MESSAGE_DETAILED: (e920b33f627de20d): Fusing consumer group/Write into group/Reify
root: INFO: 2018-01-05T21:25:55.319Z: JOB_MESSAGE_DETAILED: (e920b33f627de89b): Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2018-01-05T21:25:55.344Z: JOB_MESSAGE_DEBUG: (e920b33f627def29): Workflow config is missing a default resource spec.
root: INFO: 2018-01-05T21:25:55.359Z: JOB_MESSAGE_DEBUG: (e920b33f627de5b7): Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-01-05T21:25:55.374Z: JOB_MESSAGE_DEBUG: (e920b33f627dec45): Adding workflow start and stop steps.
root: INFO: 2018-01-05T21:25:55.396Z: JOB_MESSAGE_DEBUG: (e920b33f627de2d3): Assigning stage ids.
root: INFO: 2018-01-05T21:25:55.522Z: JOB_MESSAGE_DEBUG: (2ec95b0aa6997287): Executing wait step start13
root: INFO: 2018-01-05T21:25:55.567Z: JOB_MESSAGE_BASIC: (2ec95b0aa6997eb2): Executing operation group/Create
root: INFO: 2018-01-05T21:25:55.603Z: JOB_MESSAGE_DEBUG: (b3259c595fc2bd22): Starting worker pool setup.
root: INFO: 2018-01-05T21:25:55.623Z: JOB_MESSAGE_BASIC: (b3259c595fc2b420): Starting 1 workers in us-central1-f...
root: INFO: 2018-01-05T21:25:55.668Z: JOB_MESSAGE_DEBUG: (3e2f7a10db1ca503): Value "group/Session" materialized.
root: INFO: Job 2018-01-05_13_25_51-4093034360880509119 is in state JOB_STATE_RUNNING
root: INFO: 2018-01-05T21:25:55.713Z: JOB_MESSAGE_BASIC: (3e2f7a10db1ca4cb): Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2018-01-05T21:26:03.207Z: JOB_MESSAGE_DETAILED: (519a95f67e03f566): Autoscaling: Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
root: INFO: 2018-01-05T21:26:39.047Z: JOB_MESSAGE_ERROR: (519a95f67e03f791): Startup of the worker pool in zone us-central1-f failed to bring up any of the desired 1 workers. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded.  Limit: 21000.0 in region us-central1.
root: INFO: 2018-01-05T21:26:39.058Z: JOB_MESSAGE_ERROR: (519a95f67e03f1c7): Workflow failed.
root: INFO: 2018-01-05T21:26:39.291Z: JOB_MESSAGE_DETAILED: (e920b33f627de852): Cleaning up.
root: INFO: 2018-01-05T21:26:39.322Z: JOB_MESSAGE_DEBUG: (e920b33f627de56e): Starting worker pool teardown.
root: INFO: 2018-01-05T21:26:39.330Z: JOB_MESSAGE_BASIC: (e920b33f627debfc): Stopping worker pool...
root: INFO: 2018-01-05T21:27:52.682Z: JOB_MESSAGE_BASIC: (e920b33f627de634): Worker pool stopped.
root: INFO: 2018-01-05T21:27:52.731Z: JOB_MESSAGE_DEBUG: (e920b33f627de350): Tearing down pending resources...
root: INFO: Job 2018-01-05_13_25_51-4093034360880509119 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
Ran 3 tests in 393.131s

FAILED (errors=2)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user github@alasdairhodge.co.uk
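
(The failure in #3886 differs from #3885: the captured job's worker pool never started because the project's DISKS_TOTAL_GB quota in us-central1 was exhausted, so the job hit QUOTA_EXCEEDED rather than a timeout. One way to shrink a test job's disk footprint, sketched here under the assumption that the standard Dataflow worker options are available in this SDK build, is to request a smaller boot disk alongside the single worker; values are illustrative, and a real run also needs the project, staging and temp locations shown in the command earlier in this thread:)

# Hedged sketch: standard Dataflow worker options that reduce the job's
# contribution to the DISKS_TOTAL_GB quota. Values are illustrative only.
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=TestDataflowRunner',
    '--num_workers=1',
    '--disk_size_gb=30',  # smaller boot disk per worker
])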