Posted to builds@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2021/07/01 12:05:19 UTC

Build failed in Jenkins: beam_PostCommit_Python_VR_Spark #4778

See <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/4778/display/redirect>

Changes:


------------------------------------------
[...truncated 354.08 KB...]
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'apache_beam.runners.portability.spark_runner_test.SparkRunnerTest'>

    @classmethod
    def _start_local_runner_subprocess_job_service(cls):
      cls._maybe_kill_subprocess()
      # TODO(robertwb): Consider letting the subprocess pick one and
      # communicate it back...
      # pylint: disable=unbalanced-tuple-unpacking
      job_port, expansion_port = cls._pick_unused_ports(num_ports=2)
      _LOGGER.info('Starting server on port %d.', job_port)
      cls._subprocess = subprocess.Popen(
          cls._subprocess_command(job_port, expansion_port))
      address = 'localhost:%d' % job_port
      job_service = beam_job_api_pb2_grpc.JobServiceStub(
          GRPCChannelFactory.insecure_channel(address))
      _LOGGER.info('Waiting for server to be ready...')
      start = time.time()
      timeout = 30
      while True:
        time.sleep(0.1)
        if cls._subprocess.poll() is not None:
>         raise RuntimeError(
              'Subprocess terminated unexpectedly with exit code %d.' %
E             RuntimeError: Subprocess terminated unexpectedly with exit code 1.

apache_beam/runners/portability/portable_runner_test.py:99: RuntimeError
----------------------------- Captured stderr call -----------------------------
Error: Invalid or corrupt jarfile <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.32.0-SNAPSHOT.jar>
__________ SparkRunnerTest.test_sdf_with_dofn_as_restriction_provider __________

self = <apache_beam.runners.portability.spark_runner_test.SparkRunnerTest testMethod=test_sdf_with_dofn_as_restriction_provider>

    def test_sdf_with_dofn_as_restriction_provider(self):
      class ExpandingStringsDoFn(beam.DoFn, ExpandStringsProvider):
        def process(
            self, element, restriction_tracker=beam.DoFn.RestrictionParam()):
          assert isinstance(restriction_tracker, RestrictionTrackerView)
          cur = restriction_tracker.current_restriction().start
          while restriction_tracker.try_claim(cur):
            yield element[cur]
            cur += 1
    
>     with self.create_pipeline() as p:

apache_beam/runners/portability/fn_api_runner/fn_runner_test.py:542: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/runners/portability/portable_runner_test.py:167: in create_pipeline
    return beam.Pipeline(self.get_runner(), self.create_options())
apache_beam/runners/portability/spark_runner_test.py:133: in create_options
    options = super(SparkRunnerTest, self).create_options()
apache_beam/runners/portability/portable_runner_test.py:155: in create_options
    options.view_as(PortableOptions).job_endpoint = self._get_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:120: in _get_job_endpoint
    cls._job_endpoint = cls._create_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:126: in _create_job_endpoint
    return cls._start_local_runner_subprocess_job_service()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'apache_beam.runners.portability.spark_runner_test.SparkRunnerTest'>

    @classmethod
    def _start_local_runner_subprocess_job_service(cls):
      cls._maybe_kill_subprocess()
      # TODO(robertwb): Consider letting the subprocess pick one and
      # communicate it back...
      # pylint: disable=unbalanced-tuple-unpacking
      job_port, expansion_port = cls._pick_unused_ports(num_ports=2)
      _LOGGER.info('Starting server on port %d.', job_port)
      cls._subprocess = subprocess.Popen(
          cls._subprocess_command(job_port, expansion_port))
      address = 'localhost:%d' % job_port
      job_service = beam_job_api_pb2_grpc.JobServiceStub(
          GRPCChannelFactory.insecure_channel(address))
      _LOGGER.info('Waiting for server to be ready...')
      start = time.time()
      timeout = 30
      while True:
        time.sleep(0.1)
        if cls._subprocess.poll() is not None:
>         raise RuntimeError(
              'Subprocess terminated unexpectedly with exit code %d.' %
E             RuntimeError: Subprocess terminated unexpectedly with exit code 1.

apache_beam/runners/portability/portable_runner_test.py:99: RuntimeError
----------------------------- Captured stderr call -----------------------------
Error: Invalid or corrupt jarfile <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.32.0-SNAPSHOT.jar>
_______________ SparkRunnerTest.test_windowed_pardo_state_timers _______________

self = <apache_beam.runners.portability.spark_runner_test.SparkRunnerTest testMethod=test_windowed_pardo_state_timers>

    def test_windowed_pardo_state_timers(self):
>     self._run_pardo_state_timers(windowed=True)

apache_beam/runners/portability/fn_api_runner/fn_runner_test.py:423: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/runners/portability/fn_api_runner/fn_runner_test.py:476: in _run_pardo_state_timers
    with self.create_pipeline() as p:
apache_beam/runners/portability/portable_runner_test.py:167: in create_pipeline
    return beam.Pipeline(self.get_runner(), self.create_options())
apache_beam/runners/portability/spark_runner_test.py:133: in create_options
    options = super(SparkRunnerTest, self).create_options()
apache_beam/runners/portability/portable_runner_test.py:155: in create_options
    options.view_as(PortableOptions).job_endpoint = self._get_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:120: in _get_job_endpoint
    cls._job_endpoint = cls._create_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:126: in _create_job_endpoint
    return cls._start_local_runner_subprocess_job_service()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'apache_beam.runners.portability.spark_runner_test.SparkRunnerTest'>

    @classmethod
    def _start_local_runner_subprocess_job_service(cls):
      cls._maybe_kill_subprocess()
      # TODO(robertwb): Consider letting the subprocess pick one and
      # communicate it back...
      # pylint: disable=unbalanced-tuple-unpacking
      job_port, expansion_port = cls._pick_unused_ports(num_ports=2)
      _LOGGER.info('Starting server on port %d.', job_port)
      cls._subprocess = subprocess.Popen(
          cls._subprocess_command(job_port, expansion_port))
      address = 'localhost:%d' % job_port
      job_service = beam_job_api_pb2_grpc.JobServiceStub(
          GRPCChannelFactory.insecure_channel(address))
      _LOGGER.info('Waiting for server to be ready...')
      start = time.time()
      timeout = 30
      while True:
        time.sleep(0.1)
        if cls._subprocess.poll() is not None:
>         raise RuntimeError(
              'Subprocess terminated unexpectedly with exit code %d.' %
E             RuntimeError: Subprocess terminated unexpectedly with exit code 1.

apache_beam/runners/portability/portable_runner_test.py:99: RuntimeError
----------------------------- Captured stderr call -----------------------------
Error: Invalid or corrupt jarfile <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.32.0-SNAPSHOT.jar>
________________________ SparkRunnerTest.test_windowing ________________________

self = <apache_beam.runners.portability.spark_runner_test.SparkRunnerTest testMethod=test_windowing>

    def test_windowing(self):
>     with self.create_pipeline() as p:

apache_beam/runners/portability/fn_api_runner/fn_runner_test.py:767: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/runners/portability/portable_runner_test.py:167: in create_pipeline
    return beam.Pipeline(self.get_runner(), self.create_options())
apache_beam/runners/portability/spark_runner_test.py:133: in create_options
    options = super(SparkRunnerTest, self).create_options()
apache_beam/runners/portability/portable_runner_test.py:155: in create_options
    options.view_as(PortableOptions).job_endpoint = self._get_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:120: in _get_job_endpoint
    cls._job_endpoint = cls._create_job_endpoint()
apache_beam/runners/portability/portable_runner_test.py:126: in _create_job_endpoint
    return cls._start_local_runner_subprocess_job_service()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = <class 'apache_beam.runners.portability.spark_runner_test.SparkRunnerTest'>

    @classmethod
    def _start_local_runner_subprocess_job_service(cls):
      cls._maybe_kill_subprocess()
      # TODO(robertwb): Consider letting the subprocess pick one and
      # communicate it back...
      # pylint: disable=unbalanced-tuple-unpacking
      job_port, expansion_port = cls._pick_unused_ports(num_ports=2)
      _LOGGER.info('Starting server on port %d.', job_port)
      cls._subprocess = subprocess.Popen(
          cls._subprocess_command(job_port, expansion_port))
      address = 'localhost:%d' % job_port
      job_service = beam_job_api_pb2_grpc.JobServiceStub(
          GRPCChannelFactory.insecure_channel(address))
      _LOGGER.info('Waiting for server to be ready...')
      start = time.time()
      timeout = 30
      while True:
        time.sleep(0.1)
        if cls._subprocess.poll() is not None:
>         raise RuntimeError(
              'Subprocess terminated unexpectedly with exit code %d.' %
E             RuntimeError: Subprocess terminated unexpectedly with exit code 1.

apache_beam/runners/portability/portable_runner_test.py:99: RuntimeError
----------------------------- Captured stderr call -----------------------------
Error: Invalid or corrupt jarfile <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.32.0-SNAPSHOT.jar>
=============================== warnings summary ===============================
target/.tox-spark-runner-test/spark-runner-test/lib/python3.8/site-packages/tenacity/_asyncio.py:42
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/py38/build/srcs/sdks/python/target/.tox-spark-runner-test/spark-runner-test/lib/python3.8/site-packages/tenacity/_asyncio.py>:42: DeprecationWarning: "@coroutine" decorator is deprecated since Python 3.8, use "async def" instead
    def call(self, fn, *args, **kwargs):

-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/py38/build/srcs/sdks/python/pytest_spark-runner-test.xml> -
========= 33 failed, 1 passed, 16 skipped, 1 warnings in 12.65 seconds =========
ERROR: InvocationError for command <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/py38/build/srcs/sdks/python/scripts/pytest_validates_runner.sh> spark-runner-test apache_beam/runners/portability/spark_runner_test.py '--spark_job_server_jar=<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/runners/spark/2/job-server/build/libs/beam-runners-spark-job-server-2.32.0-SNAPSHOT.jar> --environment_type=LOOPBACK' (exited with code 1)
spark-runner-test run-test-post: commands[0] | bash <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/py38/build/srcs/sdks/python/scripts/run_tox_cleanup.sh>
___________________________________ summary ____________________________________
ERROR:   spark-runner-test: commands failed

> Task :sdks:python:test-suites:portable:py38:sparkCompatibilityMatrixLOOPBACK FAILED

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py36:sparkCompatibilityMatrixLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py37:sparkCompatibilityMatrixLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py38:sparkCompatibilityMatrixLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 4m 44s
67 actionable tasks: 45 executed, 22 from cache

Publishing build scan...
https://gradle.com/s/zk633vh5pl56o

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org


Jenkins build is back to normal : beam_PostCommit_Python_VR_Spark #4779

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/4779/display/redirect?page=changes>


---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscribe@beam.apache.org
For additional commands, e-mail: builds-help@beam.apache.org